MultiSearcher.java

lucene-2.4.0 is a full-text search toolkit.

Language: Java
package org.apache.lucene.search;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.Term;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/** Implements search over a set of <code>Searchables</code>.
 *
 * <p>Applications usually need only call the inherited {@link #search(Query)}
 * or {@link #search(Query,Filter)} methods.
 */
public class MultiSearcher extends Searcher {

  /**
   * Document Frequency cache acting as a Dummy-Searcher.
   * This class is no full-fledged Searcher, but only supports
   * the methods necessary to initialize Weights.
   */
  private static class CachedDfSource extends Searcher {
    private Map dfMap;  // Map from Terms to corresponding doc freqs
    private int maxDoc; // document count

    public CachedDfSource(Map dfMap, int maxDoc, Similarity similarity) {
      this.dfMap = dfMap;
      this.maxDoc = maxDoc;
      setSimilarity(similarity);
    }

    public int docFreq(Term term) {
      int df;
      try {
        df = ((Integer) dfMap.get(term)).intValue();
      } catch (NullPointerException e) {
        throw new IllegalArgumentException("df for term " + term.text()
            + " not available");
      }
      return df;
    }

    public int[] docFreqs(Term[] terms) {
      int[] result = new int[terms.length];
      for (int i = 0; i < terms.length; i++) {
        result[i] = docFreq(terms[i]);
      }
      return result;
    }

    public int maxDoc() {
      return maxDoc;
    }

    public Query rewrite(Query query) {
      // This is a bit of a hack. We know that a query which
      // creates a Weight based on this Dummy-Searcher is
      // always already rewritten (see preparedWeight()).
      // Therefore we just return the unmodified query here.
      return query;
    }

    public void close() {
      throw new UnsupportedOperationException();
    }

    public Document doc(int i) {
      throw new UnsupportedOperationException();
    }

    public Document doc(int i, FieldSelector fieldSelector) {
      throw new UnsupportedOperationException();
    }

    public Explanation explain(Weight weight, int doc) {
      throw new UnsupportedOperationException();
    }

    public void search(Weight weight, Filter filter, HitCollector results) {
      throw new UnsupportedOperationException();
    }

    public TopDocs search(Weight weight, Filter filter, int n) {
      throw new UnsupportedOperationException();
    }

    public TopFieldDocs search(Weight weight, Filter filter, int n, Sort sort) {
      throw new UnsupportedOperationException();
    }
  }

  private Searchable[] searchables;
  private int[] starts;
  private int maxDoc = 0;

  /** Creates a searcher which searches <i>searchables</i>. */
  public MultiSearcher(Searchable[] searchables) throws IOException {
    this.searchables = searchables;

    starts = new int[searchables.length + 1];     // build starts array
    for (int i = 0; i < searchables.length; i++) {
      starts[i] = maxDoc;
      maxDoc += searchables[i].maxDoc();          // compute maxDocs
    }
    starts[searchables.length] = maxDoc;
  }

  /** Return the array of {@link Searchable}s this searches. */
  public Searchable[] getSearchables() {
    return searchables;
  }

  protected int[] getStarts() {
    return starts;
  }

  // inherit javadoc
  public void close() throws IOException {
    for (int i = 0; i < searchables.length; i++)
      searchables[i].close();
  }

  public int docFreq(Term term) throws IOException {
    int docFreq = 0;
    for (int i = 0; i < searchables.length; i++)
      docFreq += searchables[i].docFreq(term);
    return docFreq;
  }

  // inherit javadoc
  public Document doc(int n) throws CorruptIndexException, IOException {
    int i = subSearcher(n);                       // find searcher index
    return searchables[i].doc(n - starts[i]);     // dispatch to searcher
  }

  // inherit javadoc
  public Document doc(int n, FieldSelector fieldSelector)
      throws CorruptIndexException, IOException {
    int i = subSearcher(n);                       // find searcher index
    return searchables[i].doc(n - starts[i], fieldSelector); // dispatch to searcher
  }

  /** Returns index of the searcher for document <code>n</code> in the array
   * used to construct this searcher. */
  public int subSearcher(int n) {                 // find searcher for doc n:
    // replace w/ call to Arrays.binarySearch in Java 1.2
    int lo = 0;                                   // search starts array
    int hi = searchables.length - 1;              // for first element less
                                                  // than n, return its index
    while (hi >= lo) {
      int mid = (lo + hi) >> 1;
      int midValue = starts[mid];
      if (n < midValue)
        hi = mid - 1;
      else if (n > midValue)
        lo = mid + 1;
      else {                                      // found a match
        while (mid + 1 < searchables.length && starts[mid + 1] == midValue) {
          mid++;                                  // scan to last match
        }
        return mid;
      }
    }
    return hi;
  }

  /** Returns the document number of document <code>n</code> within its
   * sub-index. */
  public int subDoc(int n) {
    return n - starts[subSearcher(n)];
  }

  public int maxDoc() throws IOException {
    return maxDoc;
  }

  public TopDocs search(Weight weight, Filter filter, int nDocs)
      throws IOException {
    HitQueue hq = new HitQueue(nDocs);
    int totalHits = 0;

    for (int i = 0; i < searchables.length; i++) { // search each searcher
      TopDocs docs = searchables[i].search(weight, filter, nDocs);
      totalHits += docs.totalHits;                // update totalHits
      ScoreDoc[] scoreDocs = docs.scoreDocs;
      for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
        ScoreDoc scoreDoc = scoreDocs[j];
        scoreDoc.doc += starts[i];                // convert doc
        if (!hq.insert(scoreDoc))
          break;                                  // no more scores > minScore
      }
    }

    ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
    for (int i = hq.size() - 1; i >= 0; i--)      // put docs in array
      scoreDocs[i] = (ScoreDoc) hq.pop();

    float maxScore = (totalHits == 0) ? Float.NEGATIVE_INFINITY : scoreDocs[0].score;

    return new TopDocs(totalHits, scoreDocs, maxScore);
  }

  public TopFieldDocs search(Weight weight, Filter filter, int n, Sort sort)
      throws IOException {
    FieldDocSortedHitQueue hq = null;
    int totalHits = 0;
    float maxScore = Float.NEGATIVE_INFINITY;

    for (int i = 0; i < searchables.length; i++) { // search each searcher
      TopFieldDocs docs = searchables[i].search(weight, filter, n, sort);

      if (hq == null) hq = new FieldDocSortedHitQueue(docs.fields, n);
      totalHits += docs.totalHits;                // update totalHits
      maxScore = Math.max(maxScore, docs.getMaxScore());
      ScoreDoc[] scoreDocs = docs.scoreDocs;
      for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
        ScoreDoc scoreDoc = scoreDocs[j];
        scoreDoc.doc += starts[i];                // convert doc
        if (!hq.insert(scoreDoc))
          break;                                  // no more scores > minScore
      }
    }

    ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
    for (int i = hq.size() - 1; i >= 0; i--)      // put docs in array
      scoreDocs[i] = (ScoreDoc) hq.pop();

    return new TopFieldDocs(totalHits, scoreDocs, hq.getFields(), maxScore);
  }

  // inherit javadoc
  public void search(Weight weight, Filter filter, final HitCollector results)
      throws IOException {
    for (int i = 0; i < searchables.length; i++) {
      final int start = starts[i];
      searchables[i].search(weight, filter, new HitCollector() {
        public void collect(int doc, float score) {
          results.collect(doc + start, score);
        }
      });
    }
  }

  public Query rewrite(Query original) throws IOException {
    Query[] queries = new Query[searchables.length];
    for (int i = 0; i < searchables.length; i++) {
      queries[i] = searchables[i].rewrite(original);
    }
    return queries[0].combine(queries);
  }

  public Explanation explain(Weight weight, int doc) throws IOException {
    int i = subSearcher(doc);                     // find searcher index
    return searchables[i].explain(weight, doc - starts[i]); // dispatch to searcher
  }

  /**
   * Create weight in multiple index scenario.
   *
   * Distributed query processing is done in the following steps:
   * 1. rewrite query
   * 2. extract necessary terms
   * 3. collect dfs for these terms from the Searchables
   * 4. create query weight using aggregate dfs.
   * 5. distribute that weight to Searchables
   * 6. merge results
   *
   * Steps 1-4 are done here, 5+6 in the search() methods
   *
   * @return rewritten queries
   */
  protected Weight createWeight(Query original) throws IOException {
    // step 1
    Query rewrittenQuery = rewrite(original);

    // step 2
    Set terms = new HashSet();
    rewrittenQuery.extractTerms(terms);

    // step 3
    Term[] allTermsArray = new Term[terms.size()];
    terms.toArray(allTermsArray);
    int[] aggregatedDfs = new int[terms.size()];
    for (int i = 0; i < searchables.length; i++) {
      int[] dfs = searchables[i].docFreqs(allTermsArray);
      for (int j = 0; j < aggregatedDfs.length; j++) {
        aggregatedDfs[j] += dfs[j];
      }
    }

    HashMap dfMap = new HashMap();
    for (int i = 0; i < allTermsArray.length; i++) {
      dfMap.put(allTermsArray[i], new Integer(aggregatedDfs[i]));
    }

    // step 4
    int numDocs = maxDoc();
    CachedDfSource cacheSim = new CachedDfSource(dfMap, numDocs, getSimilarity());

    return rewrittenQuery.weight(cacheSim);
  }
}
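A brief usage sketch follows. It is not part of the Apache source above: the index paths, the "contents" field name, and the query string are hypothetical placeholders, assuming two existing on-disk indexes built with the Lucene 2.4 API. The sketch shows the two things MultiSearcher handles for the caller: merging per-index results into one ranked list, and mapping between global document IDs (local ID plus starts[i]) and per-index IDs via subSearcher()/subDoc().

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searchable;
import org.apache.lucene.search.TopDocs;

public class MultiSearcherDemo {
  public static void main(String[] args) throws Exception {
    // One IndexSearcher per physical index; IndexSearcher implements Searchable.
    Searchable[] searchables = {
        new IndexSearcher("/path/to/index1"),  // hypothetical index paths
        new IndexSearcher("/path/to/index2")
    };
    MultiSearcher searcher = new MultiSearcher(searchables);

    // "contents" and the query string are placeholders for a real field/terms.
    Query query = new QueryParser("contents", new StandardAnalyzer())
        .parse("lucene");

    // The inherited search(Query, Filter, int) builds one Weight with
    // aggregated dfs (createWeight), runs it against every sub-index,
    // and merges the per-index TopDocs into a single ranked list.
    TopDocs top = searcher.search(query, null, 10);
    for (int i = 0; i < top.scoreDocs.length; i++) {
      int globalDoc = top.scoreDocs[i].doc;      // already offset by starts[i]
      System.out.println("sub-index " + searcher.subSearcher(globalDoc)
          + ", local doc " + searcher.subDoc(globalDoc)
          + ", score " + top.scoreDocs[i].score);
    }
    searcher.close();  // closes every underlying Searchable
  }
}

The reason createWeight aggregates document frequencies before distributing the weight (steps 3-4 in the javadoc) is scoring consistency: if each sub-index computed IDF from its own local df, the same term could score differently in each index and the merged ranking would be skewed. CachedDfSource exists only to hand those aggregate dfs to the Weight during initialization.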
