DocFieldProcessorPerThread.java

lucene-2.4.0 is a full-text search toolkit
Language: Java
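For context: this class is internal to Lucene's indexing chain and is not called directly by applications; it is driven by IndexWriter.addDocument() via DocumentsWriter. As a rough sketch of where it sits, a minimal Lucene 2.4-style indexing program looks like the following (the constructor and Field.Index names are recalled from the 2.4 API and should be treated as an assumption, not verified code):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;

public class IndexingSketch {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    // Lucene 2.4-era constructor; 'true' creates a new index.
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true,
                                         IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.add(new Field("title", "Hello Lucene", Field.Store.YES, Field.Index.ANALYZED));
    // addDocument() ultimately drives processDocument() in the class
    // below, via the calling thread's state inside DocumentsWriter.
    writer.addDocument(doc);
    writer.close();
  }
}

Each Fieldable added to the Document above is what processDocument() below gathers, groups by field name, and hands to the per-field consumers.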
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;

/**
 * Gathers all Fieldables for a document under the same
 * name, updates FieldInfos, and calls per-field consumers
 * to process field by field.
 *
 * Currently, only a single thread visits the fields,
 * sequentially, for processing.
 */
final class DocFieldProcessorPerThread extends DocConsumerPerThread {

  float docBoost;
  int fieldGen;
  final DocFieldProcessor docFieldProcessor;
  final FieldInfos fieldInfos;
  final DocFieldConsumerPerThread consumer;

  // Holds all fields seen in current doc
  DocFieldProcessorPerField[] fields = new DocFieldProcessorPerField[1];
  int fieldCount;

  // Hash table for all fields ever seen
  DocFieldProcessorPerField[] fieldHash = new DocFieldProcessorPerField[2];
  int hashMask = 1;
  int totalFieldCount;

  final DocumentsWriter.DocState docState;

  public DocFieldProcessorPerThread(DocumentsWriterThreadState threadState, DocFieldProcessor docFieldProcessor) throws IOException {
    this.docState = threadState.docState;
    this.docFieldProcessor = docFieldProcessor;
    this.fieldInfos = docFieldProcessor.fieldInfos;
    this.consumer = docFieldProcessor.consumer.addThread(this);
  }

  public void abort() {
    for(int i=0;i<fieldHash.length;i++) {
      DocFieldProcessorPerField field = fieldHash[i];
      while(field != null) {
        final DocFieldProcessorPerField next = field.next;
        field.abort();
        field = next;
      }
    }
    consumer.abort();
  }

  public Collection fields() {
    Collection fields = new HashSet();
    for(int i=0;i<fieldHash.length;i++) {
      DocFieldProcessorPerField field = fieldHash[i];
      while(field != null) {
        fields.add(field.consumer);
        field = field.next;
      }
    }
    assert fields.size() == totalFieldCount;
    return fields;
  }

  /** If there are fields we've seen but did not see again
   *  in the last run, then free them up. */
  void trimFields(DocumentsWriter.FlushState state) {

    for(int i=0;i<fieldHash.length;i++) {
      DocFieldProcessorPerField perField = fieldHash[i];
      DocFieldProcessorPerField lastPerField = null;

      while (perField != null) {

        if (perField.lastGen == -1) {

          // This field was not seen since the previous
          // flush, so, free up its resources now

          // Unhash
          if (lastPerField == null)
            fieldHash[i] = perField.next;
          else
            lastPerField.next = perField.next;

          if (state.docWriter.infoStream != null)
            state.docWriter.infoStream.println("  purge field=" + perField.fieldInfo.name);

          totalFieldCount--;

        } else {
          // Reset
          perField.lastGen = -1;
          lastPerField = perField;
        }

        perField = perField.next;
      }
    }
  }

  private void rehash() {
    final int newHashSize = (int) (fieldHash.length*2);
    assert newHashSize > fieldHash.length;

    final DocFieldProcessorPerField newHashArray[] = new DocFieldProcessorPerField[newHashSize];

    // Rehash
    int newHashMask = newHashSize-1;
    for(int j=0;j<fieldHash.length;j++) {
      DocFieldProcessorPerField fp0 = fieldHash[j];
      while(fp0 != null) {
        final int hashPos2 = fp0.fieldInfo.name.hashCode() & newHashMask;
        DocFieldProcessorPerField nextFP0 = fp0.next;
        fp0.next = newHashArray[hashPos2];
        newHashArray[hashPos2] = fp0;
        fp0 = nextFP0;
      }
    }

    fieldHash = newHashArray;
    hashMask = newHashMask;
  }

  public DocumentsWriter.DocWriter processDocument() throws IOException {

    consumer.startDocument();

    final Document doc = docState.doc;

    assert docFieldProcessor.docWriter.writer.testPoint("DocumentsWriter.ThreadState.init start");

    fieldCount = 0;

    final int thisFieldGen = fieldGen++;

    final List docFields = doc.getFields();
    final int numDocFields = docFields.size();

    // Absorb any new fields first seen in this document.
    // Also absorb any changes to fields we had already
    // seen before (eg suddenly turning on norms or
    // vectors, etc.):

    for(int i=0;i<numDocFields;i++) {
      Fieldable field = (Fieldable) docFields.get(i);
      final String fieldName = field.name();

      // Make sure we have a PerField allocated
      final int hashPos = fieldName.hashCode() & hashMask;
      DocFieldProcessorPerField fp = fieldHash[hashPos];
      while(fp != null && !fp.fieldInfo.name.equals(fieldName))
        fp = fp.next;

      if (fp == null) {

        // TODO FI: we need to genericize the "flags" that a
        // field holds, and, how these flags are merged; it
        // needs to be more "pluggable" such that if I want
        // to have a new "thing" my Fields can do, I can
        // easily add it
        FieldInfo fi = fieldInfos.add(fieldName, field.isIndexed(), field.isTermVectorStored(),
                                      field.isStorePositionWithTermVector(), field.isStoreOffsetWithTermVector(),
                                      field.getOmitNorms(), false, field.getOmitTf());

        fp = new DocFieldProcessorPerField(this, fi);
        fp.next = fieldHash[hashPos];
        fieldHash[hashPos] = fp;
        totalFieldCount++;

        if (totalFieldCount >= fieldHash.length/2)
          rehash();
      } else
        fp.fieldInfo.update(field.isIndexed(), field.isTermVectorStored(),
                            field.isStorePositionWithTermVector(), field.isStoreOffsetWithTermVector(),
                            field.getOmitNorms(), false, field.getOmitTf());

      if (thisFieldGen != fp.lastGen) {

        // First time we're seeing this field for this doc
        fp.fieldCount = 0;

        if (fieldCount == fields.length) {
          final int newSize = fields.length*2;
          DocFieldProcessorPerField newArray[] = new DocFieldProcessorPerField[newSize];
          System.arraycopy(fields, 0, newArray, 0, fieldCount);
          fields = newArray;
        }

        fields[fieldCount++] = fp;
        fp.lastGen = thisFieldGen;
      }

      if (fp.fieldCount == fp.fields.length) {
        Fieldable[] newArray = new Fieldable[fp.fields.length*2];
        System.arraycopy(fp.fields, 0, newArray, 0, fp.fieldCount);
        fp.fields = newArray;
      }

      fp.fields[fp.fieldCount++] = field;
    }

    // If we are writing vectors then we must visit
    // fields in sorted order so they are written in
    // sorted order.  TODO: we actually only need to
    // sort the subset of fields that have vectors
    // enabled; we could save [small amount of] CPU
    // here.
    quickSort(fields, 0, fieldCount-1);

    for(int i=0;i<fieldCount;i++)
      fields[i].consumer.processFields(fields[i].fields, fields[i].fieldCount);

    if (docState.maxTermPrefix != null && docState.infoStream != null)
      docState.infoStream.println("WARNING: document contains at least one immense term (longer than the max length " + DocumentsWriter.MAX_TERM_LENGTH + "), all of which were skipped.  Please correct the analyzer to not produce such terms.  The prefix of the first immense term is: '" + docState.maxTermPrefix + "...'");

    return consumer.finishDocument();
  }

  void quickSort(DocFieldProcessorPerField[] array, int lo, int hi) {
    if (lo >= hi)
      return;
    else if (hi == 1+lo) {
      if (array[lo].fieldInfo.name.compareTo(array[hi].fieldInfo.name) > 0) {
        final DocFieldProcessorPerField tmp = array[lo];
        array[lo] = array[hi];
        array[hi] = tmp;
      }
      return;
    }

    int mid = (lo + hi) >>> 1;

    if (array[lo].fieldInfo.name.compareTo(array[mid].fieldInfo.name) > 0) {
      DocFieldProcessorPerField tmp = array[lo];
      array[lo] = array[mid];
      array[mid] = tmp;
    }

    if (array[mid].fieldInfo.name.compareTo(array[hi].fieldInfo.name) > 0) {
      DocFieldProcessorPerField tmp = array[mid];
      array[mid] = array[hi];
      array[hi] = tmp;

      if (array[lo].fieldInfo.name.compareTo(array[mid].fieldInfo.name) > 0) {
        DocFieldProcessorPerField tmp2 = array[lo];
        array[lo] = array[mid];
        array[mid] = tmp2;
      }
    }

    int left = lo + 1;
    int right = hi - 1;

    if (left >= right)
      return;

    DocFieldProcessorPerField partition = array[mid];

    for (; ;) {
      while (array[right].fieldInfo.name.compareTo(partition.fieldInfo.name) > 0)
        --right;

      while (left < right && array[left].fieldInfo.name.compareTo(partition.fieldInfo.name) <= 0)
        ++left;

      if (left < right) {
        DocFieldProcessorPerField tmp = array[left];
        array[left] = array[right];
        array[right] = tmp;
        --right;
      } else {
        break;
      }
    }

    quickSort(array, lo, left);
    quickSort(array, left + 1, hi);
  }
}
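The fieldHash/rehash() pair above implements a small power-of-two hash table with chained collision resolution, doubled whenever the number of distinct field names reaches half the table length. A standalone sketch of the same technique (class and member names here are illustrative, not Lucene's):

// Power-of-two hash table with chained buckets, doubled at 50% load,
// mirroring fieldHash/rehash() above. Names are illustrative.
final class StringHash {
  static final class Node {
    final String key;
    Node next;
    Node(String key, Node next) { this.key = key; this.next = next; }
  }

  Node[] table = new Node[2]; // length stays a power of two
  int mask = 1;               // table.length - 1, so hash & mask picks a bucket
  int count;

  boolean add(String key) {
    int pos = key.hashCode() & mask;
    for (Node n = table[pos]; n != null; n = n.next)
      if (n.key.equals(key))
        return false;                       // already present
    table[pos] = new Node(key, table[pos]); // prepend to the chain
    if (++count >= table.length / 2)        // keep load factor under 0.5
      rehash();
    return true;
  }

  private void rehash() {
    Node[] newTable = new Node[table.length * 2];
    int newMask = newTable.length - 1;
    for (Node n : table) {                  // re-link every node into the
      while (n != null) {                   // doubled table, as rehash() does
        Node next = n.next;
        int pos = n.key.hashCode() & newMask;
        n.next = newTable[pos];
        newTable[pos] = n;
        n = next;
      }
    }
    table = newTable;
    mask = newMask;
  }
}

Prepending to the chain keeps insertion O(1), and holding the load factor under 0.5 keeps chains short, so the per-field lookup in processDocument() is usually a single name comparison.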

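processDocument() also relies on a generation-stamping trick: rather than clearing a per-field "seen in this document" flag between documents, it bumps fieldGen once per document and lazily resets a field's per-doc state the first time the stamps differ. A minimal sketch of that idea (names illustrative, not Lucene's):

// Generation stamping as in fieldGen/lastGen above: per-doc state is
// reset lazily, in O(1), only for fields the current document touches.
final class GenStamp {
  int gen;                       // bumped once per document

  static final class PerField {
    int lastGen = -1;            // generation of the last doc that used it
    int count;                   // occurrences within the current doc
  }

  void startDocument() { gen++; }

  void see(PerField f) {
    if (f.lastGen != gen) {      // first occurrence in this document
      f.count = 0;               // reset per-doc state on demand
      f.lastGen = gen;
    }
    f.count++;
  }
}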