亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關(guān)于我們
? 蟲蟲下載站

?? termvectorstermswriter.java

?? lucene-2.4.0 是一個全文收索的工具包
?? JAVA
字號:
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.ArrayUtil;

import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;

/**
 * TermsHashConsumer that writes per-document term vectors to the doc-store
 * files: the vectors index ({@code .tvx}), documents ({@code .tvd}) and
 * fields ({@code .tvf}) outputs.  Each document's vectors are first buffered
 * into a {@link PerDoc} (backed by a RAMOutputStream) and appended to the
 * real outputs in docID order by {@link #finishDocument}.
 */
final class TermVectorsTermsWriter extends TermsHashConsumer {

  final DocumentsWriter docWriter;
  TermVectorsWriter termVectorsWriter;

  // Recycled PerDoc instances; grown lazily in getPerDoc.
  PerDoc[] docFreeList = new PerDoc[1];
  int freeCount;

  // Real term-vector outputs (null until the first doc with vectors):
  IndexOutput tvx;   // vectors index
  IndexOutput tvd;   // vectors documents
  IndexOutput tvf;   // vectors fields

  // Next docID (doc-store relative) expected by the outputs.
  int lastDocID;

  public TermVectorsTermsWriter(DocumentsWriter docWriter) {
    this.docWriter = docWriter;
  }

  public TermsHashConsumerPerThread addThread(TermsHashPerThread termsHashPerThread) {
    return new TermVectorsTermsWriterPerThread(termsHashPerThread, this);
  }

  void createPostings(RawPostingList[] postings, int start, int count) {
    final int end = start + count;
    for(int i=start;i<end;i++)
      postings[i] = new PostingList();
  }

  /**
   * Flushes any buffered output and resets per-thread/per-field state.
   *
   * @param threadsAndFields maps each TermVectorsTermsWriterPerThread to the
   *        Collection of its TermVectorsTermsWriterPerField instances (raw
   *        Map because this overrides the pre-generics TermsHashConsumer API)
   */
  synchronized void flush(Map threadsAndFields, final DocumentsWriter.FlushState state) throws IOException {

    if (tvx != null) {

      if (state.numDocsInStore > 0)
        // In case there are some final documents that we
        // didn't see (because they hit a non-aborting exception):
        fill(state.numDocsInStore - docWriter.getDocStoreOffset());

      tvx.flush();
      tvd.flush();
      tvf.flush();
    }

    Iterator it = threadsAndFields.entrySet().iterator();
    while(it.hasNext()) {
      Map.Entry entry = (Map.Entry) it.next();
      Iterator it2 = ((Collection) entry.getValue()).iterator();
      while(it2.hasNext()) {
        TermVectorsTermsWriterPerField perField = (TermVectorsTermsWriterPerField) it2.next();
        perField.termsHashPerField.reset();
        perField.shrinkHash();
      }

      TermVectorsTermsWriterPerThread perThread = (TermVectorsTermsWriterPerThread) entry.getKey();
      perThread.termsHashPerThread.reset(true);
    }
  }

  /**
   * Closes the three vector outputs, verifying the .tvx file length matches
   * the expected 4-byte header plus 16 bytes (two longs) per document, and
   * records/unregisters the flushed file names.
   */
  synchronized void closeDocStore(final DocumentsWriter.FlushState state) throws IOException {
    if (tvx != null) {
      // At least one doc in this run had term vectors
      // enabled
      fill(state.numDocsInStore - docWriter.getDocStoreOffset());
      tvx.close();
      tvf.close();
      tvd.close();
      tvx = null;
      assert state.docStoreSegmentName != null;
      if (4+state.numDocsInStore*16 != state.directory.fileLength(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION))
        throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION) + " length in bytes of " + state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);

      state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
      state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
      state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);

      docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
      docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
      docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);

      lastDocID = 0;
    }
  }

  int allocCount;

  /** Returns a recycled PerDoc, or allocates a new one (growing the free
   *  list up front so every outstanding instance can later be recycled). */
  synchronized PerDoc getPerDoc() {
    if (freeCount == 0) {
      allocCount++;
      if (allocCount > docFreeList.length) {
        // Grow our free list up front to make sure we have
        // enough space to recycle all outstanding PerDoc
        // instances
        assert allocCount == 1+docFreeList.length;
        docFreeList = new PerDoc[ArrayUtil.getNextSize(allocCount)];
      }
      return new PerDoc();
    } else
      return docFreeList[--freeCount];
  }

  /** Fills in no-term-vectors for all docs we haven't seen
   *  since the last doc that had term vectors. */
  void fill(int docID) throws IOException {
    final int docStoreOffset = docWriter.getDocStoreOffset();
    final int end = docID+docStoreOffset;
    if (lastDocID < end) {
      final long tvfPosition = tvf.getFilePointer();
      while(lastDocID < end) {
        tvx.writeLong(tvd.getFilePointer());
        tvd.writeVInt(0);          // zero fields for this doc
        tvx.writeLong(tvfPosition);
        lastDocID++;
      }
    }
  }

  /** Lazily creates the .tvx/.tvd/.tvf outputs for the current doc-store
   *  segment and writes the format-version header into each. */
  synchronized void initTermVectorsWriter() throws IOException {
    if (tvx == null) {

      final String docStoreSegment = docWriter.getDocStoreSegment();

      if (docStoreSegment == null)
        return;

      assert docStoreSegment != null;

      // If we hit an exception while init'ing the term
      // vector output files, we must abort this segment
      // because those files will be in an unknown
      // state:
      tvx = docWriter.directory.createOutput(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
      tvd = docWriter.directory.createOutput(docStoreSegment +  "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
      tvf = docWriter.directory.createOutput(docStoreSegment +  "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);

      tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
      tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
      tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);

      docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
      docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
      docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);

      lastDocID = 0;
    }
  }

  /** Appends one document's buffered vectors to the real outputs, in docID
   *  order, then recycles the PerDoc. */
  synchronized void finishDocument(PerDoc perDoc) throws IOException {

    assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument start");

    initTermVectorsWriter();

    // Catch up on any docs that had no vectors:
    fill(perDoc.docID);

    // Append term vectors to the real outputs:
    tvx.writeLong(tvd.getFilePointer());
    tvx.writeLong(tvf.getFilePointer());
    tvd.writeVInt(perDoc.numVectorFields);
    if (perDoc.numVectorFields > 0) {
      for(int i=0;i<perDoc.numVectorFields;i++)
        tvd.writeVInt(perDoc.fieldNumbers[i]);
      assert 0 == perDoc.fieldPointers[0];
      // Field pointers are delta-encoded:
      long lastPos = perDoc.fieldPointers[0];
      for(int i=1;i<perDoc.numVectorFields;i++) {
        long pos = perDoc.fieldPointers[i];
        tvd.writeVLong(pos-lastPos);
        lastPos = pos;
      }
      perDoc.tvf.writeTo(tvf);
      perDoc.tvf.reset();
      perDoc.numVectorFields = 0;
    }

    assert lastDocID == perDoc.docID + docWriter.getDocStoreOffset();

    lastDocID++;

    free(perDoc);
    assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument end");
  }

  public boolean freeRAM() {
    // We don't hold any state beyond one doc, so we don't
    // free persistent RAM here
    return false;
  }

  /** Best-effort close of all three outputs; exceptions are deliberately
   *  swallowed because abort must not itself fail. */
  public void abort() {
    if (tvx != null) {
      try {
        tvx.close();
      } catch (Throwable t) {
      }
      tvx = null;
    }
    if (tvd != null) {
      try {
        tvd.close();
      } catch (Throwable t) {
      }
      tvd = null;
    }
    if (tvf != null) {
      try {
        tvf.close();
      } catch (Throwable t) {
      }
      tvf = null;
    }
    lastDocID = 0;
  }

  synchronized void free(PerDoc doc) {
    assert freeCount < docFreeList.length;
    docFreeList[freeCount++] = doc;
  }

  /** Per-document buffer: the field's vector bytes plus, per field, its
   *  number and its start pointer within the buffered tvf bytes. */
  class PerDoc extends DocumentsWriter.DocWriter {

    // TODO: use something more memory efficient; for small
    // docs the 1024 buffer size of RAMOutputStream wastes alot
    RAMOutputStream tvf = new RAMOutputStream();
    int numVectorFields;

    int[] fieldNumbers = new int[1];
    long[] fieldPointers = new long[1];

    void reset() {
      tvf.reset();
      numVectorFields = 0;
    }

    void abort() {
      reset();
      free(this);
    }

    void addField(final int fieldNumber) {
      if (numVectorFields == fieldNumbers.length) {
        fieldNumbers = ArrayUtil.grow(fieldNumbers);
        fieldPointers = ArrayUtil.grow(fieldPointers);
      }
      fieldNumbers[numVectorFields] = fieldNumber;
      fieldPointers[numVectorFields] = tvf.getFilePointer();
      numVectorFields++;
    }

    public long sizeInBytes() {
      return tvf.sizeInBytes();
    }

    public void finish() throws IOException {
      finishDocument(this);
    }
  }

  static final class PostingList extends RawPostingList {
    int freq;                                       // How many times this term occurred in the current doc
    int lastOffset;                                 // Last offset we saw
    int lastPosition;                               // Last position where this term occurred
  }

  int bytesPerPosting() {
    return RawPostingList.BYTES_SIZE + 3 * DocumentsWriter.INT_NUM_BYTE;
  }
}

?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
亚洲精品一区二区三区蜜桃下载| 在线观看免费成人| 欧美高清在线精品一区| 国产91精品露脸国语对白| 欧美国产日韩精品免费观看| 色婷婷av一区二区三区大白胸| 亚洲制服欧美中文字幕中文字幕| 欧美美女激情18p| 韩国三级电影一区二区| 国产精品久久久久久久浪潮网站| 91国模大尺度私拍在线视频| 天天影视色香欲综合网老头| 久久日韩精品一区二区五区| 成人中文字幕在线| 亚洲妇女屁股眼交7| 久久亚洲一区二区三区明星换脸| www.av精品| 视频一区视频二区在线观看| wwwwxxxxx欧美| 欧美在线视频全部完| 久久99精品久久久久婷婷| 国产精品卡一卡二卡三| 欧美中文字幕一区二区三区 | 欧美日韩情趣电影| 国产乱人伦精品一区二区在线观看| 国产精品伦一区二区三级视频| 欧美放荡的少妇| 色8久久精品久久久久久蜜| 久久se这里有精品| 亚洲午夜一二三区视频| 国产日韩精品一区二区三区在线| 91精品国产91综合久久蜜臀| 亚洲人成网站色在线观看| 成人免费一区二区三区视频 | 日韩欧美一级二级三级久久久| 99精品视频在线观看免费| 老司机午夜精品| 五月激情综合色| 亚洲免费资源在线播放| 国产精品视频yy9299一区| 制服.丝袜.亚洲.中文.综合| 在线日韩一区二区| 91污片在线观看| 波多野结衣中文字幕一区二区三区| 精品亚洲成a人在线观看| 青青草视频一区| 蜜臂av日日欢夜夜爽一区| 丝瓜av网站精品一区二区 | 日韩欧美国产综合| 欧美一级生活片| 7777精品伊人久久久大香线蕉最新版| 91国在线观看| 欧美日韩视频在线第一区| 欧美日韩国产综合草草| 欧美日韩aaaaa| 日韩午夜av一区| 26uuu久久综合| 国产日韩欧美不卡在线| 一区在线观看免费| 亚洲夂夂婷婷色拍ww47| 日韩**一区毛片| 精品影视av免费| av动漫一区二区| 精品视频一区三区九区| 欧美日韩高清影院| 精品日韩一区二区三区免费视频| 久久久99久久精品欧美| 国产精品成人一区二区艾草| 亚洲综合另类小说| 麻豆91精品91久久久的内涵| 国产宾馆实践打屁股91| 色哟哟亚洲精品| 欧美成人精品1314www| 中文字幕二三区不卡| 天天色天天操综合| www.成人在线| 欧美xingq一区二区| 亚洲欧美日韩在线播放| 日韩在线一区二区三区| 国产盗摄女厕一区二区三区| 欧美视频中文字幕| 国产人成一区二区三区影院| 亚洲电影视频在线| 国产成人精品aa毛片| 欧美猛男男办公室激情| 国产日韩精品视频一区| 日韩精品一级中文字幕精品视频免费观看| 久久爱www久久做| 欧美美女喷水视频| 亚洲欧洲日韩综合一区二区| 理论片日本一区| 在线成人av影院| 亚洲免费观看视频| 国产99久久久国产精品潘金| 91精品国产欧美一区二区成人| 亚洲欧美色综合| 色成年激情久久综合| 国产精品久久久久永久免费观看 | 亚洲欧美日韩国产成人精品影院| 激情综合网av| 欧美变态凌虐bdsm| 首页亚洲欧美制服丝腿| 欧美中文字幕久久| 亚洲精选免费视频| 不卡在线观看av| 国产精品色哟哟网站| 国产成人在线视频网址| 2022国产精品视频| 极品少妇xxxx精品少妇偷拍| 欧美一二三区在线| 久久精品免费看| 久久色中文字幕| 国产精品一区二区无线| 久久久久久久网| 国产风韵犹存在线视精品| 欧美激情一区二区三区全黄| 9久草视频在线视频精品| 国产精品国产三级国产三级人妇| 99久久精品费精品国产一区二区| 国产精品久久午夜夜伦鲁鲁| aaa亚洲精品| 亚洲午夜羞羞片| 精品日韩一区二区三区| 国产不卡视频一区二区三区| 亚洲狠狠丁香婷婷综合久久久| 欧美视频在线一区| 蜜桃91丨九色丨蝌蚪91桃色| 国产欧美精品区一区二区三区| 99精品视频在线免费观看| 亚洲午夜精品在线| 欧美电影免费观看高清完整版| 国产成人精品一区二| 亚洲激情中文1区| 日韩欧美二区三区| 成人动漫av在线| 日韩成人精品在线| 国产精品水嫩水嫩| 欧美日韩一区二区在线观看视频| 奇米一区二区三区| 亚洲欧洲另类国产综合| 欧美一区二区三级| 日韩一区二区电影网| 国产精品一区二区男女羞羞无遮挡| 亚洲精品乱码久久久久久日本蜜臀| 91精品久久久久久久91蜜桃| 成人一区二区在线观看| 麻豆精品视频在线观看免费| 中文字幕制服丝袜成人av| 日韩欧美区一区二| 欧美艳星brazzers| jlzzjlzz国产精品久久| 久久er99热精品一区二区| 亚洲永久免费视频| 中文字幕日本乱码精品影院| 久久嫩草精品久久久久| 91超碰这里只有精品国产| 91免费版在线| 成人免费看的视频| 国产精品一级二级三级| 美女视频黄a大片欧美| 
天天av天天翘天天综合网色鬼国产| 国产精品久久看| 国产精品久久久久久久久免费丝袜 | 日韩一区二区三免费高清| 色久优优欧美色久优优| caoporn国产精品| 成人激情图片网| 国产成人精品1024| 国产盗摄一区二区| 成人小视频在线观看| 国产毛片精品视频| 国产黄色精品网站| 国产精品1区2区| 成人伦理片在线| 91香蕉国产在线观看软件| 91视频一区二区三区| 一本色道亚洲精品aⅴ| 在线视频一区二区三| 欧美日韩一区小说| 日韩欧美国产三级| 精品国产成人在线影院| 亚洲午夜免费福利视频| 亚洲一级不卡视频| 美女精品一区二区| 国产美女av一区二区三区| 97se亚洲国产综合在线| 欧美伊人久久久久久午夜久久久久| 欧美乱妇一区二区三区不卡视频| 欧美电影免费观看高清完整版在| 欧美精品一区二区三区蜜桃| 亚洲手机成人高清视频| 日韩精品一二三| 丁香亚洲综合激情啪啪综合| 91豆麻精品91久久久久久| 欧美一区二区三区人| 国产色爱av资源综合区| 亚洲国产欧美一区二区三区丁香婷| 全国精品久久少妇| 99久久免费视频.com| 日韩一区二区精品|