亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? documentwriter.java

?? lucene完整源碼
?? JAVA
字號:
package org.apache.lucene.index;

/**
 * Copyright 2004 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.io.PrintStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.Hashtable;
import java.util.Enumeration;
import java.util.Arrays;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.search.Similarity;

/**
 * Writes a single {@link Document} into its own single-document segment:
 * field names (.fnm), stored fields, postings (.frq/.prx), optional term
 * vectors, and per-field norms (.fN). Instances buffer one document at a
 * time in {@link #postingTable} and are not thread-safe.
 */
final class DocumentWriter {
  private Analyzer analyzer;                  // produces token streams for tokenized fields
  private Directory directory;                // destination for all segment files
  private Similarity similarity;              // supplies lengthNorm() for norm encoding
  private FieldInfos fieldInfos;              // rebuilt per document in addDocument()
  private int maxFieldLength;                 // token cap per field; excess tokens are dropped
  private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
  private PrintStream infoStream;             // optional diagnostics sink; may stay null

  /** This ctor used by test code only.
   *
   * @param directory The directory to write the document information to
   * @param analyzer The analyzer to use for the document
   * @param similarity The Similarity function
   * @param maxFieldLength The maximum number of tokens a field may have
   */
  DocumentWriter(Directory directory, Analyzer analyzer,
                 Similarity similarity, int maxFieldLength) {
    this.directory = directory;
    this.analyzer = analyzer;
    this.similarity = similarity;
    this.maxFieldLength = maxFieldLength;
  }

  /** Production ctor: copies the relevant settings from the owning IndexWriter. */
  DocumentWriter(Directory directory, Analyzer analyzer, IndexWriter writer) {
    this.directory = directory;
    this.analyzer = analyzer;
    this.similarity = writer.getSimilarity();
    this.maxFieldLength = writer.getMaxFieldLength();
    this.termIndexInterval = writer.getTermIndexInterval();
  }

  /**
   * Writes {@code doc} as segment {@code segment}: field infos, stored
   * fields, inverted postings, and norms. Per-document state (posting
   * table, per-field counters) is reset at the start of each call.
   *
   * @param segment base name for all files of the new segment
   * @param doc the document to index
   * @throws IOException on any failure writing to the directory
   */
  final void addDocument(String segment, Document doc)
          throws IOException {
    // write field names
    fieldInfos = new FieldInfos();
    fieldInfos.add(doc);
    fieldInfos.write(directory, segment + ".fnm");

    // write field values
    FieldsWriter fieldsWriter =
            new FieldsWriter(directory, segment, fieldInfos);
    try {
      fieldsWriter.addDocument(doc);
    } finally {
      fieldsWriter.close();
    }

    // invert doc into postingTable; per-field arrays are indexed by field number
    postingTable.clear();                         // clear postingTable
    fieldLengths = new int[fieldInfos.size()];    // token count per field
    fieldPositions = new int[fieldInfos.size()];  // next token position per field
    fieldOffsets = new int[fieldInfos.size()];    // character offset per field
    fieldBoosts = new float[fieldInfos.size()];   // cumulative boost per field
    Arrays.fill(fieldBoosts, doc.getBoost());     // start from the document boost
    invertDocument(doc);

    // sort postingTable into an array, ordered by Term
    Posting[] postings = sortPostingTable();

    // write postings
    writePostings(postings, segment);

    // write norms of indexed fields
    writeNorms(segment);
  }

  // Keys are Terms, values are Postings.
  // Used to buffer a document before it is written to the index.
  private final Hashtable postingTable = new Hashtable();
  private int[] fieldLengths;    // # tokens seen so far per field number
  private int[] fieldPositions;  // next token position per field number
  private int[] fieldOffsets;    // running character offset per field number
  private float[] fieldBoosts;   // product of doc boost and all field boosts

  // Tokenizes the fields of a document into Postings.
  // Multiple Field instances with the same name continue the same
  // position/offset counters, separated by the analyzer's position gap.
  private final void invertDocument(Document doc)
          throws IOException {
    Enumeration fields = doc.fields();
    while (fields.hasMoreElements()) {
      Field field = (Field) fields.nextElement();
      String fieldName = field.name();
      int fieldNumber = fieldInfos.fieldNumber(fieldName);

      int length = fieldLengths[fieldNumber];     // length of field
      int position = fieldPositions[fieldNumber]; // position in field
      // insert a gap between repeated instances of the same field
      if (length>0) position+=analyzer.getPositionIncrementGap(fieldName);
      int offset = fieldOffsets[fieldNumber];     // offset field

      if (field.isIndexed()) {
        if (!field.isTokenized()) {               // un-tokenized field: one posting for the whole value
          String stringValue = field.stringValue();
          if(field.isStoreOffsetWithTermVector())
            addPosition(fieldName, stringValue, position++, new TermVectorOffsetInfo(offset, offset + stringValue.length()));
          else
            addPosition(fieldName, stringValue, position++, null);
          offset += stringValue.length();
          length++;
        } else
        {
          Reader reader;                          // find or make Reader
          if (field.readerValue() != null)
            reader = field.readerValue();
          else if (field.stringValue() != null)
            reader = new StringReader(field.stringValue());
          else
            throw new IllegalArgumentException
                    ("field must have either String or Reader value");

          // Tokenize field and add to postingTable
          TokenStream stream = analyzer.tokenStream(fieldName, reader);
          try {
            Token lastToken = null;
            for (Token t = stream.next(); t != null; t = stream.next()) {
              // honor position increments > 1 (e.g. stop-word holes);
              // the ++ below then advances past this token's slot
              position += (t.getPositionIncrement() - 1);

              if(field.isStoreOffsetWithTermVector())
                addPosition(fieldName, t.termText(), position++, new TermVectorOffsetInfo(offset + t.startOffset(), offset + t.endOffset()));
              else
                addPosition(fieldName, t.termText(), position++, null);

              lastToken = t;
              if (++length > maxFieldLength) {
                // field is over the cap: drop the rest of its tokens
                if (infoStream != null)
                  infoStream.println("maxFieldLength " +maxFieldLength+ " reached, ignoring following tokens");
                break;
              }
            }

            // advance the running offset past the last token (+1 as a separator)
            if(lastToken != null)
              offset += lastToken.endOffset() + 1;

          } finally {
            stream.close();                       // always release the token stream
          }
        }

        fieldLengths[fieldNumber] = length;       // save field length
        fieldPositions[fieldNumber] = position;   // save field position
        fieldBoosts[fieldNumber] *= field.getBoost();
        fieldOffsets[fieldNumber] = offset;
      }
    }
  }

  private final Term termBuffer = new Term("", ""); // avoid consing a Term per lookup

  // Records one occurrence of (field, text) at the given position, growing
  // the Posting's positions/offsets arrays by doubling when full.
  // offset may be null when term-vector offsets are not stored.
  private final void addPosition(String field, String text, int position, TermVectorOffsetInfo offset) {
    termBuffer.set(field, text);                  // reuse the shared buffer as the lookup key
    Posting ti = (Posting) postingTable.get(termBuffer);
    if (ti != null) {                             // word seen before
      int freq = ti.freq;
      if (ti.positions.length == freq) {          // positions array is full
        int[] newPositions = new int[freq * 2];   // double size
        int[] positions = ti.positions;
        for (int i = 0; i < freq; i++)            // copy old positions to new
          newPositions[i] = positions[i];
        ti.positions = newPositions;
      }
      ti.positions[freq] = position;              // add new position

      if (offset != null) {
        if (ti.offsets.length == freq){           // offsets array is full: double it too
          TermVectorOffsetInfo [] newOffsets = new TermVectorOffsetInfo[freq*2];
          TermVectorOffsetInfo [] offsets = ti.offsets;
          for (int i = 0; i < freq; i++)
          {
            newOffsets[i] = offsets[i];
          }
          ti.offsets = newOffsets;
        }
        ti.offsets[freq] = offset;
      }
      ti.freq = freq + 1;                         // update frequency
    } else {                                      // word not seen before
      Term term = new Term(field, text, false);
      postingTable.put(term, new Posting(term, position, offset));
    }
  }

  // Copies postingTable into an array sorted by Term (field, then text).
  private final Posting[] sortPostingTable() {
    // copy postingTable into an array
    Posting[] array = new Posting[postingTable.size()];
    Enumeration postings = postingTable.elements();
    for (int i = 0; postings.hasMoreElements(); i++)
      array[i] = (Posting) postings.nextElement();

    // sort the array
    quickSort(array, 0, array.length - 1);

    return array;
  }

  // In-place quicksort on Posting.term with median-of-three pivot selection.
  private static final void quickSort(Posting[] postings, int lo, int hi) {
    if (lo >= hi)
      return;

    int mid = (lo + hi) / 2;

    // order lo/mid/hi so postings[mid] is the median of the three
    if (postings[lo].term.compareTo(postings[mid].term) > 0) {
      Posting tmp = postings[lo];
      postings[lo] = postings[mid];
      postings[mid] = tmp;
    }

    if (postings[mid].term.compareTo(postings[hi].term) > 0) {
      Posting tmp = postings[mid];
      postings[mid] = postings[hi];
      postings[hi] = tmp;

      if (postings[lo].term.compareTo(postings[mid].term) > 0) {
        Posting tmp2 = postings[lo];
        postings[lo] = postings[mid];
        postings[mid] = tmp2;
      }
    }

    int left = lo + 1;
    int right = hi - 1;

    if (left >= right)
      return;                                     // <= 3 elements: already ordered above

    Term partition = postings[mid].term;

    for (; ;) {
      while (postings[right].term.compareTo(partition) > 0)
        --right;

      while (left < right &&
              postings[left].term.compareTo(partition) <= 0)
        ++left;

      if (left < right) {
        Posting tmp = postings[left];
        postings[left] = postings[right];
        postings[right] = tmp;
        --right;
      } else {
        break;
      }
    }

    quickSort(postings, lo, left);
    quickSort(postings, left + 1, hi);
  }

  // Writes the sorted postings to the .frq/.prx files and the term
  // dictionary, emitting term vectors for fields that request them.
  // Positions are delta-encoded; a lone occurrence is encoded as VInt 1
  // (doc 0 with the low "freq==1" bit set), otherwise as 0 followed by freq.
  private final void writePostings(Posting[] postings, String segment)
          throws IOException {
    IndexOutput freq = null, prox = null;
    TermInfosWriter tis = null;
    TermVectorsWriter termVectorWriter = null;
    try {
      //open files for inverse index storage
      freq = directory.createOutput(segment + ".frq");
      prox = directory.createOutput(segment + ".prx");
      tis = new TermInfosWriter(directory, segment, fieldInfos,
                                termIndexInterval);
      TermInfo ti = new TermInfo();
      String currentField = null;

      for (int i = 0; i < postings.length; i++) {
        Posting posting = postings[i];

        // add an entry to the dictionary with pointers to prox and freq files
        ti.set(1, freq.getFilePointer(), prox.getFilePointer(), -1);
        tis.add(posting.term, ti);

        // add an entry to the freq file
        int postingFreq = posting.freq;
        if (postingFreq == 1)                     // optimize freq=1
          freq.writeVInt(1);                      // set low bit of doc num.
        else {
          freq.writeVInt(0);                      // the document number
          freq.writeVInt(postingFreq);            // frequency in doc
        }

        int lastPosition = 0;                     // write positions
        int[] positions = posting.positions;
        for (int j = 0; j < postingFreq; j++) {   // use delta-encoding
          int position = positions[j];
          prox.writeVInt(position - lastPosition);
          lastPosition = position;
        }

        // check to see if we switched to a new field
        // NOTE(review): reference (!=) comparison — appears to rely on
        // Term.field() returning interned strings; verify before changing.
        String termField = posting.term.field();
        if (currentField != termField) {
          // changing field - see if there is something to save
          currentField = termField;
          FieldInfo fi = fieldInfos.fieldInfo(currentField);
          if (fi.storeTermVector) {
            if (termVectorWriter == null) {
              // lazily created: only documents with term vectors pay the cost
              termVectorWriter =
                new TermVectorsWriter(directory, segment, fieldInfos);
              termVectorWriter.openDocument();
            }
            termVectorWriter.openField(currentField);
          } else if (termVectorWriter != null) {
            termVectorWriter.closeField();
          }
        }
        if (termVectorWriter != null && termVectorWriter.isFieldOpen()) {
            termVectorWriter.addTerm(posting.term.text(), postingFreq, posting.positions, posting.offsets);
        }
      }
      if (termVectorWriter != null)
        termVectorWriter.closeDocument();
    } finally {
      // make an effort to close all streams we can but remember and re-throw
      // the first exception encountered in this process
      IOException keep = null;
      if (freq != null) try { freq.close(); } catch (IOException e) { if (keep == null) keep = e; }
      if (prox != null) try { prox.close(); } catch (IOException e) { if (keep == null) keep = e; }
      if (tis  != null) try {  tis.close(); } catch (IOException e) { if (keep == null) keep = e; }
      if (termVectorWriter  != null) try {  termVectorWriter.close(); } catch (IOException e) { if (keep == null) keep = e; }
      if (keep != null) throw (IOException) keep.fillInStackTrace();
    }
  }

  // Writes one .fN norms file per indexed field that stores norms,
  // encoding boost * lengthNorm as a single byte.
  private final void writeNorms(String segment) throws IOException {
    for(int n = 0; n < fieldInfos.size(); n++){
      FieldInfo fi = fieldInfos.fieldInfo(n);
      if(fi.isIndexed && !fi.omitNorms){
        float norm = fieldBoosts[n] * similarity.lengthNorm(fi.name, fieldLengths[n]);
        IndexOutput norms = directory.createOutput(segment + ".f" + n);
        try {
          norms.writeByte(Similarity.encodeNorm(norm));
        } finally {
          norms.close();
        }
      }
    }
  }

  /** If non-null, a message will be printed to this if maxFieldLength is reached.
   */
  void setInfoStream(PrintStream infoStream) {
    this.infoStream = infoStream;
  }
}

final class Posting {                             // info about a Term in a doc
  Term term;                                      // the Term
  int freq;                                       // its frequency in doc
  int[] positions;                                // positions it occurs at
  TermVectorOffsetInfo [] offsets;                // null unless offsets are stored with the term vector

  Posting(Term t, int position, TermVectorOffsetInfo offset) {
    term = t;
    freq = 1;
    positions = new int[1];
    positions[0] = position;
    if(offset != null){
    offsets = new TermVectorOffsetInfo[1];
    offsets[0] = offset;
    }
    else
      offsets = null;
  }
}

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
欧美一区二区三区免费| 91精品国产综合久久香蕉麻豆| 亚洲在线一区二区三区| 欧美mv日韩mv国产网站app| av中文字幕亚洲| 裸体歌舞表演一区二区| 国产精品国产三级国产a| 欧美成人精品1314www| 欧美主播一区二区三区美女| 成人一二三区视频| 激情五月激情综合网| 亚洲午夜av在线| 国产精品久久久久一区| 久久综合久久99| 欧美人妇做爰xxxⅹ性高电影| 不卡一区中文字幕| 精品一区二区综合| 视频在线观看91| 一区二区三区 在线观看视频| 国产日韩欧美a| 久久综合色天天久久综合图片| 欧美日韩在线播放一区| 一本一道久久a久久精品| 成人动漫在线一区| 国产成人小视频| 国产麻豆精品视频| 久久99国产乱子伦精品免费| 日韩高清中文字幕一区| 亚洲一区二区三区四区在线| 亚洲美女少妇撒尿| 中文字幕日韩精品一区| 国产精品热久久久久夜色精品三区 | 亚洲国产精品成人综合| 欧美v日韩v国产v| 欧美v国产在线一区二区三区| 欧美一区二区三区婷婷月色| 6080午夜不卡| 日韩一区二区三区免费看| 欧美片网站yy| 欧美日韩国产一级片| 88在线观看91蜜桃国自产| 欧美日高清视频| 91精品国产综合久久香蕉的特点| 欧美丰满少妇xxxxx高潮对白| 欧美乱妇15p| 日韩一区二区三区免费观看| 日韩无一区二区| 精品久久一区二区三区| 久久久国际精品| 中文幕一区二区三区久久蜜桃| 国产午夜亚洲精品羞羞网站| 久久久久亚洲蜜桃| 久久久久久久综合日本| 日本一区二区电影| 亚洲视频免费在线观看| 亚洲自拍偷拍综合| 天涯成人国产亚洲精品一区av| 亚洲一区二区高清| 日本人妖一区二区| 国产麻豆视频一区| 91在线播放网址| 欧美在线免费视屏| 欧美一区二区精品在线| 国产喷白浆一区二区三区| 亚洲色图制服诱惑 | 免费在线观看成人| 久久国产尿小便嘘嘘| 高清国产午夜精品久久久久久| 91麻豆文化传媒在线观看| 欧美日韩一区二区不卡| 日韩三级高清在线| 国产精品―色哟哟| 亚洲影院免费观看| 蜜芽一区二区三区| 成人理论电影网| 欧美三片在线视频观看| 2020国产精品久久精品美国| 国产一区二区免费看| caoporen国产精品视频| 91精品在线一区二区| 国产精品视频第一区| 天天影视涩香欲综合网| 国产二区国产一区在线观看| 色欧美片视频在线观看| 精品精品国产高清一毛片一天堂| 国产精品国产三级国产普通话三级| 午夜婷婷国产麻豆精品| 成人蜜臀av电影| 日韩精品一区二区三区在线| 亚洲人亚洲人成电影网站色| 免费久久99精品国产| 91丨porny丨国产入口| 欧美成人一区二区三区| 亚洲免费观看高清完整| 国产精选一区二区三区| 欧美日韩大陆在线| 国产精品妹子av| 久久超碰97人人做人人爱| 91视频一区二区三区| 久久久久久久久久久久久久久99| 一区二区免费在线播放| 成人三级在线视频| 日韩免费观看2025年上映的电影| 亚洲精品ww久久久久久p站| 国产精华液一区二区三区| 欧美一区二区免费| 亚洲国产日韩综合久久精品| 国产+成+人+亚洲欧洲自线| 91精品国产色综合久久不卡电影| 中文字幕综合网| 成人影视亚洲图片在线| 亚洲精品一区在线观看| 日韩电影在线观看一区| 欧美中文字幕亚洲一区二区va在线| 亚洲国产成人在线| 国产精品自拍在线| 日韩欧美一级片| 日韩精品国产精品| 欧美性生活久久| 亚洲女子a中天字幕| caoporn国产精品| 国产精品久久久久影院| 国产91在线看| 国产亚洲午夜高清国产拍精品| 久久99最新地址| 欧美变态口味重另类| 蜜芽一区二区三区| 日韩免费高清电影| 久久国产视频网| 精品乱人伦一区二区三区| 免费一区二区视频| 欧美一级国产精品| 奇米精品一区二区三区在线观看 | 亚洲一区免费视频| 欧美日韩亚洲另类| 亚洲图片有声小说| 欧美乱妇23p| 美国三级日本三级久久99| 7777精品伊人久久久大香线蕉的| 亚洲18色成人| 欧美精品久久99| 蜜桃av一区二区| 精品国产伦一区二区三区观看方式 | 久久久久97国产精华液好用吗| 黄一区二区三区| 久久午夜电影网| 国产精品一色哟哟哟| 国产欧美视频一区二区| 不卡视频一二三| 99精品视频免费在线观看| 中文字幕在线不卡一区二区三区| 成人高清视频在线| 亚洲日本在线看| 色琪琪一区二区三区亚洲区| 亚洲无人区一区| 日韩欧美电影在线| 国产福利一区二区三区视频| 国产精品久久久久一区二区三区 | 日日摸夜夜添夜夜添亚洲女人| 91精品国产综合久久香蕉麻豆 | 强制捆绑调教一区二区| 精品国产一区二区三区久久影院| 国产成+人+日韩+欧美+亚洲| 
亚洲日本va午夜在线电影| 欧美日韩在线一区二区| 久久国产成人午夜av影院| 国产欧美精品一区二区色综合 | 肉丝袜脚交视频一区二区| 日韩欧美国产系列| 成人在线视频一区| 亚洲3atv精品一区二区三区| 久久久亚洲高清| 色94色欧美sute亚洲线路一ni| 日韩av在线播放中文字幕| 久久综合久久综合久久综合| 91捆绑美女网站| 蜜桃一区二区三区四区| 国产精品麻豆视频| 91精品麻豆日日躁夜夜躁| 国产91精品露脸国语对白| 亚洲成a人片在线观看中文| 久久午夜色播影院免费高清 | 色婷婷av一区二区三区之一色屋| 日韩精品视频网| 亚洲欧洲精品天堂一级| 51精品秘密在线观看| 成人黄色777网| 免费高清视频精品| 尤物在线观看一区| 久久久噜噜噜久久中文字幕色伊伊| 在线影视一区二区三区| 国产精品主播直播| 日韩精品亚洲一区二区三区免费| 国产精品免费av| 欧美成人aa大片| 欧美日韩三级视频| av激情综合网| 韩国毛片一区二区三区| 午夜精品福利一区二区蜜股av| 中文字幕一区二区三区不卡在线|