亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關(guān)于我們
? 蟲蟲下載站

?? fieldswriter.java

?? lucene-2.4.0 是一个全文搜索的工具包
?? JAVA
字號(hào):
package org.apache.lucene.index;

/**
 * Copyright 2004 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Iterator;
import java.util.zip.Deflater;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.IndexInput;

/**
 * Writes stored document fields to the fields data file (.fdt) and the
 * per-document pointer into it to the fields index file (.fdx).
 */
final class FieldsWriter {

  /** Per-field flag bits written before each field value. */
  static final byte FIELD_IS_TOKENIZED = 0x1;
  static final byte FIELD_IS_BINARY = 0x2;
  static final byte FIELD_IS_COMPRESSED = 0x4;

  // Original format
  static final int FORMAT = 0;

  // Changed strings to UTF8
  static final int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = 1;

  // NOTE: if you introduce a new format, make it 1 higher
  // than the current one, and always change this if you
  // switch to a new format!
  static final int FORMAT_CURRENT = FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;

  private FieldInfos fieldInfos;

  private IndexOutput fieldsStream;

  private IndexOutput indexStream;

  // True only when this writer created its own streams and therefore
  // owns closing them; false when streams were handed in by the caller.
  private boolean doClose;

  /**
   * Creates the .fdt and .fdx outputs for {@code segment} in directory
   * {@code d} and writes the format header to each.  On any failure the
   * partially-written files are deleted and the original exception is
   * rethrown.
   *
   * @throws IOException if either output cannot be created or written
   */
  FieldsWriter(Directory d, String segment, FieldInfos fn) throws IOException {
    fieldInfos = fn;

    boolean success = false;
    final String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
    try {
      fieldsStream = d.createOutput(fieldsName);
      fieldsStream.writeInt(FORMAT_CURRENT);
      success = true;
    } finally {
      if (!success) {
        try {
          close();
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
        try {
          d.deleteFile(fieldsName);
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
      }
    }

    success = false;
    final String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
    try {
      indexStream = d.createOutput(indexName);
      indexStream.writeInt(FORMAT_CURRENT);
      success = true;
    } finally {
      if (!success) {
        try {
          close();
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
        try {
          d.deleteFile(fieldsName);
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
        try {
          d.deleteFile(indexName);
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception
        }
      }
    }

    doClose = true;
  }

  /**
   * Wraps already-open streams (used during merging); this writer does not
   * close them.
   */
  FieldsWriter(IndexOutput fdx, IndexOutput fdt, FieldInfos fn) {
    fieldInfos = fn;
    fieldsStream = fdt;
    indexStream = fdx;
    doClose = false;
  }

  void setFieldsStream(IndexOutput stream) {
    this.fieldsStream = stream;
  }

  // Writes the contents of buffer into the fields stream
  // and adds a new entry for this document into the index
  // stream.  This assumes the buffer was already written
  // in the correct fields format.
  void flushDocument(int numStoredFields, RAMOutputStream buffer) throws IOException {
    indexStream.writeLong(fieldsStream.getFilePointer());
    fieldsStream.writeVInt(numStoredFields);
    buffer.writeTo(fieldsStream);
  }

  /** Records a document with zero stored fields. */
  void skipDocument() throws IOException {
    indexStream.writeLong(fieldsStream.getFilePointer());
    fieldsStream.writeVInt(0);
  }

  void flush() throws IOException {
    indexStream.flush();
    fieldsStream.flush();
  }

  /**
   * Closes both streams if this writer owns them.  Always attempts to close
   * the index stream even when closing the fields stream fails, but rethrows
   * only the first IOException hit.
   */
  final void close() throws IOException {
    if (doClose) {
      try {
        if (fieldsStream != null) {
          try {
            fieldsStream.close();
          } finally {
            fieldsStream = null;
          }
        }
      } catch (IOException ioe) {
        try {
          if (indexStream != null) {
            try {
              indexStream.close();
            } finally {
              indexStream = null;
            }
          }
        } catch (IOException ioe2) {
          // Ignore so we throw only first IOException hit
        }
        throw ioe;
      } finally {
        if (indexStream != null) {
          try {
            indexStream.close();
          } finally {
            indexStream = null;
          }
        }
      }
    }
  }

  /**
   * Writes one stored field: its field number, flag bits, then the value
   * (optionally deflate-compressed).
   */
  final void writeField(FieldInfo fi, Fieldable field) throws IOException {
    // if the field as an instanceof FieldsReader.FieldForMerge, we're in merge mode
    // and field.binaryValue() already returns the compressed value for a field
    // with isCompressed()==true, so we disable compression in that case
    boolean disableCompression = (field instanceof FieldsReader.FieldForMerge);
    fieldsStream.writeVInt(fi.number);
    byte bits = 0;
    if (field.isTokenized())
      bits |= FieldsWriter.FIELD_IS_TOKENIZED;
    if (field.isBinary())
      bits |= FieldsWriter.FIELD_IS_BINARY;
    if (field.isCompressed())
      bits |= FieldsWriter.FIELD_IS_COMPRESSED;

    fieldsStream.writeByte(bits);

    if (field.isCompressed()) {
      // compression is enabled for the current field
      final byte[] data;
      final int len;
      final int offset;
      if (disableCompression) {
        // optimized case for merging, the data
        // is already compressed
        data = field.getBinaryValue();
        assert data != null;
        len = field.getBinaryLength();
        offset = field.getBinaryOffset();
      } else {
        // check if it is a binary field
        if (field.isBinary()) {
          data = compress(field.getBinaryValue(), field.getBinaryOffset(), field.getBinaryLength());
        } else {
          byte x[] = field.stringValue().getBytes("UTF-8");
          data = compress(x, 0, x.length);
        }
        len = data.length;
        offset = 0;
      }

      fieldsStream.writeVInt(len);
      fieldsStream.writeBytes(data, offset, len);
    } else {
      // compression is disabled for the current field
      if (field.isBinary()) {
        final byte[] data;
        final int len;
        final int offset;
        data = field.getBinaryValue();
        len = field.getBinaryLength();
        offset = field.getBinaryOffset();
        fieldsStream.writeVInt(len);
        fieldsStream.writeBytes(data, offset, len);
      } else {
        fieldsStream.writeString(field.stringValue());
      }
    }
  }

  /** Bulk write a contiguous series of documents.  The
   *  lengths array is the length (in bytes) of each raw
   *  document.  The stream IndexInput is the
   *  fieldsStream from which we should bulk-copy all
   *  bytes.
   */
  final void addRawDocuments(IndexInput stream, int[] lengths, int numDocs) throws IOException {
    long position = fieldsStream.getFilePointer();
    long start = position;
    for (int i = 0; i < numDocs; i++) {
      indexStream.writeLong(position);
      position += lengths[i];
    }
    fieldsStream.copyBytes(stream, position - start);
    assert fieldsStream.getFilePointer() == position;
  }

  /**
   * Writes all stored fields of {@code doc}: the pointer into the fields
   * file, the stored-field count, then each stored field via
   * {@link #writeField}.
   */
  final void addDocument(Document doc) throws IOException {
    indexStream.writeLong(fieldsStream.getFilePointer());

    // First pass: count stored fields so the count can be written up front.
    int storedCount = 0;
    Iterator fieldIterator = doc.getFields().iterator();
    while (fieldIterator.hasNext()) {
      Fieldable field = (Fieldable) fieldIterator.next();
      if (field.isStored())
        storedCount++;
    }
    fieldsStream.writeVInt(storedCount);

    fieldIterator = doc.getFields().iterator();
    while (fieldIterator.hasNext()) {
      Fieldable field = (Fieldable) fieldIterator.next();
      if (field.isStored())
        writeField(fieldInfos.fieldInfo(field.name()), field);
    }
  }

  /**
   * Deflate-compresses {@code length} bytes of {@code input} starting at
   * {@code offset} and returns the compressed bytes.
   *
   * BUGFIX: the previous version called setLevel/setInput/finish a second
   * time inside the try block, with setInput(input) and no offset/length --
   * that reset the Deflater's input to the whole backing array, so a sliced
   * binary field (offset != 0 or length < input.length) compressed the wrong
   * bytes.  The Deflater is now configured exactly once with the requested
   * slice.
   */
  private final byte[] compress(byte[] input, int offset, int length) {
    // Create the compressor with highest level of compression
    Deflater compressor = new Deflater();
    compressor.setLevel(Deflater.BEST_COMPRESSION);

    // Give the compressor exactly the requested slice of data to compress
    compressor.setInput(input, offset, length);
    compressor.finish();

    /*
     * Create an expandable byte array to hold the compressed data.
     * You cannot use an array that's the same size as the orginal because
     * there is no guarantee that the compressed data will be smaller than
     * the uncompressed data.
     */
    ByteArrayOutputStream bos = new ByteArrayOutputStream(length);
    try {
      // Compress the data
      byte[] buf = new byte[1024];
      while (!compressor.finished()) {
        int count = compressor.deflate(buf);
        bos.write(buf, 0, count);
      }
    } finally {
      compressor.end();
    }

    // Get the compressed data
    return bos.toByteArray();
  }
}

?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號(hào) Ctrl + =
減小字號(hào) Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
91精品国产手机| 欧美视频自拍偷拍| 欧美午夜精品久久久久久孕妇| 欧美一区二区福利视频| 中文字幕日本不卡| 蓝色福利精品导航| 欧美天堂一区二区三区| 色中色一区二区| 国产日韩欧美精品在线| 美女精品一区二区| 欧美日韩国产系列| 亚洲欧美视频在线观看视频| 国产精品夜夜嗨| 日韩女优毛片在线| 日韩精品一级二级| 欧美日韩一区久久| 一区二区视频在线| 成人黄色软件下载| 欧美激情一区二区三区在线| 国产资源在线一区| 日韩成人一区二区三区在线观看| 91激情在线视频| 日本一区二区三区四区| 国产成人精品免费网站| 精品少妇一区二区三区免费观看| 一区二区国产视频| 色婷婷综合激情| 亚洲第一精品在线| 精品一区二区三区在线播放| 欧美欧美欧美欧美| 日韩不卡手机在线v区| 欧美亚洲免费在线一区| 一区二区三国产精华液| 日本大香伊一区二区三区| 亚洲女厕所小便bbb| 99精品久久只有精品| 亚洲图片欧美激情| 在线观看一区日韩| 日韩精品视频网站| 精品国产一区二区精华| 国产福利电影一区二区三区| 中文在线资源观看网站视频免费不卡| 精品国产区一区| 国产成都精品91一区二区三| 国产精品福利在线播放| 欧美日韩精品一区二区在线播放| 91麻豆精品91久久久久同性| 免费三级欧美电影| 国产清纯白嫩初高生在线观看91| 欧美久久久久中文字幕| 欧美激情自拍偷拍| 在线观看国产91| 日本aⅴ亚洲精品中文乱码| 精品国产91洋老外米糕| 99精品视频中文字幕| 亚洲第一主播视频| 欧美精品少妇一区二区三区| 免费精品视频在线| 国产aⅴ综合色| 国产欧美日韩另类视频免费观看| 9色porny自拍视频一区二区| 亚洲成人综合在线| 日韩免费在线观看| 久久精品一区蜜桃臀影院| 91在线无精精品入口| 免费在线观看视频一区| 国产亚洲一区二区三区四区| 欧美午夜精品一区二区蜜桃| 久久不见久久见免费视频1| 国产精品久久久久久久久免费桃花 | www国产精品av| 福利视频网站一区二区三区| 亚洲一卡二卡三卡四卡无卡久久| 精品日韩在线观看| 日韩av中文在线观看| 欧美日韩国产中文| 欧美一区中文字幕| 成人高清免费观看| 蜜臀av性久久久久蜜臀av麻豆 | 色综合久久久久网| 日本午夜精品一区二区三区电影| 国产精品久久久久久久久免费樱桃 | 91免费视频网址| 美女www一区二区| 亚洲国产日韩一级| 亚洲欧洲日产国产综合网| 精品欧美一区二区在线观看| 91国偷自产一区二区开放时间| 国产精品一区免费视频| 成人午夜私人影院| 亚洲国产精品一区二区www | 日本大香伊一区二区三区| 免费成人结看片| 亚洲高清不卡在线观看| 日本中文字幕一区二区视频| 欧美日韩在线一区二区| 国产69精品一区二区亚洲孕妇| 日韩黄色在线观看| 一区二区成人在线| 亚洲欧美在线aaa| 中文字幕av一区 二区| 一区二区三区色| 国产拍揄自揄精品视频麻豆| 制服视频三区第一页精品| 色婷婷国产精品久久包臀| 国产91精品免费| 国内久久婷婷综合| 国内精品不卡在线| 欧美精品丝袜久久久中文字幕| www.欧美.com| 91亚洲精华国产精华精华液| 国产精品夜夜嗨| 成人午夜大片免费观看| 成人动漫中文字幕| 91丨九色丨黑人外教| 色综合久久综合网欧美综合网| 日韩欧美一区二区三区在线| 另类小说综合欧美亚洲| 男女激情视频一区| 精品一区二区三区免费视频| 韩国欧美国产1区| 国产成人在线视频网址| 高潮精品一区videoshd| 成人免费毛片嘿嘿连载视频| 成人ar影院免费观看视频| eeuss鲁片一区二区三区| 色综合天天综合狠狠| 精品视频一区二区不卡| 这里只有精品99re| 精品美女在线观看| 欧美激情在线免费观看| 亚洲日韩欧美一区二区在线| 久久久午夜精品理论片中文字幕| 精品国产伦理网| 日韩综合小视频| 麻豆免费看一区二区三区| 黑人巨大精品欧美黑白配亚洲| 国产一区二区看久久| 成人激情免费电影网址| 欧美日韩激情在线| 久久免费国产精品| 亚洲女同一区二区| 韩国av一区二区三区四区| 午夜日韩在线电影| 国产乱码精品一区二区三 | 国产91精品在线观看| 不卡的av电影在线观看| 欧美美女视频在线观看| 精品久久久久久久久久久久久久久久久| 97久久精品人人爽人人爽蜜臀| 波多野结衣的一区二区三区| 欧美精品tushy高清| 国产精品视频一区二区三区不卡| 亚洲精品日日夜夜| 免费一级欧美片在线观看| 日日摸夜夜添夜夜添精品视频| www久久精品| 成人免费一区二区三区在线观看| 亚洲妇女屁股眼交7| 国产精品影视网| 欧美女孩性生活视频| 国产精品你懂的| 久久精品国产在热久久| 
色香蕉成人二区免费| 久久精品男人天堂av| 亚洲影院在线观看| 免费在线观看不卡| 在线日韩一区二区| 欧美剧情电影在线观看完整版免费励志电影 | 三级不卡在线观看| 成人在线综合网| 欧美电视剧免费全集观看| 中文字幕一区二区三区在线观看 | 成人精品亚洲人成在线| 欧美日本精品一区二区三区| 中文字幕一区二区三| 国产精一区二区三区| 91精品国产入口| 欧美日韩一本到| 中文字幕在线一区二区三区| 蜜桃视频一区二区三区| 欧美日韩国产精品成人| 亚洲丝袜自拍清纯另类| 成av人片一区二区| 久久麻豆一区二区| 国产一区二区在线视频| 欧美大度的电影原声| 视频一区视频二区中文| 26uuu国产电影一区二区| 国产精品成人一区二区三区夜夜夜 | 亚洲日本在线视频观看| 国产激情视频一区二区三区欧美| 精品乱人伦小说| 久久国产综合精品| 欧美二区在线观看| 午夜久久久久久久久久一区二区| 91久久精品一区二区| 一区二区三区四区亚洲| 色呦呦网站一区| 一个色在线综合|