亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? SearchFiles.java

?? lucene實現全文檢索的實際小例子,可以實現對文本文件的檢索,和對內容的查詢!
?? JAVA
字號:
package org.apache.lucene.demo;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Date;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.HitCollector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TopDocCollector;

/** Simple command-line based search demo. */
public class SearchFiles {

  /**
   * Use the norms from one field for all fields.  Norms are read into memory,
   * using a byte of memory per document per searched field.  This can cause
   * search of large collections with a large number of fields to run out of
   * memory.  If all of the fields contain only a single token, then the norms
   * are all identical, and a single norm vector may be shared.
   */
  private static class OneNormsReader extends FilterIndexReader {
    private final String field;

    public OneNormsReader(IndexReader in, String field) {
      super(in);
      this.field = field;
    }

    /**
     * Always answers with the norms of the configured field, regardless of
     * which field the caller asks for.
     */
    public byte[] norms(String field) throws IOException {
      return in.norms(this.field);
    }
  }

  /** Command-line demo; not meant to be instantiated. */
  private SearchFiles() {}

  /**
   * Simple command-line based search demo.
   *
   * Recognized flags: -index dir, -field f, -repeat n, -queries file, -raw,
   * -norms field, -paging hitsPerPage (pass 'false' or 0 for streaming mode).
   * Queries are read from the -queries file if given, otherwise interactively
   * from stdin; a blank line or EOF ends the session.
   */
  public static void main(String[] args) throws Exception {
    String usage =
      "Usage:\tjava org.apache.lucene.demo.SearchFiles [-index dir] [-field f] [-repeat n] [-queries file] [-raw] [-norms field] [-paging hitsPerPage]";
    usage += "\n\tSpecify 'false' for hitsPerPage to use streaming instead of paging search.";
    if (args.length > 0 && ("-h".equals(args[0]) || "-help".equals(args[0]))) {
      System.out.println(usage);
      System.exit(0);
    }

    // Defaults, possibly overridden by command-line flags below.
    String index = "index";        // index directory to search
    String field = "contents";     // default field for the query parser
    String queries = null;         // optional file of queries, one per line
    int repeat = 0;                // >0 re-runs each query as a crude benchmark
    boolean raw = false;           // print raw docId/score instead of stored fields
    String normsField = null;      // share this field's norms across all fields
    boolean paging = true;
    int hitsPerPage = 10;

    for (int i = 0; i < args.length; i++) {
      if ("-index".equals(args[i])) {
        index = args[i+1];
        i++;
      } else if ("-field".equals(args[i])) {
        field = args[i+1];
        i++;
      } else if ("-queries".equals(args[i])) {
        queries = args[i+1];
        i++;
      } else if ("-repeat".equals(args[i])) {
        repeat = Integer.parseInt(args[i+1]);
        i++;
      } else if ("-raw".equals(args[i])) {
        raw = true;
      } else if ("-norms".equals(args[i])) {
        normsField = args[i+1];
        i++;
      } else if ("-paging".equals(args[i])) {
        if (args[i+1].equals("false")) {
          paging = false;
        } else {
          hitsPerPage = Integer.parseInt(args[i+1]);
          if (hitsPerPage == 0) {
            paging = false;
          }
        }
        i++;
      }
    }

    IndexReader reader = IndexReader.open(index);
    if (normsField != null)
      reader = new OneNormsReader(reader, normsField);

    Searcher searcher = new IndexSearcher(reader);
    Analyzer analyzer = new StandardAnalyzer();

    BufferedReader in = null;
    try {
      if (queries != null) {
        // NOTE(review): FileReader uses the platform default charset while the
        // stdin path below assumes UTF-8 — confirm which encoding query files
        // are expected to use before changing either.
        in = new BufferedReader(new FileReader(queries));
      } else {
        in = new BufferedReader(new InputStreamReader(System.in, "UTF-8"));
      }

      QueryParser parser = new QueryParser(field, analyzer);
      while (true) {
        if (queries == null)                      // prompt the user
          System.out.println("Enter query: ");

        String line = in.readLine();
        // EOF ends the session.  (The original also tested
        // line.length() == -1, which a String length can never be.)
        if (line == null)
          break;

        line = line.trim();
        if (line.length() == 0)                   // blank line ends the session
          break;

        Query query = parser.parse(line);
        System.out.println("Searching for: " + query.toString(field));

        if (repeat > 0) {                         // repeat & time as benchmark
          Date start = new Date();
          for (int i = 0; i < repeat; i++) {
            searcher.search(query, null, 100);
          }
          Date end = new Date();
          System.out.println("Time: "+(end.getTime()-start.getTime())+"ms");
        }

        if (paging) {
          doPagingSearch(in, searcher, query, hitsPerPage, raw, queries == null);
        } else {
          doStreamingSearch(searcher, query);
        }
      }
    } finally {
      // Always release the query source and the index.  The original leaked
      // 'in' entirely and skipped reader.close() when an exception escaped
      // the query loop.
      if (in != null) {
        in.close();
      }
      searcher.close();
      reader.close();
    }
  }

  /**
   * This method uses a custom HitCollector implementation which simply prints
   * out the docId and score of every matching document.
   *
   * This simulates the streaming search use case, where all hits are supposed
   * to be processed, regardless of their relevance.
   */
  public static void doStreamingSearch(final Searcher searcher, Query query) throws IOException {
    HitCollector streamingHitCollector = new HitCollector() {
      // simply print docId and score of every matching document
      public void collect(int doc, float score) {
        System.out.println("doc="+doc+" score="+score);
      }
    };
    searcher.search(query, streamingHitCollector);
  }

  /**
   * This demonstrates a typical paging search scenario, where the search engine
   * presents pages of size n to the user. The user can then go to the next page
   * if interested in the next hits.
   *
   * When the query is executed for the first time, then only enough results are
   * collected to fill 5 result pages. If the user wants to page beyond this
   * limit, then the query is executed another time and all hits are collected.
   *
   * @param in          source of the user's interactive commands
   * @param searcher    searcher over the open index
   * @param query       parsed query to execute
   * @param hitsPerPage page size
   * @param raw         if true, print raw docId/score instead of stored fields
   * @param interactive if false, print only the first page and return
   * @throws IOException if reading the index or the input fails
   */
  public static void doPagingSearch(BufferedReader in, Searcher searcher, Query query,
                                    int hitsPerPage, boolean raw, boolean interactive) throws IOException {

    // Collect enough docs to show 5 pages
    TopDocCollector collector = new TopDocCollector(5 * hitsPerPage);
    searcher.search(query, collector);
    ScoreDoc[] hits = collector.topDocs().scoreDocs;

    int numTotalHits = collector.getTotalHits();
    System.out.println(numTotalHits + " total matching documents");

    int start = 0;
    int end = Math.min(numTotalHits, hitsPerPage);

    while (true) {
      if (end > hits.length) {
        // The user paged past what was collected so far: offer to re-run the
        // query collecting every hit.
        System.out.println("Only results 1 - " + hits.length +" of " + numTotalHits + " total matching documents collected.");
        System.out.println("Collect more (y/n) ?");
        String line = in.readLine();
        // Treat EOF (null) like 'n'; the original dereferenced null here.
        if (line == null || line.length() == 0 || line.charAt(0) == 'n') {
          break;
        }
        collector = new TopDocCollector(numTotalHits);
        searcher.search(query, collector);
        hits = collector.topDocs().scoreDocs;
      }

      end = Math.min(hits.length, start + hitsPerPage);

      for (int i = start; i < end; i++) {
        if (raw) {                              // output raw format
          System.out.println("doc="+hits[i].doc+" score="+hits[i].score);
          continue;
        }
        Document doc = searcher.doc(hits[i].doc);
        String path = doc.get("path");
        if (path != null) {
          System.out.println((i+1) + ". " + path);
          String title = doc.get("title");
          if (title != null) {
            System.out.println("   Title: " + doc.get("title"));
          }
        } else {
          System.out.println((i+1) + ". " + "No path for this document");
        }
      }

      if (!interactive) {
        break;
      }

      // Always true in practice: end never exceeds numTotalHits.  Kept for
      // fidelity with the original control flow.
      if (numTotalHits >= end) {
        boolean quit = false;
        while (true) {
          System.out.print("Press ");
          if (start - hitsPerPage >= 0) {
            System.out.print("(p)revious page, ");
          }
          if (start + hitsPerPage < numTotalHits) {
            System.out.print("(n)ext page, ");
          }
          System.out.println("(q)uit or enter number to jump to a page.");

          String line = in.readLine();
          // Treat EOF (null) like 'q'; the original dereferenced null here.
          if (line == null || line.length() == 0 || line.charAt(0)=='q') {
            quit = true;
            break;
          }
          if (line.charAt(0) == 'p') {
            start = Math.max(0, start - hitsPerPage);
            break;
          } else if (line.charAt(0) == 'n') {
            if (start + hitsPerPage < numTotalHits) {
              start+=hitsPerPage;
            }
            break;
          } else {
            // NOTE(review): throws NumberFormatException on non-numeric
            // input, ending the demo — preserved for behavior compatibility.
            int page = Integer.parseInt(line);
            if ((page - 1) * hitsPerPage < numTotalHits) {
              start = (page - 1) * hitsPerPage;
              break;
            } else {
              System.out.println("No such page");
            }
          }
        }
        if (quit) break;
        end = Math.min(numTotalHits, start + hitsPerPage);
      }
    }
  }
}

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
亚洲午夜视频在线观看| 国产喷白浆一区二区三区| 亚洲天天做日日做天天谢日日欢 | 一区二区三区在线影院| 日韩免费高清电影| 久久99久久久久久久久久久| 欧美一区二区在线观看| 视频一区二区国产| 欧美一区二区三级| 精品一区二区三区视频 | 久久99国产精品成人| 欧美成人综合网站| 国产经典欧美精品| 国产精品你懂的在线欣赏| 97久久精品人人爽人人爽蜜臀| 亚洲色图一区二区| 欧美日本乱大交xxxxx| 免费人成在线不卡| 中文字幕不卡一区| 91久久精品日日躁夜夜躁欧美| 亚洲午夜免费电影| 欧美zozo另类异族| av一区二区三区| 亚洲国产日韩综合久久精品| 欧美一二三区在线观看| 福利一区福利二区| 亚洲一级二级三级| 久久综合五月天婷婷伊人| 国产99久久久精品| 亚洲成人激情av| 久久久久久久久久久久久久久99| 91色在线porny| 日本不卡1234视频| 国产精品成人在线观看| 欧美剧情片在线观看| 欧美二区三区91| 不卡视频在线观看| 免费观看在线综合| 亚洲欧洲av一区二区三区久久| 欧美日韩一二三区| 成人夜色视频网站在线观看| 午夜视频一区二区三区| 中文字幕欧美国产| 在线播放/欧美激情| zzijzzij亚洲日本少妇熟睡| 日韩极品在线观看| 亚洲欧美国产三级| 久久久.com| 欧美一卡在线观看| 日本韩国欧美在线| 国产福利视频一区二区三区| 日韩高清在线电影| 亚洲精品中文字幕在线观看| 久久午夜电影网| 91精品国产综合久久精品麻豆 | 精品在线视频一区| 亚洲超碰97人人做人人爱| **欧美大码日韩| 国产网站一区二区| 精品国产乱码久久久久久老虎| 欧美日韩在线三区| 色哟哟精品一区| 成人黄色小视频| 国产激情一区二区三区| 麻豆精品视频在线观看视频| 香蕉影视欧美成人| av午夜一区麻豆| 国产成人一区在线| 国产麻豆视频一区| 韩国三级电影一区二区| 蜜乳av一区二区三区| 亚洲一区二区高清| 香蕉久久夜色精品国产使用方法| 1024成人网色www| 国产精品人成在线观看免费| 国产亚洲一区二区三区| 欧美精品一区二区三区蜜臀| 日韩亚洲欧美成人一区| 日韩三级伦理片妻子的秘密按摩| 欧美日韩午夜精品| 欧美男同性恋视频网站| 欧美欧美欧美欧美| 日韩一区二区三区免费观看| 91精品国产色综合久久| 日韩欧美高清在线| 亚洲精品在线观看视频| 久久免费国产精品| 国产精品午夜在线| 国产精品视频yy9299一区| 国产精品日韩成人| 亚洲视频免费在线观看| 亚洲男人的天堂av| 亚洲高清不卡在线观看| 日韩成人一区二区三区在线观看| 日韩激情一区二区| 精品一区二区三区影院在线午夜| 国产综合色产在线精品| 成人小视频在线| 99久久er热在这里只有精品15 | 色猫猫国产区一区二在线视频| 色哟哟国产精品免费观看| 91久久精品一区二区三区| 欧美撒尿777hd撒尿| 日韩欧美一区二区三区在线| 日韩欧美成人激情| 国产欧美1区2区3区| 亚洲欧美日韩在线| 日韩高清国产一区在线| 韩国成人在线视频| 91网站视频在线观看| 欧美日韩一区二区三区在线| 欧美大片免费久久精品三p| 久久久久99精品一区| 亚洲欧美一区二区三区久本道91| 亚洲va韩国va欧美va| 久久电影网站中文字幕| 波波电影院一区二区三区| 欧美日韩一区二区在线观看视频 | 波波电影院一区二区三区| 欧美日韩国产高清一区二区三区 | 久久久久久久综合日本| 一区二区三区日本| 精品一区二区三区免费视频| 91亚洲男人天堂| 日韩欧美色综合| 亚洲精品国久久99热| 久久国产精品72免费观看| 色综合天天在线| 精品美女被调教视频大全网站| 激情综合色播五月| 91精品91久久久中77777| 日韩欧美一级精品久久| 亚洲日本在线观看| 国产麻豆视频精品| 欧美人牲a欧美精品| 国产精品色哟哟网站| 久久精品国产99久久6| 色偷偷88欧美精品久久久| 久久亚洲精华国产精华液| 亚洲一级电影视频| 9久草视频在线视频精品| 精品久久人人做人人爱| 偷拍与自拍一区| 色综合久久久久综合体桃花网| 日韩精品最新网址| 视频一区在线播放| 色悠悠久久综合| 国产欧美日韩麻豆91| 蜜臀av亚洲一区中文字幕| 欧美午夜精品免费| 国产精品久久久久久久午夜片| 麻豆精品一二三| 欧美一区二区免费观在线| 亚洲毛片av在线| 99在线视频精品| 国产日韩欧美a| 黑人巨大精品欧美黑白配亚洲| 欧美一卡二卡在线| 天天色 色综合| 在线观看成人小视频| 亚洲少妇中出一区| 99久久777色| 中文字幕免费一区| 成人免费毛片嘿嘿连载视频| 久久这里只有精品视频网| 久久精品久久综合| 日韩免费高清视频| 
久久99精品国产麻豆婷婷洗澡| 欧美高清性hdvideosex| 午夜不卡av免费| 欧美人xxxx| 欧洲激情一区二区| 一区二区三区国产精华| 色狠狠一区二区| 亚洲一区二区免费视频| 欧美三级视频在线| 亚洲国产成人高清精品| 欧美色图免费看| 午夜激情一区二区三区| 欧美日本免费一区二区三区| 午夜影院久久久| 日韩视频一区二区在线观看| 蜜臀91精品一区二区三区 | 蜜臀久久久久久久| 日韩精品一区二区三区蜜臀| 久久97超碰色| 国产女人18毛片水真多成人如厕 | 99re成人精品视频| 亚洲综合色丁香婷婷六月图片| 一本到一区二区三区| 午夜激情久久久| 欧美大片一区二区| 国产69精品久久久久777| 国产精品进线69影院| 91久久精品日日躁夜夜躁欧美| 亚洲自拍欧美精品| 精品少妇一区二区三区视频免付费 | 久久99精品国产.久久久久| 国产调教视频一区| 色悠久久久久综合欧美99|