
postaggerme.java

A development toolkit for natural language processing

JAVA
字號:
///////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2002 Jason Baldridge and Gann Bierner
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
//////////////////////////////////////////////////////////////////////////////

package opennlp.tools.postag;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;

import opennlp.maxent.DataStream;
import opennlp.maxent.Evalable;
import opennlp.maxent.EventCollector;
import opennlp.maxent.EventStream;
import opennlp.maxent.GISModel;
import opennlp.maxent.MaxentModel;
import opennlp.maxent.PlainTextByLineDataStream;
import opennlp.maxent.TwoPassDataIndexer;
import opennlp.maxent.io.SuffixSensitiveGISModelWriter;
import opennlp.tools.ngram.Dictionary;
import opennlp.tools.ngram.MutableDictionary;
import opennlp.tools.util.BeamSearch;
import opennlp.tools.util.Pair;
import opennlp.tools.util.Sequence;

/**
 * A part-of-speech tagger that uses maximum entropy.  Tries to predict whether
 * words are nouns, verbs, or any of 70 other POS tags depending on their
 * surrounding context.
 *
 * @author      Gann Bierner
 * @version $Revision: 1.16 $, $Date: 2005/11/14 19:50:43 $
 */
public class POSTaggerME implements Evalable, POSTagger {

  /**
   * The maximum entropy model to use to evaluate contexts.
   */
  protected MaxentModel _posModel;

  /**
   * The feature context generator.
   */
  protected POSContextGenerator _contextGen;

  /**
   * Tag dictionary used for restricting words to a fixed set of tags.
   */
  protected TagDictionary tagDictionary;

  protected Dictionary ngramDictionary;

  /**
   * Says whether a filter should be used to check whether a tag assignment
   * is to a word outside of a closed class.
   */
  protected boolean _useClosedClassTagsFilter = false;

  private static final int DEFAULT_BEAM_SIZE = 3;

  /** The size of the beam to be used in determining the best sequence of pos tags. */
  protected int size;

  private Sequence bestSequence;

  /** The search object used for searching multiple sequences of tags. */
  protected BeamSearch beam;

  public POSTaggerME(MaxentModel mod, Dictionary dict) {
    this(mod, new DefaultPOSContextGenerator(dict));
  }

  public POSTaggerME(MaxentModel mod, Dictionary dict, TagDictionary tagdict) {
    this(DEFAULT_BEAM_SIZE, mod, new DefaultPOSContextGenerator(dict), tagdict);
  }

  public POSTaggerME(MaxentModel mod, POSContextGenerator cg) {
    this(DEFAULT_BEAM_SIZE, mod, cg, null);
  }

  public POSTaggerME(MaxentModel mod, POSContextGenerator cg, TagDictionary dict) {
    this(DEFAULT_BEAM_SIZE, mod, cg, dict);
  }

  public POSTaggerME(int beamSize, MaxentModel mod, POSContextGenerator cg, TagDictionary tagdict) {
    size = beamSize;
    _posModel = mod;
    _contextGen = cg;
    beam = new PosBeamSearch(size, cg, mod);
    tagDictionary = tagdict;
  }

  public String getNegativeOutcome() {
    return "";
  }

  /**
   * Returns the number of different tags predicted by this model.
   * @return the number of different tags predicted by this model.
   */
  public int getNumTags() {
    return _posModel.getNumOutcomes();
  }

  public EventCollector getEventCollector(Reader r) {
    return new POSEventCollector(r, _contextGen);
  }

  public List tag(List sentence) {
    bestSequence = beam.bestSequence(sentence, null);
    return bestSequence.getOutcomes();
  }

  public String[] tag(String[] sentence) {
    List t = tag(Arrays.asList(sentence));
    return ((String[]) t.toArray(new String[t.size()]));
  }

  public void probs(double[] probs) {
    bestSequence.getProbs(probs);
  }

  public double[] probs() {
    return bestSequence.getProbs();
  }

  public String tag(String sentence) {
    ArrayList toks = new ArrayList();
    StringTokenizer st = new StringTokenizer(sentence);
    while (st.hasMoreTokens())
      toks.add(st.nextToken());
    List tags = tag(toks);
    StringBuffer sb = new StringBuffer();
    for (int i = 0; i < tags.size(); i++)
      sb.append(toks.get(i) + "/" + tags.get(i) + " ");
    return sb.toString().trim();
  }

  public void localEval(MaxentModel posModel, Reader r, Evalable e, boolean verbose) {
    _posModel = posModel;
    float total = 0, correct = 0, sentences = 0, sentsCorrect = 0;
    BufferedReader br = new BufferedReader(r);
    String line;
    try {
      while ((line = br.readLine()) != null) {
        sentences++;
        Pair p = POSEventCollector.convertAnnotatedString(line);
        List words = (List) p.a;
        List outcomes = (List) p.b;
        List tags = beam.bestSequence(words, null).getOutcomes();
        int c = 0;
        boolean sentOk = true;
        for (Iterator t = tags.iterator(); t.hasNext(); c++) {
          total++;
          String tag = (String) t.next();
          if (tag.equals(outcomes.get(c)))
            correct++;
          else
            sentOk = false;
        }
        if (sentOk)
          sentsCorrect++;
      }
    }
    catch (IOException E) {
      E.printStackTrace();
    }
    System.out.println("Accuracy         : " + correct / total);
    System.out.println("Sentence Accuracy: " + sentsCorrect / sentences);
  }

  private class PosBeamSearch extends BeamSearch {

    public PosBeamSearch(int size, POSContextGenerator cg, MaxentModel model) {
      super(size, cg, model);
    }

    public PosBeamSearch(int size, POSContextGenerator cg, MaxentModel model, int cacheSize) {
      super(size, cg, model, cacheSize);
    }

    protected boolean validSequence(int i, Object[] inputSequence, String[] outcomesSequence, String outcome) {
      if (tagDictionary == null) {
        return true;
      }
      else {
        String[] tags = tagDictionary.getTags(inputSequence[i].toString());
        if (tags == null) {
          return true;
        }
        else {
          return Arrays.asList(tags).contains(outcome);
        }
      }
    }

    protected boolean validSequence(int i, List inputSequence, Sequence outcomesSequence, String outcome) {
      if (tagDictionary == null) {
        return true;
      }
      else {
        String[] tags = tagDictionary.getTags(inputSequence.get(i).toString());
        if (tags == null) {
          return true;
        }
        else {
          return Arrays.asList(tags).contains(outcome);
        }
      }
    }
  }

  public String[] getOrderedTags(List words, List tags, int index) {
    return getOrderedTags(words, tags, index, null);
  }

  public String[] getOrderedTags(List words, List tags, int index, double[] tprobs) {
    double[] probs = _posModel.eval(_contextGen.getContext(index, words.toArray(), (String[]) tags.toArray(new String[tags.size()]), null));
    String[] orderedTags = new String[probs.length];
    for (int i = 0; i < probs.length; i++) {
      int max = 0;
      for (int ti = 1; ti < probs.length; ti++) {
        if (probs[ti] > probs[max]) {
          max = ti;
        }
      }
      orderedTags[i] = _posModel.getOutcome(max);
      if (tprobs != null) {
        tprobs[i] = probs[max];
      }
      probs[max] = 0;
    }
    return (orderedTags);
  }

  public static GISModel train(EventStream es, int iterations, int cut) throws IOException {
    return opennlp.maxent.GIS.trainModel(iterations, new TwoPassDataIndexer(es, cut));
  }

  private static void usage() {
    System.err.println("Usage: POSTaggerME [-encoding encoding] [-dict dict_file] training model [cutoff] [iterations]");
    System.err.println("This trains a new model on the specified training file and writes the trained model to the model file.");
    System.err.println("-encoding Specifies the encoding of the training file");
    System.err.println("-dict Specifies that a dictionary file should be created for use in distinguishing between rare and non-rare words");
    System.exit(1);
  }

  /**
   * <p>Trains a new pos model.</p>
   *
   * <p>Usage: java opennlp.tools.postag.POSTaggerME [-encoding charset] [-dict dict_file] data_file new_model_name (iterations cutoff)?</p>
   *
   */
  public static void main(String[] args) throws IOException {
    if (args.length == 0) {
      usage();
    }
    int ai = 0;
    try {
      String encoding = null;
      String dict = null;
      while (args[ai].startsWith("-")) {
        if (args[ai].equals("-encoding")) {
          ai++;
          if (ai < args.length) {
            encoding = args[ai++];
          }
          else {
            usage();
          }
        }
        else if (args[ai].equals("-dict")) {
          ai++;
          if (ai < args.length) {
            dict = args[ai++];
          }
          else {
            usage();
          }
        }
        else {
          System.err.println("Unknown option " + args[ai]);
          usage();
        }
      }
      File inFile = new File(args[ai++]);
      File outFile = new File(args[ai++]);
      int cutoff = 5;
      int iterations = 100;
      if (args.length > ai) {
        cutoff = Integer.parseInt(args[ai++]);
        iterations = Integer.parseInt(args[ai++]);
      }
      GISModel mod;
      if (dict != null) {
        System.err.println("Building dictionary");
        MutableDictionary mdict = new MutableDictionary(cutoff);
        DataStream data = new opennlp.maxent.PlainTextByLineDataStream(new java.io.FileReader(inFile));
        while (data.hasNext()) {
          String tagStr = (String) data.nextToken();
          String[] tt = tagStr.split(" ");
          String[] words = new String[tt.length];
          for (int wi = 0; wi < words.length; wi++) {
            words[wi] = tt[wi].substring(0, tt[wi].lastIndexOf('_'));
          }
          mdict.add(words, 1, true);
        }
        System.out.println("Saving the dictionary");
        mdict.persist(new File(dict));
      }
      EventStream es;
      if (encoding == null) {
        if (dict == null) {
          es = new POSEventStream(new PlainTextByLineDataStream(new InputStreamReader(new FileInputStream(inFile))));
        }
        else {
          es = new POSEventStream(new PlainTextByLineDataStream(new InputStreamReader(new FileInputStream(inFile))), new Dictionary(dict));
        }
      }
      else {
        if (dict == null) {
          es = new POSEventStream(new PlainTextByLineDataStream(new InputStreamReader(new FileInputStream(inFile), encoding)));
        }
        else {
          es = new POSEventStream(new PlainTextByLineDataStream(new InputStreamReader(new FileInputStream(inFile), encoding)), new Dictionary(dict));
        }
      }
      mod = train(es, iterations, cutoff);
      System.out.println("Saving the model as: " + outFile);
      new SuffixSensitiveGISModelWriter(mod, outFile).persist();
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }
}
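The listing above ends with a training entry point but shows no example of running a trained model. The short sketch below is not part of the original file; it assumes a model written by main() via SuffixSensitiveGISModelWriter can be read back with the companion opennlp.maxent.io.SuffixSensitiveGISModelReader from the same era of the maxent package, and the file names tag.bin.gz and dict.bin.gz are placeholders rather than paths from the original source.

import java.io.File;
import java.io.IOException;

import opennlp.maxent.MaxentModel;
import opennlp.maxent.io.SuffixSensitiveGISModelReader;
import opennlp.tools.ngram.Dictionary;
import opennlp.tools.postag.POSTaggerME;

// Hypothetical driver class; illustrates the tagging API only.
public class TagDemo {

  public static void main(String[] args) throws IOException {
    // Load a previously trained maxent POS model (placeholder path).
    MaxentModel model = new SuffixSensitiveGISModelReader(new File("tag.bin.gz")).getModel();

    // Optional n-gram dictionary produced with the -dict option of
    // POSTaggerME.main() (placeholder path); it feeds the features of
    // DefaultPOSContextGenerator.
    Dictionary dict = new Dictionary("dict.bin.gz");

    POSTaggerME tagger = new POSTaggerME(model, dict);

    // The tag(String) overload tokenizes on whitespace and returns
    // "word/TAG" pairs as a single string.
    System.out.println(tagger.tag("The quick brown fox jumps over the lazy dog ."));
  }
}

Callers that already hold tokenized input can use the String[] or List overloads of tag() instead, and probs() exposes the per-token probabilities of the best sequence found by the beam search.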
