// tokeniser.cs
/*
Tokenization
Author: Thanh Ngoc Dao - Thanh.dao@gmx.net
Copyright (c) 2005 by Thanh Ngoc Dao.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using WawaSoft.Search.Common;
namespace WawaSoft.Search.Common
{
/// <summary>
/// Summary description for Tokeniser.
/// Partition string into SUBwords
/// </summary>
/// <summary>
/// Summary description for Tokeniser.
/// Partition string into SUBwords.
/// </summary>
internal class Tokeniser : ITokeniser
{
	// Delimiter set: space, tab, braces, parens, colon, semicolon, period, newline.
	// Cached as static readonly so the pattern is parsed once, not on every call.
	// (The original pattern contained a redundant duplicate space in the class.)
	private static readonly Regex Delimiters = new Regex("([ \\t{}():;.\n])");

	/// <summary>
	/// Simple tokenisation on whitespace/punctuation, ignoring case.
	/// (Original author's note: in practice a real Chinese word-segmentation
	/// algorithm could be substituted here.)
	/// </summary>
	/// <param name="input">Raw text to tokenise; must not be null.</param>
	/// <returns>
	/// Lower-cased tokens with delimiters, blank entries and stop words removed.
	/// </returns>
	public IList<string> Partition(string input)
	{
		// Invariant lower-casing: tokens are compared as identifiers, not UI text.
		string[] tokens = Delimiters.Split(input.ToLowerInvariant());

		List<string> filtered = new List<string>(tokens.Length);
		foreach (string token in tokens)
		{
			// Split with a capturing group also yields the delimiters themselves;
			// skip those, skip blank/whitespace tokens, and skip stop words.
			if (!Delimiters.IsMatch(token)
				&& token.Trim().Length > 0
				&& !StopWordsHandler.IsStopword(token))
			{
				filtered.Add(token);
			}
		}
		// ToArray preserves the original's observable return type (string[]).
		return filtered.ToArray();
	}
}
}
// (Removed: keyboard-shortcut help text copied from a code-viewer web page;
//  it was not part of the source and prevented compilation.)