

similargenerator.cs (written in C#)
/* Generates similar sentences for a given sentence.
 * Author : Dao Ngoc Thanh , thanh.dao@gmx.net 
 * (c) Dao Ngoc Thanh.
 * Methods used: Simulated Annealing & Backtracking generator
 * 
 * Acknowledgements: To J. Martin for the reuse of his original function "FindSynonyms".
 * 
 */
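
/*
 * Pipeline overview (see Generate below):
 *  1. For each input word with a known part of speech, FindSynonyms/LookupCandidates
 *     collect WordNet synonym candidates, falling back to morphological variants.
 *  2. IterativeGenerate runs simulated annealing: GetNeighbour proposes a random
 *     one-word substitution, ComputeScore rates the sentence by gloss overlap with
 *     nearby words, and Metropolis occasionally accepts worse moves.
 *  3. Add_Sentence records the best sentence plus up to 500 top-scoring variants,
 *     which GetResult exposes to the caller.
 *  BackTracking is an alternative exhaustive generator that is not invoked here.
 */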

using System;
using System.Collections;
using Wnlib;
using WordsMatching;

namespace SimilarSentence
{
	/// <summary>
	/// Takes a sentence as input and produces a list of approximately similar sentences.
	/// </summary>
	public class SimilarGenerator
	{
		private string _originalSentence;
		const int CONTEXT_SIZE=6;//Local disambiguation within the context size 
		private string[][][][] _dictInfo ; //[word][candidate][relation][gloss tokens]
		
		Tokeniser tokenize=new Tokeniser() ;

		ArrayList list=new ArrayList() ;
		MyWordInfo[] _myPos;
		int _numItems=0;
		int _numWord;
		private string bestSentence;
		int bestScore=0;
		private MyWordInfo[][] _alterWord;
		private int[][] dx;
		private int[] _selected;
		const int _max=50;

		private MyWordInfo[] _contextWords;		
		private Opt[] _relatedness=null;
		private int _overallScore=0;					
		private int[][][][] _scores;//[i][alter_i][j][alter_j]

		public SimilarGenerator(MyWordInfo[] pos, string originalSentence)
		{
			_myPos=pos;
			_originalSentence=originalSentence;
			MyInit();			
			Generate();
			list=k_best_sentence;
			
		}
		

		public ArrayList GetResult
		{
			get
			{	
				list.Insert(0, bestSentence) ;
				return list;
			}
		}
		
		public MyWordInfo[] FindSynonyms(ref MyWordInfo pos, bool includeMorphs)
		{
			pos.Word = pos.Word.ToLower();
			Wnlib.Index index = Wnlib.Index.lookup( pos.Word, PartOfSpeech.of( pos.Pos  ) );
		
			if( index == null )
			{
				if( !includeMorphs )
					return null;

				Wnlib.MorphStr morphs = new Wnlib.MorphStr( pos.Word, Wnlib.PartOfSpeech.of( pos.Pos  ) );
				string morph = "";
				while( ( morph = morphs.next() ) != null )
				{
					index = Wnlib.Index.lookup( morph, Wnlib.PartOfSpeech.of( pos.Pos  ) );
					pos.Word=morph;
					if( index != null )
						break;
				}
			}

			
			if( index == null )
				return null;
			
			return LookupCandidates( index, pos );
		}

		public static int GetSynsetIndex(string word, PartsOfSpeech pos)
		{
			word=word.ToLower() ;
			//word=RemoveBadChars (word);
			Wnlib.Index index=Wnlib.Index.lookup( word, PartOfSpeech.of(pos) );
			
			if( index == null )
			{
				Wnlib.MorphStr morphs=new Wnlib.MorphStr(word, Wnlib.PartOfSpeech.of( pos  ) );
				string morph = "";
				while( ( morph = morphs.next() ) != null )
				{
					index = Wnlib.Index.lookup(morph, Wnlib.PartOfSpeech.of(pos) );
					if( index != null )
						break;
				}
			}			

			if (index == null) return -1;
			else 
				return 0;
		}

		class CompareLexeme : IComparer
		{
			public int Compare(object x, object y) // descending sort by Frequency
			{
				return ((MyWordInfo)y).Frequency - ((MyWordInfo)x).Frequency;
			}
		}

		private MyWordInfo[] LookupCandidates(Wnlib.Index index, MyWordInfo pos )
		{						
			if (pos.Sense < 1 || pos.Sense > index.offs.Length) pos.Sense=1; // keep the 1-based sense index within offs[]
			SynSet synset=new Wnlib.SynSet( index.offs[pos.Sense - 1 ], index.pos , index.wd, null , pos.Sense - 1);					
						
			ArrayList lexemes=new ArrayList() ;
			ArrayList synIndex=new ArrayList() ;

			foreach (Lexeme obj in synset.words)
			{
				lexemes.Add(obj) ;
				synIndex.Add(index.offs[pos.Sense - 1 ]);
			}
			
			if (index.offs.Length > 1)
			{
				if (lexemes.Count <= 1)
				{
					for(int i=0; i < index.offs.Length; i++ )
					{				
						synset=new Wnlib.SynSet( index.offs[i], index.pos, index.wd, null, i );

						foreach (Lexeme obj in synset.words)
						{
							synIndex.Add(index.offs[i]);
							lexemes.Add(obj) ;
						}
					}
				}
				else
				{
					synset=new Wnlib.SynSet( index.offs[0], index.pos, index.wd, null, 0 );
					int count=0; // keep only the few most frequent word senses
					foreach (Lexeme obj in synset.words)
					{
						lexemes.Add(obj) ;
						synIndex.Add(index.offs[0]);
						++count;
						if (count > 4) break;
					}

				}
			}
			
			ArrayList sortedSet=new ArrayList() ;
			Hashtable trace=new Hashtable() ;
			int hasSem=0;
			for (int i = 0; i < lexemes.Count; i++)
			{				
				Lexeme word=(Lexeme)lexemes[i];				
				word.word=word.word.ToLower() ;

				int senIndex=(int)synIndex[i];
				if (senIndex != -1  && word.wnsns > 0)
				{
					word.semcor=new Wnlib.SemCor(word, senIndex);
					lexemes[i]=word;					
					++hasSem;
				}

				if (!trace.ContainsKey(word.word) )					
				{					
					if ((word.semcor != null &&  word.semcor.semcor  > 0 ) || (hasSem < 4))
					{
						trace[word.word]=1;
						sortedSet.Add(word) ;
					}
				}
			}
			
			Lexeme[] words=(Lexeme[])sortedSet.ToArray( typeof(Lexeme) );						

			ArrayList candidates=new ArrayList();

			for( int i=0; i < words.Length; i++ )
			{
				string word=words[i].word.Replace("_", " " );				
				if( word[0] <= 'Z' ) continue; // skip candidates that do not start with a lowercase letter (e.g. proper nouns)

				MyWordInfo newpos=new MyWordInfo(word, pos.Pos) ;
				newpos.Sense=words[i].wnsns;
				if (words[i].semcor != null)
					newpos.Frequency=words[i].semcor.semcor;
				else
					newpos.Frequency=0;

				candidates.Add( newpos);								
			}

			if (!trace.ContainsKey (index.wd))
				candidates.Add(pos) ;

			if (candidates.Count > 1)
			{
				CompareLexeme comparer=new CompareLexeme();
				candidates.Sort(comparer);
			}
			

			return (MyWordInfo[])candidates.ToArray( typeof(MyWordInfo) );
		}

		
		private int GetNeighbour(MyWordInfo[] current, out MyWordInfo[] trial)
		{			
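			// Propose a neighbour state: pick a random word, substitute a random synonym
			// candidate, and re-score the whole sentence. Returns -1 if no move is possible.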
			trial=(MyWordInfo[])current.Clone() ;

			int wordIndex=random.Next(current.Length);
			if (_alterWord[wordIndex] != null )
			{
				int candIndex=random.Next(_alterWord[wordIndex].Length) ;
				
				_selected[wordIndex]=candIndex;
				_contextWords[wordIndex]=_alterWord[wordIndex][candIndex];
				
				if (!Read_WordSenseInfo (wordIndex))  return -1;

				trial[wordIndex]=_alterWord[wordIndex][candIndex];
				int overall=0;
				for (int i=0; i < trial.Length; i++)
				{
					overall += ComputeScore (i);
				}

				return overall;
			}

			return -1;
		}
		

		private int InitialSentence(out MyWordInfo[] current)
		{
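			// Build the starting assignment: for every word, take the first synonym candidate
			// whose sense information can be read, accumulating the initial overall score.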
			current=new MyWordInfo[_myPos.Length] ;
			int overall=0;
			for (int i=0; i < current.Length; i++)
				if (_alterWord[i] != null && _alterWord[i].Length > 0)
			{
				for (int j = 0; j < _alterWord[i].Length; j++)
				{
					_selected[i]=j;
					_contextWords[i]=_alterWord[i][j];	
				
					if (Read_WordSenseInfo (i))
					{
						int score=ComputeScore(i);				
						overall +=score;

						break;
					}

				}
			}

			return overall;
		}

		private bool Metropolis(float delta, float temperature)
		{
			// Accept a non-improving move with probability exp(delta / temperature).
			double p=random.NextDouble() ;
			return (p <= Math.Exp( delta / temperature));
			//return (delta >= 0) && (p.NextDouble() <= Math.Exp(- delta / temperature));
		}

		private float GetCost(int num)
		{
			return (float)1.0f/(1 + num);
		}

		private Random random;
		private void IterativeGenerate(int trialNum, float descentFactor)
		{
			float temperature=1;
			float ANNEAL_SCHEDULE=descentFactor; // cooling factor applied after each temperature step (0.9F at the call site)
			int MAX_TRIAL=trialNum;              // proposed moves per temperature step (100 at the call site)
			int MAX_SUCCEED=10; // 1/10 of N_TRIAL (not used below)

			MyWordInfo[] current;
			int current_cost=InitialSentence(out current);
			Add_Sentence(Convert.ToInt32(current_cost) );

			random=new Random() ;
			for (int i=0; i < 100; i++) // cooling iterations
			{								
				int nsuccess=0;
				for (int j=0; j < MAX_TRIAL; j++)
				{
					MyWordInfo[] trial;
					int new_cost=GetNeighbour (current,out trial);

					if ( new_cost != -1)
					{						
						//float delta=current_cost - new_cost;
						float delta=new_cost - current_cost;
						//float delta=GetCost(trial_cost) - GetCost(current_cost);

						if (delta > 0 || Metropolis (delta, temperature)) //accept trial higher than current or with a probability
						{
							current_cost=new_cost;
							current=trial;
							Add_Sentence(new_cost) ;
							++nsuccess;
							
						}						
					}
				}
				
				temperature *= ANNEAL_SCHEDULE;
			}
		}

		private void Generate()
		{				
			_alterWord=new MyWordInfo[_myPos.Length][] ;
			
			_numWord=_myPos.Length;
			dx=new int[_numWord][] ;
			_selected=new int[_numWord] ;

			for(int i=0; i <_myPos.Length; i++)				
			{		
				_selected[i]=-1;
				MyWordInfo pos=_myPos[i];				
				if (pos.Pos != PartsOfSpeech.Unknown && pos.Sense != -1)
				{
					_alterWord[i]=FindSynonyms(ref pos , true );
					
					if (_alterWord[i] != null)
					{
						foreach(MyWordInfo poss in _alterWord[i])						
							poss.Pos=pos.Pos;
						
						dx[i]=new int[_alterWord[i].Length] ;
						_dictInfo[i]=new string[_alterWord[i].Length][][] ;

					}				
				}
			}
			
			_scores=new int[_myPos.Length][][][] ;
			for(int i=0; i <_myPos.Length; i++)
				if (_alterWord[i] != null)
			{				
				_scores[i]=new int[_alterWord[i].Length][][] ;
				for(int a_i=0; a_i < _alterWord[i].Length; a_i++)					
				{
					_scores[i][a_i]=new int[_myPos.Length][] ;
					
					for(int j=0; j <_myPos.Length; j++)
						if (_alterWord[j] != null)	
					{
						_scores[i][a_i][j]=new int[_alterWord[j].Length] ;
					}
				}
			}

			_numItems=0 ;
			list.Clear() ;
			bestScore=0;
			bestSentence=string.Empty;
			IterativeGenerate(100, 0.9F);
			//TryAll (0);
			
			list.Insert(0, bestSentence) ;

		}
		
		private ArrayList k_best_sentence=new ArrayList() ;
		private ArrayList k_best_score=new ArrayList() ;

		private void Add_Sentence(int score)
		{
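			// Materialise the current word selection into a sentence via string.Format,
			// track the best-scoring sentence, and keep at most 500 unique variants,
			// evicting the lowest-scoring one when the list is full.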
			++_numItems ;
			MyWordInfo[] pos=new MyWordInfo[_myPos.Length] ;
			string newsen="";
			string[] words=new string[_numWord] ;
			for (int i=0; i <_numWord ; i++)
			{									
				string word=string.Empty ;

				if (_selected[i] == -1 )
				{					
					word=_myPos[i].Word;
				}
				else
				{						
					word=_alterWord[i][_selected[i]].Word;										
				}
				words[i]=word;
				pos[i]=new MyWordInfo(word, _myPos[i].Pos) ;
			}

			newsen=string.Format(_originalSentence, words) ;
			
			if (score > bestScore)
			{
				bestSentence=newsen + " " + score;
				bestScore=score;
			}

			if (k_best_sentence.Count < 500)
			{
				newsen=newsen + " " + score;
				if (!k_best_sentence.Contains(newsen) )
				{
					k_best_sentence.Add(newsen);
					k_best_score.Add(score);
				}
			}
			else
			{
				int min=10000000;
				int rem=-1;
				for (int j=0; j < k_best_sentence.Count ; j++)
				{
					if ((int)k_best_score[j] < min)
					{
						min=(int)k_best_score[j];
						rem=j;
					}
				}
				newsen=newsen + " " + score;
				if (!k_best_sentence.Contains(newsen) && rem != -1)				
				{
					k_best_sentence.RemoveAt(rem) ;
					k_best_score.RemoveAt(rem) ;

					k_best_sentence.Add(newsen);
					k_best_score.Add(score);					
				}

			}							
			
		}

		private void BackTracking(int index)
		{
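			// Exhaustively enumerate candidate combinations word by word, scoring
			// incrementally; generation stops after 1000 sentences.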
			//if (_numItems > _max) return;
			
			if (_numItems > 1000) return;
			if (index == _numWord)
			{

				Add_Sentence(_overallScore);

				return;
			}

			if (_alterWord[index] != null && _alterWord[index].Length > 0)
			{
				for (int j = 0; j < _alterWord[index].Length; j++)
					if (dx[index][j] == 0)
					{
						dx[index][j]=1;
						_selected[index]=j;
						_contextWords[index]=_alterWord[index][j];
						int delta=0;
						if (Read_WordSenseInfo (index)) delta=ComputeScore(index);
						_overallScore +=delta;

						BackTracking(index + 1);

						_selected[index]=-1;
						_overallScore -=delta;
						_contextWords[index]=null;						
						dx[index][j]=0;
					}
			}
			else
			{
				_selected[index] = -1;
				BackTracking(index + 1);
			}
		}

		private int ComputeScore(int currentIndex)
		{
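			// Score the selected candidate at currentIndex against the selected candidates of
			// up to CONTEXT_SIZE/2 preceding words using gloss overlap, caching pairwise scores
			// in _scores and propagating them to candidates that share the same synset.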
			int total=0;
			{
				if (_dictInfo[currentIndex] == null) return 0;
				int med=CONTEXT_SIZE/2;
				
				for (int i=currentIndex - med; i < currentIndex; i++)
					if ( i >= 0 && _dictInfo[i] != null && _selected[i] >= 0)
					{
						if (_scores[i][_selected[i]][currentIndex][_selected[currentIndex]] > 0)
							total += _scores[i][_selected[i]][currentIndex][_selected[currentIndex]];
						else
						{
							int score=ScoringSensePair (_dictInfo[i][_selected[i]], _dictInfo[currentIndex][_selected[currentIndex]]);
							total += score;
							if (score > 0 )
							{
								_scores[i][_selected[i]][currentIndex][_selected[currentIndex]]=score;
								_scores[currentIndex][_selected[currentIndex]][i][_selected[i]]=score;
								
								for (int a_i=0; a_i < _alterWord[i].Length; a_i++ )
									if (_alterWord[i][a_i].SynsetIndex == _alterWord[i][_selected[i]].SynsetIndex)
								{
									for (int a_j=0; a_j < _alterWord[currentIndex].Length; a_j++ )
										if (_alterWord[currentIndex][a_j].SynsetIndex == _alterWord[currentIndex][_selected[currentIndex]].SynsetIndex)
									{
										_scores[i][a_i][currentIndex][a_j]=score;
										_scores[currentIndex][a_j][i][a_i]=score;
									}
								}
							}
						}
					}

			}

			return total;
		}		

		private void MyInit()
		{
			tokenize.UseStemming=true;
			_contextWords=new MyWordInfo[_myPos.Length] ;
			_dictInfo=new string[_contextWords.Length][][][];			

			for (int i=0; i < _myPos.Length; i++)				
			{			
				++_myPos[i].Sense ;					
			}								
		}

		private bool Read_WordSenseInfo(int wordIndex)
		{
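			// Lazily fetch the relatedness glosses for the currently selected candidate and
			// cache them in _dictInfo, sharing the cache with candidates of the same synset;
			// returns false when no gloss information is available.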
			if (_dictInfo[wordIndex][_selected[wordIndex]] != null) return true;

            _relatedness = Relatedness.GetRelatedness(_contextWords[wordIndex].Pos);

			//if (stop) 						
			{
                string[][] tmp = Relatedness.GetRelatednessGlosses(_contextWords[wordIndex].Word, _contextWords[wordIndex].Sense, _relatedness);						
				_dictInfo[wordIndex][_selected[wordIndex]]=tmp;

				for (int i=0; i < _dictInfo[wordIndex].Length ; i++)				
					if (_alterWord[wordIndex][i].SynsetIndex == _contextWords[wordIndex].SynsetIndex)
				{
					_dictInfo[wordIndex][i]=tmp;
				}
			}

			if (_dictInfo[wordIndex][_selected[wordIndex]] != null && _dictInfo[wordIndex][_selected[wordIndex]].Length > 0 ) 
				return true;
			else 
				return false;

		}

		private int GetOverlap(string[] a,string[] b)
		{
			//IOverlapCounter overlap=new SimpleOverlapCounter() ;
			IOverlapCounter overlap=new ExtOverlapCounter() ;
			return overlap.GetScore(a, b) ;
		}
			
		private int ScoringSensePair(string[][] sense1, string[][] sense2)
		{
			int score=0;
			
			try
			{
				int m=sense1.Length , n=sense2.Length ;
				for(int i=0; i < m; i++)
				{
					for(int j=0; j < n; j++)
					{
						score +=GetOverlap(sense1[i], sense2[j]) ;
					}
				}
			}
			catch (Exception)
			{
				// Ignore malformed sense data; the pair simply contributes no additional score.
			}

			return score;
		}


		
		private string RemoveBadChars(string s)
		{
			string[] badChars=new string[]{"=>", "==","=","->",">","+",";",",","_","-","."} ;
			foreach(string ch in badChars)			
				s=s.Replace(ch, " ") ;

			return s;
		}
		


	}
}
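
For reference, a minimal usage sketch follows. Treat it as an assumption-laden illustration rather than the library's documented API: the MyWordInfo(word, pos) constructor mirrors its use inside LookupCandidates and Add_Sentence above, but the PartsOfSpeech member names (Noun, Verb, Adj), the demo class, the example template and words are hypothetical, and the WordNet dictionary files required by Wnlib must already be configured.

// Hypothetical usage sketch; not part of similargenerator.cs.
using System;
using Wnlib;
using WordsMatching;
using SimilarSentence;

class SimilarGeneratorDemo
{
	static void Main()
	{
		// Add_Sentence fills this template with string.Format, one slot per word.
		string template = "The {0} {1} a {2} {3}.";

		// Assumed PartsOfSpeech member names; adjust to the actual Wnlib enum.
		MyWordInfo[] words = new MyWordInfo[]
		{
			new MyWordInfo("boy",    PartsOfSpeech.Noun),
			new MyWordInfo("wrote",  PartsOfSpeech.Verb),
			new MyWordInfo("short",  PartsOfSpeech.Adj),
			new MyWordInfo("letter", PartsOfSpeech.Noun)
		};

		SimilarGenerator generator = new SimilarGenerator(words, template);
		foreach (object sentence in generator.GetResult)
			Console.WriteLine(sentence);   // each entry is "sentence score"
	}
}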
