textquery.c

#include "TextQuery.h"

string TextQuery::filt_elems( "\",.;:!?)(\\/" );

void
TextQuery::
// display_solution( ostream &os )
display_solution()
{
	cout << "\n"
	     << "Requested query: "
	     << *query << "\n\n";

	const set<short,less<short>,allocator> *solution = query->solution();
	
	if ( ! solution->size() ) {
	     cout << "\n\t" 
		  << "Sorry, no matching lines were found in text.\n"
		  << endl;
	}

	set<short,less<short>,allocator>::const_iterator 
		it = solution->begin(),
		end_it = solution->end();

	for ( ; it != end_it; ++it ) {
		int line = *it;
		// don't confound user with text lines starting at 0 ...
		cout << "( " << line+1 << " ) "
		     << (*lines_of_text)[line] << '\n';
	}

	cout << endl;
}

void
TextQuery::
retrieve_text()
{
	string file_name;
	
	cout << "please enter file name: ";
	cin  >> file_name;

	ifstream infile( file_name.c_str(), ios::in );
	if ( !infile ) {
		cerr << "oops! unable to open file "
		     << file_name << " -- bailing out!\n";
		exit( -1 );
	}
	else cout << "\n";

	lines_of_text = new vector<string,allocator>;
	string textline;

	while ( getline( infile, textline, '\n' ))
		  lines_of_text->push_back( textline );
}

void
TextQuery::
separate_words()
{
	vector<string,allocator>   *words     = new vector<string,allocator>;
	vector<location,allocator> *locations = new vector<location,allocator>;

        for ( short line_pos = 0; line_pos < lines_of_text->size(); line_pos++ )
        {
		short  word_pos = 0;
                string textline = (*lines_of_text)[ line_pos ];

                string::size_type eol = textline.length();
                string::size_type pos = 0, prev_pos = 0;

                while (( pos = textline.find_first_of( ' ', pos )) 
                            != string::npos )
                {
                        words->push_back( 
                               textline.substr( prev_pos, pos - prev_pos ));
                        locations->push_back( make_pair( line_pos, word_pos ));

                        word_pos++; pos++; prev_pos = pos;
                }

                words->push_back( textline.substr( prev_pos, pos - prev_pos ));
                locations->push_back( make_pair( line_pos, word_pos ));

                // record size of each line
                line_cnt.push_back( word_pos );
        }
	
        text_locations = new text_loc( words, locations );
}

void 
TextQuery::
filter_text()
{
	if ( filt_elems.empty() )
	     return;

	vector<string,allocator> *words = text_locations->first;

	vector<string,allocator>::iterator iter = words->begin();
	vector<string,allocator>::iterator iter_end = words->end();

	while ( iter != iter_end )
	{
                string::size_type pos = 0;
                while (( pos = (*iter).find_first_of( filt_elems, pos )) 
                            != string::npos )
                       (*iter).erase(pos,1);
		iter++;
	}
}

void
TextQuery::
suffix_text()
{
        vector<string,allocator> *words = text_locations->first;

        vector<string,allocator>::iterator iter = words->begin();
        vector<string,allocator>::iterator iter_end = words->end();

        while ( iter != iter_end )
        {
		// if 3 or fewer characters, leave the word as is
		if ( (*iter).size() <= 3 ) { iter++; continue; }
		if ( (*iter)[ (*iter).size()-1 ] == 's' )
		       suffix_s( *iter );

		// additional suffix handling goes here ...

		iter++;
        }
}

void
TextQuery::
suffix_s( string &word )
{
        string::size_type spos = 0;
        string::size_type pos3 = word.size()-3;

        // "ous", "ss", "is", "ius"
        string suffixes( "oussisius" );

        if ( ! word.compare( pos3, 3, suffixes, spos, 3 ) ||
             ! word.compare( pos3, 3, suffixes, spos+6, 3 ) ||
             ! word.compare( pos3+1, 2, suffixes, spos+2, 2 ) ||
             ! word.compare( pos3+1, 2, suffixes, spos+4, 2 ))
                return;

        string ies( "ies" );
        if ( ! word.compare( pos3, 3, ies ))
        {
             word.replace( pos3, 3, 1, 'y' );
             return;
        }

        string ses( "ses" );
        if ( ! word.compare( pos3, 3, ses ))
        {
             word.erase( pos3+1, 2 );
             return;
        }

        // erase ending 's'
        word.erase( pos3+2 );

        // watch out for "'s"
        if ( word[ pos3+1 ] == '\'' )
             word.erase( pos3+1 );
}
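
The suffix rules above are easiest to follow on concrete words. The sketch below repeats the same rules as a standalone free function with a small test driver; the function name strip_suffix_s and the sample word list are invented for illustration and are not part of the TextQuery interface.

// Standalone illustration of the suffix_s() rules -- strip_suffix_s is a
// hypothetical free function, not a member of TextQuery.
#include <iostream>
#include <string>
using namespace std;

void strip_suffix_s( string &word )
{
        string::size_type pos3 = word.size()-3;

        // endings left untouched: "ous", "ius", "ss", "is"
        string suffixes( "oussisius" );
        if ( ! word.compare( pos3, 3, suffixes, 0, 3 ) ||
             ! word.compare( pos3, 3, suffixes, 6, 3 ) ||
             ! word.compare( pos3+1, 2, suffixes, 2, 2 ) ||
             ! word.compare( pos3+1, 2, suffixes, 4, 2 ))
                return;

        if ( ! word.compare( pos3, 3, "ies" )) {   // "flies"  -> "fly"
             word.replace( pos3, 3, 1, 'y' );
             return;
        }

        if ( ! word.compare( pos3, 3, "ses" )) {   // "houses" -> "house"
             word.erase( pos3+1, 2 );
             return;
        }

        word.erase( pos3+2 );                      // drop the trailing 's'
        if ( word[ pos3+1 ] == '\'' )              // "alice's" -> "alice"
             word.erase( pos3+1 );
}

int main()
{
        string samples[] = { "flowers", "flies", "houses", "famous", "tells" };

        for ( int ix = 0; ix < 5; ++ix ) {
             string word = samples[ix];

             // same guard as suffix_text(): short words and words not
             // ending in 's' pass through unchanged
             if ( word.size() > 3 && word[ word.size()-1 ] == 's' )
                  strip_suffix_s( word );

             cout << samples[ix] << " -> " << word << '\n';
        }
        return 0;
}

Run as-is, this prints flower, fly, house, famous and tell, which is the behavior the sample text map at the end of the file reflects (tell rather than tells, say rather than says).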

void
TextQuery::
strip_caps()
{
        vector<string,allocator> *words = text_locations->first;

        vector<string,allocator>::iterator iter = words->begin();
        vector<string,allocator>::iterator iter_end = words->end();

        string caps( "ABCDEFGHIJKLMNOPQRSTUVWXYZ" );

        while ( iter != iter_end ) {
                string::size_type pos = 0;
                while (( pos = (*iter).find_first_of( caps, pos )) 
                            != string::npos )
                       (*iter)[ pos ] = tolower( (*iter)[pos] );
                ++iter;
        }
}


void
TextQuery::
build_word_map()
{
     word_map = new map< string, loc*, less<string>, allocator >;

     typedef map<string,loc*,less<string>,allocator>::value_type value_type;
     typedef set<string,less<string>,allocator>::difference_type diff_type;

     set<string,less<string>,allocator> exclusion_set;

     ifstream infile( "exclusion_set" );
     if ( !infile )
     {
          static string default_excluded_words[25] = {
            "the","and","but","that","then","are","been",
            "can","can't","cannot","could","did","for",
            "had","have","him","his","her","its","into",
            "were","which","when","with","would"
          };

          cerr << "warning! unable to open word exclusion file! -- "
               << "using default set\n";

          copy( default_excluded_words, default_excluded_words+25, 
                inserter( exclusion_set, exclusion_set.begin() ));
     }
     else {
          istream_iterator< string, diff_type > input_set( infile ), eos;
          copy( input_set, eos, 
                inserter( exclusion_set, exclusion_set.begin() ));
     }

     // iterate through the words, entering each word/location pair into the map

     vector<string,allocator>   *text_words = text_locations->first;
     vector<location,allocator> *text_locs  = text_locations->second;

     register int elem_cnt = text_words->size();
     for ( int ix = 0; ix < elem_cnt; ++ix )
         {
                string textword = ( *text_words )[ ix ];

                // exclusion strategies:
                // fewer than 3 characters, or present in the exclusion set
                if ( textword.size() < 3 ||
                     exclusion_set.count( textword ))
                        continue;

                if ( ! word_map->count((*text_words)[ix] ))
                {  // not present, add it:
                   loc *ploc = new vector<location,allocator>;
                   ploc->push_back( (*text_locs)[ix] );
                   word_map->insert( value_type( (*text_words)[ix], ploc ));
                }
                else (*word_map)[(*text_words)[ix]]->push_back( (*text_locs)[ix] );
         }
}
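
The map-building step above follows a check-then-insert-or-append pattern: look the word up with count(), insert a fresh location vector on the first occurrence, and push onto the existing vector afterwards. The sketch below shows the same pattern in isolation against plain std::map and std::vector (no explicit allocator arguments); the hard-coded input array and the typedef names are invented for the example.

// Minimal sketch of the insert-or-append idiom used by build_word_map();
// the input data and type names here are illustrative only.
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>
using namespace std;

typedef pair<short,short>   location;    // (line, word-position)
typedef vector<location>    loc;
typedef map<string,loc>     word_map_t;

int main()
{
        word_map_t word_map;

        // a few (word, location) pairs, as separate_words() would produce them
        pair<string,location> input[] = {
                make_pair( string("daddy"), location(0,8) ),
                make_pair( string("hair"),  location(0,6) ),
                make_pair( string("daddy"), location(3,3) )
        };

        for ( int ix = 0; ix < 3; ++ix ) {
                const string   &word  = input[ix].first;
                const location &where = input[ix].second;

                if ( ! word_map.count( word ))
                        // first occurrence: insert a fresh location vector
                        word_map.insert(
                                word_map_t::value_type( word, loc( 1, where )));
                else    // seen before: append to the existing vector
                        word_map[ word ].push_back( where );
        }

        for ( word_map_t::const_iterator it = word_map.begin();
              it != word_map.end(); ++it ) {
                cout << it->first << ":";
                for ( loc::const_iterator lit = it->second.begin();
                      lit != it->second.end(); ++lit )
                        cout << " (" << lit->first << "," << lit->second << ")";
                cout << '\n';
        }
        return 0;
}

Because this map stores the vectors by value, the count() test could even be dropped and the loop body reduced to word_map[word].push_back(where), since operator[] default-constructs an empty vector on first use; the original keeps the explicit test because its word_map stores raw loc* pointers, for which a default-constructed entry would be a null pointer.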

void 
TextQuery::
query_text() 
{
	string text;
	string caps( "ABCDEFGHIJKLMNOPQRSTUVWXYZ" );

	vector<string, allocator> query_text;

	UserQuery user_query;
 	init_query_statics();
    
	do {
		query_text.clear();

 	cout << "Enter a query -- please separate each item "
			<< "by a space.\n"
		<< "Terminate query (or session) with a dot( . ).\n\n"
			<< "==> ";
	   
	   	while( cin  >> text ) 
   		{ 
		     if ( text == "." )
		          break;

		     // remove all capitalization ...
 		     string::size_type pos = 0;
     while (( pos = text.find_first_of( caps, pos )) 
				     != string::npos )
                          text[pos] = tolower( text[pos] );

		     query_text.push_back( text );
	   	}

	   	if ( ! query_text.empty() ) 
		{
	   		user_query.query( &query_text );
	   		query = user_query.eval();
	   		query->eval();
	   		display_solution();
	    		cout << endl;
		}
        }
	while ( ! query_text.empty() );
        cout << "Ok, bye!\n";
}

void
TextQuery::
display_map_text()
{
        typedef map<string,loc*,less<string>,allocator> map_text;
        map_text::iterator iter = word_map->begin(), iter_end = word_map->end();

        while ( iter != iter_end ) {
                cerr << "word: " << (*iter).first << " (";

                int           loc_cnt = 0;
                loc          *text_locs = (*iter).second;
                loc::iterator liter     = text_locs->begin(),
                              liter_end = text_locs->end();

                while ( liter != liter_end )
                {
                        if ( loc_cnt )
                             cerr << ",";
                        else ++loc_cnt;

                        cerr << "(" << (*liter).first
                             << "," << (*liter).second << ")";

                        ++liter;
                }

                cerr << ")\n";
                ++iter;
        }

        cerr << endl;
}

void
TextQuery::
display_text_locations()
{
        vector<string,allocator>   *text_words     = text_locations->first;
        vector<location,allocator> *text_locs      = text_locations->second;

        register int elem_cnt = text_words->size();

        if ( elem_cnt != text_locs->size() )
        {
             cerr << "oops! internal error: word and position vectors "
                  << "are of unequal size\n"
                  << "words: " << elem_cnt << " "
                  << "locs: "  << text_locs->size()
                  << " -- bailing out!\n";
             exit( -2 );
        }

        for ( int ix = 0; ix < elem_cnt; ix++ )
        {
                cout << "word: " << (*text_words)[ ix ] << "\t"
                     << "location: ("
                     << (*text_locs)[ix].first  << ","
                     << (*text_locs)[ix].second << ")"
                     << "\n";
        }

        cout << endl;
}

/*
sample input text:
------------------

Alice Emma has long flowing red hair. Her Daddy says
when the wind blows through her hair, it looks almost alive,
like a fiery bird in flight. A beautiful fiery bird, he tells her,
magical but untamed. "Daddy, shush, there is no such thing,"
she tells him, at the same time wanting him to tell her more.
Shyly, she asks, "I mean, Daddy, is there?"

---------------------
sample query session:
---------------------

please enter file name: alice_emma

warning! unable to open word exclusion file! -- using default set

enter a word against which to search the text.
to quit, enter a single character ==>  alice

alice occurs 1 time:

        ( line 1 ) Alice Emma has long flowing red hair. Her Daddy says

enter a word against which to search the text.
to quit, enter a single character ==>  daddy

daddy occurs 3 times:

        ( line 1 ) Alice Emma has long flowing red hair. Her Daddy says
        ( line 4 ) magical but untamed. "Daddy, shush, there is no such thing,"
        ( line 6 ) Shyly, she asks, "I mean, Daddy, is there?"

enter a word against which to search the text.
to quit, enter a single character ==>  phoenix

Sorry. There are no entries for phoenix.

enter a word against which to search the text.
to quit, enter a single character ==>  .
Ok, bye!

----------------------------------------------------------
sample text map after: (a) stripping out punctuation,
(b) eliminating semantically neutral words such as `the`,
(c) suffixing, so that fixes and fix become fix, and
(d) removal of capitalization
-----------------------------------------------------------

word: alice ((0,0))
word: alive ((1,10))
word: almost ((1,9))
word: ask ((5,2))
word: beautiful ((2,7))
word: bird ((2,3),(2,9))
word: blow ((1,3))
word: daddy ((0,8),(3,3),(5,5))
word: emma ((0,1))
word: fiery ((2,2),(2,8))
word: flight ((2,5))
word: flowing ((0,4))
word: hair ((0,6),(1,6))
word: has ((0,2))
word: like ((2,0))
word: long ((0,3))
word: look ((1,8))
word: magical ((3,0))
word: mean ((5,4))
word: more ((4,12))
word: red ((0,5))
word: same ((4,5))
word: say ((0,9))
word: she ((4,0),(5,1))
word: shush ((3,4))
word: shyly ((5,0))
word: such ((3,8))
word: tell ((2,11),(4,1),(4,10))
word: there ((3,5),(5,7))
word: thing ((3,9))
word: through ((1,4))
word: time ((4,6))
word: untamed ((3,2))
word: wanting ((4,7))
word: wind ((1,2))

*/
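
The normalization steps (a) through (d) described above can be exercised end to end on one line of the sample text. The sketch below is a compressed, standalone rendition of filter_text(), strip_caps() and the length/exclusion test from build_word_map(), with only a crude stand-in for the suffix handling; the helper name normalize() and the tiny two-word exclusion set are invented for the example and are not part of the original class. The steps are folded into one pass per word, so their order differs slightly from the member functions above.

// Standalone sketch of normalization steps (a)-(d); normalize() is an
// illustrative helper, not part of the TextQuery interface.
#include <cctype>
#include <iostream>
#include <set>
#include <sstream>
#include <string>
using namespace std;

string normalize( string word, const set<string> &excluded )
{
        const string filt_elems( "\",.;:!?)(\\/" );

        // (a) strip punctuation
        string::size_type pos = 0;
        while (( pos = word.find_first_of( filt_elems, pos )) != string::npos )
                word.erase( pos, 1 );

        // (d) remove capitalization
        for ( string::size_type ix = 0; ix < word.size(); ++ix )
                word[ix] = tolower( (unsigned char)word[ix] );

        // (b) drop short or semantically neutral words
        if ( word.size() < 3 || excluded.count( word ))
                return string();

        // (c) crude stand-in for suffixing: drop a plain trailing 's'
        //     (cf. the full rules in suffix_s())
        if ( word.size() > 3 && word[ word.size()-1 ] == 's' )
                word.erase( word.size()-1 );

        return word;
}

int main()
{
        set<string> excluded;
        excluded.insert( "the" );
        excluded.insert( "her" );

        istringstream line( "Alice Emma has long flowing red hair. Her Daddy says" );
        string word;

        while ( line >> word ) {
                string w = normalize( word, excluded );
                if ( ! w.empty() )
                        cout << w << '\n';
        }
        return 0;
}

Fed the first line of alice_emma, this prints alice, emma, has, long, flowing, red, hair, daddy and say, one per line, which agrees with the line-0 entries of the text map above; Her is dropped by the exclusion set and says loses its trailing s.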
