mailnews.pl

namazu. Although it is a Japanese-language tool, it also works for full-text search of files once their words have been indexed.
## -*- Perl -*-
# $Id: mailnews.pl,v 1.27 2001/04/23 09:09:38 knok Exp $
# Copyright (C) 1997-2000 Satoru Takabayashi,
#               1999 NOKUBI Takatsugu All rights reserved.
#     This is free software with ABSOLUTELY NO WARRANTY.
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2, or (at your option)
#  any later version.
#
#  This program is distributed in the hope that it will be useful
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
#  02111-1307, USA
#
#  This file must be encoded in EUC-JP encoding
#

package mailnews;
use strict;
require 'util.pl';
require 'gfilter.pl';

sub mediatype() {
    return ('message/rfc822', 'message/news');
}

sub status() {
    return 'yes';
}

sub recursive() {
    return 0;
}

sub pre_codeconv() {
    return 1;
}

sub post_codeconv () {
    return 0;
}

sub add_magic ($) {
    return;
}

sub filter ($$$$$) {
    my ($orig_cfile, $cont, $weighted_str, $headings, $fields) = @_;
    my $cfile = defined $orig_cfile ? $$orig_cfile : '';

    util::vprint("Processing mail/news file ...\n");

    uuencode_filter($cont);
    mailnews_filter($cont, $weighted_str, $fields);
    mailnews_citation_filter($cont, $weighted_str);

    gfilter::line_adjust_filter($cont);
    gfilter::line_adjust_filter($weighted_str);
    gfilter::white_space_adjust_filter($cont);
    gfilter::white_space_adjust_filter($weighted_str);
    gfilter::show_filter_debug_info($cont, $weighted_str,
                                    $fields, $headings);
    return undef;
}

# The original of this code was contributed by <furukawa@tcp-ip.or.jp>.
sub mailnews_filter ($$$) {
    my ($contref, $weighted_str, $fields) = @_;
    my $boundary = "";
    my $line     = "";
    my $partial  = 0;

    $$contref =~ s/^\s+//;

    # Don't handle it if the first line doesn't look like a mail/news header.
    return unless $$contref =~ /(^\S+:|^from )/i;

    my @tmp = split(/\n/, $$contref);

  HEADER_PROCESSING:
    while (@tmp) {
        $line = shift @tmp;
        last if ($line =~ /^$/);  # an empty line means the header is over

        # Join the two lines if the next line has leading spaces.
        while (defined($tmp[0]) && $tmp[0] =~ /^\s+/) {
            # If the joint is a Japanese character, remove the spaces
            # (from Furukawa-san's idea [1998-09-22]).
            my $nextline = shift @tmp;
            $line =~ s/([\xa1-\xfe])\s+$/$1/;
            $nextline =~ s/^\s+([\xa1-\xfe])/$1/;
            $line .= $nextline;
        }

        # Handle fields.
        if ($line =~ s/^subject:\s*//i){
            $fields->{'title'} = $line;

            # Skip [foobar-ML:000] for a typical mailing list subject.
            # Practically, skip the first [...] for a simple implementation.
            $line =~ s/^\[.*?\]\s*//;
            # Skip 'Re:'
            $line =~ s/\bre:\s*//gi;

            my $weight = $conf::Weight{'html'}->{'title'};
            $$weighted_str .= "\x7f$weight\x7f$line\x7f/$weight\x7f\n";
        } elsif ($line =~ s/^content-type:\s*//i) {
            if ($line =~ /multipart.*boundary="(.*)"/i){
                $boundary = $1;
                util::dprint("((boundary: $boundary))\n");
            } elsif ($line =~ m!message/partial;\s*(.*)!i) {
                # The Message/Partial subtype routine [1998-10-12]
                # contributed by Hiroshi Kato <tumibito@mm.rd.nttdata.co.jp>
                $partial = $1;
                util::dprint("((partial: $partial))\n");
            }
        } elsif ($line =~ /^(\S+):\s*(.*)/i) {
            my $name = $1;
            my $value = $2;
            $fields->{lc($name)} = $value;
            if ($name =~ /^($conf::REMAIN_HEADER)$/io) {
                # Keep the fields specified by REMAIN_HEADER as search keywords.
                my $weight = $conf::Weight{'headers'};
                $$weighted_str .=
                    "\x7f$weight\x7f$value\x7f/$weight\x7f\n";
            }
        }
    }

    if ($partial) {
        # MHonArc puts several empty lines between header and body,
        # so remove them.
        while (@tmp) {
            last if $line !~ /^\s*$/;
            $line = shift @tmp;
        }
        undef $partial;
        goto HEADER_PROCESSING;
    }

    $$contref = join("\n", @tmp);

    # Handle MIME multipart messages.
    if ($boundary) {
        $boundary =~ s/(\W)/\\$1/g;
        $$contref =~ s/This is multipart message.\n//i;

        # MIME multipart processing,
        # modified by Furukawa-san's patch on [1998/08/27].
        $$contref =~ s/--$boundary(--)?\n?/\xff/g;
        my (@parts) = split(/\xff/, $$contref);
        $$contref = '';
        for $_ (@parts) {
            if (s/^(.*?\n\n)//s) {
                my ($head) = $1;
                $$contref .= $_ if $head =~ m!^content-type:.*text/plain!mi;
            }
        }
    }
}

# Keep mail/news citation marks from being indexed,
# along with a greeting message at the beginning
# and meaningless messages such as "foo wrote:".
# Thanks to Akira Yamada for the great idea.
sub mailnews_citation_filter ($$) {
    my ($contref, $weighted_str) = @_;
    my $omake = "";

    $$contref =~ s/^\s+//;
    my @tmp = split(/\n/, $$contref);
    $$contref = "";

    # Greeting at the beginning (first one or two lines).
    for (my $i = 0; $i < 2 && defined($tmp[$i]); $i++) {
        if ($tmp[$i] =~ /(^\s*((([\xa1-\xfe][\xa1-\xfe]){1,8}|([\x21-\x7e]{1,16}))\s*(。|ˉ|\.|·|,|、|\@|△|の)\s*){0,2}\s*(([\xa1-\xfe][\xa1-\xfe]){1,8}|([\x21-\x7e]{1,16}))\s*(です|と拷します|ともうします|といいます)(.{0,2})?\s*$)/) {
            # For searching debug info by perl -n00e 'print if /^<<<</'
            util::dprint("\n\n<<<<$tmp[$i]>>>>\n\n");
            $omake .= $tmp[$i] . "\n";
            $tmp[$i] = "";
        }
    }

    # Isolate citation parts.
    for my $line (@tmp) {
        # Don't do that if there is an HTML tag at the beginning.
        if ($line !~ /^[^>]*</ &&
            $line =~ s/^((\S{1,10}>)|(\s*[\>\|\:\#]+\s*))+//) {
            $omake .= $line . "\n";
            $$contref .= "\n";  # Insert LF.
            next;
        }
        $$contref .= $line . "\n";
    }

    # Process text as chunks of paragraphs.
    # Isolate meaningless messages such as "foo wrote:".
    @tmp = split(/\n\n+/, $$contref);
    $$contref = "";
    my $i = 0;
    for my $line (@tmp) {
        # Excluding them completely is impossible; I think this is good enough.
        # Process only the first five paragraphs,
        # and don't handle a paragraph which has five or more lines.
        # Hmm, this regex looks very hairy.
        if ($i < 5 && ($line =~ tr/\n/\n/) <= 5 && $line =~ /(^\s*(Date:|Subject:|Message-ID:|From:|鳳嘆|汗叫客|泣箕))|(^.+(手禍です|reply\s*です|鄆く|いわく|今きました|咐いました|廈で|wrote|said|writes|says)(.{0,2})?\s*$)|(^.*In .*(article|message))|(<\S+\@([\w\-.]\.)+\w+>)/im) {
            util::dprint("\n\n<<<<$line>>>>\n\n");
            $omake .= $line . "\n";
            $line = "";
            next;
        }
        $$contref .= $line . "\n\n";
        $i++;
    }

    $$weighted_str .= "\x7f1\x7f$omake\x7f/1\x7f\n";
}

# Skip uuencoded and BinHex texts.
# The original of this code was contributed by <furukawa@tcp-ip.or.jp>.
sub uuencode_filter ($) {
    my ($content) = @_;
    my @tmp = split(/\n/, $$content);
    $$content = "";

    my $uuin = 0;
    while (@tmp) {
        my $line = shift @tmp;
        $line .= "\n";

        # Skip BinHex texts.
        # All remaining lines will be skipped.
        last if $line =~ /^\(This file must be converted with BinHex/; #)

        # Skip uuencoded texts.
        # References : SunOS 4.1.4: man 5 uuencode
        #              FreeBSD 2.2: uuencode.c
        # To avoid accidental matching, check the format.
        #
        # There are many netnews messages which are separated into several
        # files. These files usually have no "begin" line.
        # This function handles them as well.
        #
        # There are two fashions for line length, 62 and 63.
        # This function handles both.
        #
        # Following the specification strictly,
        # int((ord($line) - ord(' ') + 2) / 3)
        #     != (length($line) - 2) / 4
        # but it can be transformed into a simple equation:
        # 4 * int(ord($line) / 3) != length($line) + $uunumb;
        # Hey, SunOS's uuencode uses SPACE for encoding,
        # but allowing SPACE is dangerous because of misrecognition.
        # As a compromise, only the following cases are acceptable:
        #   1. inside of begin - end
        #   2. the previous line was recognized as a uuencoded line
        #      and ord is identical with the previous one.
        # A line consisting only of characters 0x20-0x60 is recognized
        # as a uuencoded line. v1.1.2.3 (bug fix)
        $uuin = 1, next if $line =~ /^begin [0-7]{3,4} \S+$/;
        if ($line =~ /^end$/) {
            $uuin = 0, next if $uuin;
        } else {
            # Restrict the ord value to the range of 32-95.
            my $uuord = ord($line);
            $uuord = 32 if $uuord == 96;
            # If the line of uunumb = 38 got past this check,
            # a normal line of length 63 could be ruined accidentally.
            my $uunumb = (length($line) == 63) ? 37 : 38;
            if ((32 <= $uuord && $uuord < 96) &&
                length($line) <= 63 &&
                (4 * int($uuord / 3) == length($line) + $uunumb)) {
                if ($uuin == 1 || $uuin == $uuord) {
                    next if $line =~ /^[\x20-\x60]+$/;
                } else {
                    # Be strict with files which don't begin with "begin".
                    $uuin = $uuord, next if $line =~ /^M[\x21-\x60]+$/;
                }
            }
        }
        $uuin = 0;
        $$content .= $line;
    }
}

1;
