# $Id: RobotRules.pm,v 1.16 1998/06/09 12:14:43 aas Exp $

package WWW::RobotRules;

=head1 NAME

WWW::RobotRules - Parse robots.txt files

=head1 SYNOPSIS

 require WWW::RobotRules;
 my $robotsrules = new WWW::RobotRules 'MOMspider/1.0';

 use LWP::Simple qw(get);

 $url = "http://some.place/robots.txt";
 my $robots_txt = get $url;
 $robotsrules->parse($url, $robots_txt);

 $url = "http://some.other.place/robots.txt";
 $robots_txt = get $url;
 $robotsrules->parse($url, $robots_txt);

 # Now we can check whether the servers whose "robots.txt" files we
 # have fetched and parsed allow us to retrieve a given URL.
 if($robotsrules->allowed($url)) {
     $c = get $url;
     ...
 }

=head1 DESCRIPTION

This module parses a F<robots.txt> file as specified in
"A Standard for Robot Exclusion", described in
<URL:http://info.webcrawler.com/mak/projects/robots/norobots.html>.
Webmasters can use the F<robots.txt> file to disallow conforming
robots access to parts of their WWW server.

The parsed files are kept in the WWW::RobotRules object, and this
object provides methods to check if access to a given URL is
prohibited.  The same WWW::RobotRules object can parse multiple
F<robots.txt> files.

The following methods are provided:

=over 4

=cut

$VERSION = sprintf("%d.%02d", q$Revision: 1.16 $ =~ /(\d+)\.(\d+)/);
sub Version { $VERSION; }


use URI::URL ();
use strict;


=item $rules = new WWW::RobotRules 'MOMspider/1.0'

This is the constructor for WWW::RobotRules objects.  The first 
argument given to new() is the name of the robot. 

=cut

sub new {
    my($class, $ua) = @_;

    # This ugly hack is needed to ensure backwards compatibility.
    # The "WWW::RobotRules" class is now really abstract.
    $class = "WWW::RobotRules::InCore" if $class eq "WWW::RobotRules";

    my $self = bless { }, $class;
    $self->agent($ua);
    $self;
}


=item $rules->parse($url, $content, $fresh_until)

The parse() method takes as arguments the URL that was used to
retrieve the F</robots.txt> file, and the contents of the file.  The
optional third argument gives the time (in seconds since the epoch)
until which the parsed rules should be considered fresh; it defaults
to one year from the time of parsing.

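For example, a minimal sketch, assuming $robotsrules is a
WWW::RobotRules object as in the SYNOPSIS (the URL and the one-day
lifetime are only illustrative):

 use LWP::Simple qw(get);

 my $url = "http://some.place/robots.txt";
 my $robots_txt = get $url;
 # keep these rules for one day instead of the default one year
 $robotsrules->parse($url, $robots_txt, time + 24*60*60);
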
=cut

sub parse {
    my($self, $url, $txt, $fresh_until) = @_;

    $url = new URI::URL $url unless ref($url);	# make it URL
    my $netloc = $url->netloc;

    $self->clear_rules($netloc);
    $self->fresh_until($netloc, $fresh_until || (time + 365*24*3600));

    my $ua;
    my $is_me = 0;		# 1 iff this record is for me
    my $is_anon = 0;		# 1 iff this record is for *
    my @me_disallowed = ();	# rules disallowed for me
    my @anon_disallowed = ();	# rules disallowed for *

    # blank lines are significant, so turn CRLF into LF to avoid generating
    # false ones
    $txt =~ s/\015\012/\012/g;

    # split at \012 (LF) or \015 (CR) (Mac text files have just CR for EOL)
    for(split(/[\012\015]/, $txt)) {

	# Lines containing only a comment are discarded completely, and
        # therefore do not indicate a record boundary.
	next if /^\s*\#/;

	s/\s*\#.*//;        # remove comments at end-of-line

	if (/^\s*$/) {	    # blank line
	    last if $is_me; # That was our record. No need to read the rest.
	    $is_anon = 0;
	}
        elsif (/^User-Agent:\s*(.*)/i) {
	    $ua = $1;
	    $ua =~ s/\s+$//;
	    if ($is_me) {
		# This record already had a User-agent that
		# we matched, so just continue.
	    }
	    elsif ($ua eq '*') {
		$is_anon = 1;
	    }
	    elsif($self->is_me($ua)) {
		$is_me = 1;
	    }
	}
	elsif (/^Disallow:\s*(.*)/i) {
	    unless (defined $ua) {
		warn "RobotRules: Disallow without preceding User-agent\n";
		$is_anon = 1;  # assume that User-agent: * was intended
	    }
	    my $disallow = $1;
	    $disallow =~ s/\s+$//;
	    if (length $disallow) {
		$disallow = URI::URL->new($disallow, $url)->full_path;
	    }

	    if ($is_me) {
		push(@me_disallowed, $disallow);
	    }
	    elsif ($is_anon) {
		push(@anon_disallowed, $disallow);
	    }
	}
	else {
	    warn "RobotRules: Unexpected line: $_\n";
	}
    }

    if ($is_me) {
	$self->push_rules($netloc, @me_disallowed);
    } else {
	$self->push_rules($netloc, @anon_disallowed);
    }
}

# is_me()
#
# Returns TRUE if the given name matches the
# name of this robot
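# (e.g. with the agent name "MOMspider", $self->is_me("MOMspider/2.0")
# returns true)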
#
sub is_me {
    my($self, $ua) = @_;
    my $me = $self->agent;
    return index(lc($ua), lc($me)) >= 0;
}

=item $rules->allowed($url)

Returns TRUE if this robot is allowed to retrieve this URL.  If no
rules have been parsed for this server, or if the parsed rules are no
longer fresh, -1 is returned instead.

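A sketch of acting on the return value, assuming $robotsrules already
holds parsed rules (the URL is only illustrative):

 use LWP::Simple qw(get);

 my $url = "http://some.place/doc.html";
 my $ok  = $robotsrules->allowed($url);
 if ($ok < 0) {
     # rules are missing or stale; fetch and parse robots.txt again
 }
 elsif ($ok) {
     my $content = get $url;
     # ... process $content ...
 }
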
=cut

sub allowed {
    my($self, $url) = @_;
    $url = URI::URL->new($url) unless ref $url;	# make it URL

    my $netloc = $url->netloc;

    my $fresh_until = $self->fresh_until($netloc);
    return -1 if !defined($fresh_until) || $fresh_until < time;

    my $str = $url->full_path;
    my $rule;
    for $rule ($self->rules($netloc)) {
	return 1 unless length $rule;
	return 0 if index($str, $rule) == 0;
    }
    return 1;
}

# The following methods must be provided by the subclass.
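# (WWW::RobotRules::InCore below is one such subclass; the
# WWW::RobotRules::AnyDBM_File module mentioned in SEE ALSO is another.)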
sub agent;
sub visit;
sub no_visits;
sub last_visit;
sub fresh_until;
sub push_rules;
sub clear_rules;
sub rules;
sub dump;

package WWW::RobotRules::InCore;

use vars qw(@ISA);
@ISA = qw(WWW::RobotRules);

=item $rules->agent([$name])

Get/set the agent name.  NOTE: Changing the agent name clears all
cached F<robots.txt> rules and expiry times.

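A short sketch (the agent names are only illustrative):

 my $rules = WWW::RobotRules->new('MOMspider/1.0');
 print $rules->agent, "\n";      # prints "MOMspider"; the version is stripped

 $rules->agent('OtherBot/2.0');  # discards any cached rules and expiry times
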
=cut

sub agent {
    my ($self, $name) = @_;
    my $old = $self->{'ua'};
    if ($name) {
	delete $self->{'loc'};   # all old info is now stale
	$name =~ s!/?\s*\d+\.\d+\s*$!!;  # drop the trailing version number
	$self->{'ua'} = $name;
    }
    $old;
}

sub visit {
    my($self, $netloc, $time) = @_;
    $time ||= time;
    $self->{'loc'}{$netloc}{'last'} = $time;
    
    my $count = \$self->{'loc'}{$netloc}{'count'};
    if (!defined $$count) {
	$$count = 1;
    } else {
	$$count++;
    }
}

sub no_visits {
    my ($self, $netloc) = @_;
    $self->{'loc'}{$netloc}{'count'};
}

sub last_visit {
    my ($self, $netloc) = @_;
    $self->{'loc'}{$netloc}{'last'};
}

sub fresh_until {
    my ($self, $netloc, $fresh_until) = @_;
    my $old = $self->{'loc'}{$netloc}{'fresh'};
    if (defined $fresh_until) {
	$self->{'loc'}{$netloc}{'fresh'} = $fresh_until;
    }
    $old;
}

sub push_rules {
    my($self, $netloc, @rules) = @_;
    push (@{$self->{'loc'}{$netloc}{'rules'}}, @rules);
}

sub clear_rules {
    my($self, $netloc) = @_;
    delete $self->{'loc'}{$netloc}{'rules'};
}

sub rules {
    my($self, $netloc) = @_;
    if (defined $self->{'loc'}{$netloc}{'rules'}) {
	return @{$self->{'loc'}{$netloc}{'rules'}};
    } else {
	return ();
    }
}

sub dump
{
    my $self = shift;
    for (keys %$self) {
	next if $_ eq 'loc';
	print "$_ = $self->{$_}\n";
    }
    for (keys %{$self->{'loc'}}) {
	my @rules = $self->rules($_);
	print "$_: ", join("; ", @rules), "\n";
	
    }
}

1;

__END__

=back

=head1 ROBOTS.TXT

The format and semantics of the "/robots.txt" file are as follows
(this is an edited abstract of
<URL:http://info.webcrawler.com/mak/projects/robots/norobots.html>):

The file consists of one or more records separated by one or more
blank lines. Each record contains lines of the form

  <field-name>: <value>

The field name is case insensitive.  Text after the '#' character on a
line is ignored during parsing.  This is used for comments.  The
following <field-names> can be used:

=over 3

=item User-Agent

The value of this field is the name of the robot the record is
describing access policy for.  If more than one I<User-Agent> field
is present, the record describes an identical access policy for more
than one robot.  At least one field needs to be present per record.
If the value is '*', the record describes the default access policy
for any robot that has not matched any of the other records.

=item Disallow

The value of this field specifies a partial URL that is not to be
visited. This can be a full path, or a partial path; any URL that
starts with this value will not be retrieved.

=back

=head1 ROBOTS.TXT EXAMPLES

The following example "/robots.txt" file specifies that no robots
should visit any URL starting with "/cyberworld/map/" or "/tmp/":

  User-agent: *
  Disallow: /cyberworld/map/ # This is an infinite virtual URL space
  Disallow: /tmp/ # these will soon disappear

This example "/robots.txt" file specifies that no robots should visit
any URL starting with "/cyberworld/map/", except the robot called
"cybermapper":

  User-agent: *
  Disallow: /cyberworld/map/ # This is an infinite virtual URL space

  # Cybermapper knows where to go.
  User-agent: cybermapper
  Disallow:

This example indicates that no robots should visit this site further:

  # go away
  User-agent: *
  Disallow: /

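As a quick sketch of that last example being applied by this module
(the host name is only illustrative):

 my $rules = WWW::RobotRules->new('MOMspider/1.0');
 $rules->parse("http://some.place/robots.txt",
               "User-agent: *\nDisallow: /\n");

 print $rules->allowed("http://some.place/index.html")
     ? "allowed\n" : "disallowed\n";   # prints "disallowed"
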
=head1 SEE ALSO

L<LWP::RobotUA>, L<WWW::RobotRules::AnyDBM_File>

=cut
