package LWP::RobotUA;

use parent qw(LWP::UserAgent);

our $VERSION = '6.77';

require WWW::RobotRules;
require HTTP::Request;
require HTTP::Response;

use Carp ();
use HTTP::Status ();
use HTTP::Date qw(time2str);
use strict;


#
# Attributes in addition to those found in LWP::UserAgent:
#
# $self->{'delay'}     Required delay between requests to the same
#                      server, in minutes.
#
# $self->{'rules'}     A WWW::RobotRules object
#

# Construct a robot user agent.  Accepts either a key/value option list
# or the legacy positional form ($agent, $from [, $rules]).  The 'agent'
# name and a 'from' mail address are mandatory; the RobotUA-specific
# options 'delay', 'use_sleep' and 'rules' are stripped out before the
# remainder is handed to the LWP::UserAgent constructor.
sub new
{
    my $class = shift;

    # Fewer than four raw arguments cannot be an option hash with more
    # than one pair, so treat that as the legacy calling convention.
    my %opt;
    if (@_ >= 4) {
        %opt = @_;
    }
    else {
        @opt{qw(agent from rules)} = @_;
    }

    Carp::croak('LWP::RobotUA agent required') unless $opt{agent};
    Carp::croak('LWP::RobotUA from address required')
        unless $opt{from} && $opt{from} =~ m/\@/;

    # Pull out our private options before the parent sees them.
    my $delay     = delete $opt{delay} || 1;    # minutes between hits
    my $use_sleep = delete $opt{use_sleep};
    my $rules     = delete $opt{rules};
    $use_sleep = 1 if !defined $use_sleep;      # sleep by default

    my $self = bless LWP::UserAgent->new(%opt), $class;

    $self->{'delay'}     = $delay;              # minutes
    $self->{'use_sleep'} = $use_sleep;

    # Reuse a caller-supplied rules object (retargeted at our agent
    # name), or start a fresh in-memory robots.txt rules database.
    if ($rules) {
        $rules->agent($opt{agent});
        $self->{'rules'} = $rules;
    }
    else {
        $self->{'rules'} = WWW::RobotRules->new($opt{agent});
    }

    return $self;
}


# Minimum delay between requests to one server, in minutes (get/set).
# _elem() is the attribute accessor helper inherited from LWP::UserAgent.
sub delay {
    my $self = shift;
    return $self->_elem('delay', @_);
}

# Whether to sleep() when a request arrives too early, instead of
# returning a 503 response (get/set).
sub use_sleep {
    my $self = shift;
    return $self->_elem('use_sleep', @_);
}


# Get/set the agent name.  On set, the robots.txt rules object must be
# told the new name too, since rules are matched per agent name.
sub agent
{
    my $self = shift;
    my $previous = $self->SUPER::agent(@_);

    # Changing our name means starting fresh with the rules database.
    $self->{'rules'}->agent($self->{'agent'}) if @_;

    return $previous;
}


# Get/set the WWW::RobotRules object.  A newly installed rules object is
# immediately pointed at our current agent name.
sub rules {
    my $self = shift;
    my $previous = $self->_elem('rules', @_);
    if (@_) {
        $self->{'rules'}->agent($self->{'agent'});
    }
    return $previous;
}


# Number of documents fetched from the given server, 0 when unknown.
sub no_visits
{
    my ($self, $netloc) = @_;
    my $count = $self->{'rules'}->no_visits($netloc);
    return $count || 0;
}

# backwards compatibility with LWP-5.02
*host_count = \&no_visits;


# Seconds (from now) to wait before the given host may be contacted
# again.  Returns undef when no netloc is given, and 0 for hosts that
# have never been visited or whose delay has already elapsed.
sub host_wait
{
    my ($self, $netloc) = @_;
    return undef unless defined $netloc;

    my $last_visit = $self->{'rules'}->last_visit($netloc);
    return 0 unless $last_visit;

    # {'delay'} is in minutes; clamp at zero once the window has passed.
    my $remaining = int($self->{'delay'} * 60 - (time - $last_visit));
    return $remaining > 0 ? $remaining : 0;
}


# Overrides LWP::UserAgent::simple_request to enforce robot etiquette:
# consult (and, when stale or unknown, fetch) the server's robots.txt,
# refuse disallowed URLs with an internally generated 403, and throttle
# repeat visits to a host by sleeping or returning a 503 with a
# Retry-After header, depending on the 'use_sleep' attribute.
sub simple_request
{
    my($self, $request, $arg, $size) = @_;

    # Do we try to access a new server?
    # allowed() returns a negative value when no (fresh) robots.txt
    # information is cached for this host yet.
    my $allowed = $self->{'rules'}->allowed($request->uri);

    if ($allowed < 0) {
	# Host is not visited before, or robots.txt expired; fetch "robots.txt"
	my $robot_url = $request->uri->clone;
	$robot_url->path("robots.txt");
	$robot_url->query(undef);

	# make access to robot.txt legal since this will be a recursive call
	# (an empty rule set allows everything, including $robot_url itself)
	$self->{'rules'}->parse($robot_url, "");

	my $robot_req = HTTP::Request->new('GET', $robot_url);
	# Temporarily disable <head> parsing for the robots.txt fetch,
	# restoring the caller's setting afterwards.
	my $parse_head = $self->parse_head(0);
	my $robot_res = $self->request($robot_req);
	$self->parse_head($parse_head);
	my $fresh_until = $robot_res->fresh_until;
	my $content = "";
	if ($robot_res->is_success && $robot_res->content_is_text) {
	    $content = $robot_res->decoded_content;
	    # Guard against HTML error pages served with a 200 status:
	    # only keep bodies that actually contain a Disallow rule.
	    $content = "" unless $content && $content =~ /^\s*Disallow\s*:/mi;
	}
	# Cache the (possibly empty) rule set until the response goes stale.
	$self->{'rules'}->parse($robot_url, $content, $fresh_until);

	# recalculate allowed... (now decides definitively yes/no)
	$allowed = $self->{'rules'}->allowed($request->uri);
    }

    # Check rules: a false value means robots.txt forbids this URL.
    unless ($allowed) {
	my $res = HTTP::Response->new(
	  HTTP::Status::RC_FORBIDDEN, 'Forbidden by robots.txt');
	$res->request( $request ); # bind it to that request
	return $res;
    }

    # host_port may die for schemes without a network location (e.g.
    # mailto:); swallow that and proceed with an undef netloc.
    my $netloc = eval { local $SIG{__DIE__}; $request->uri->host_port; };
    my $wait = $self->host_wait($netloc);

    if ($wait) {
	if ($self->{'use_sleep'}) {
	    sleep($wait)
	}
	else {
	    # Caller opted out of sleeping: report when to retry instead.
	    my $res = HTTP::Response->new(
	      HTTP::Status::RC_SERVICE_UNAVAILABLE, 'Please, slow down');
	    $res->header('Retry-After', time2str(time + $wait));
	    $res->request( $request ); # bind it to that request
	    return $res;
	}
    }

    # Perform the request
    my $res = $self->SUPER::simple_request($request, $arg, $size);

    # Record the visit so host_wait() can throttle the next request.
    $self->{'rules'}->visit($netloc);

    $res;
}


# Render a short multi-line description of the UA state (debugging aid).
sub as_string
{
    my $self = shift;

    my @lines = ("Robot: $self->{'agent'} operated by $self->{'from'}  [$self]");
    push @lines, "    Minimum delay: " . int($self->{'delay'} * 60) . "s";
    push @lines, "    Will sleep if too early" if $self->{'use_sleep'};
    push @lines, "    Rules = $self->{'rules'}";

    # The trailing '' makes join() end the string with a newline.
    return join("\n", @lines, '');
}

1;


__END__

=pod

=head1 NAME

LWP::RobotUA - a class for well-behaved Web robots

=head1 SYNOPSIS

  use LWP::RobotUA;
  my $ua = LWP::RobotUA-&gt;new('my-robot/0.1', 'me@foo.com');
  $ua-&gt;delay(10);  # be very nice -- max one hit every ten minutes!
  ...

  # Then just use it just like a normal LWP::UserAgent:
  my $response = $ua-&gt;get('http://whatever.int/...');
  ...

=head1 DESCRIPTION

This class implements a user agent that is suitable for robot
applications.  Robots should be nice to the servers they visit.  They
should consult the F&lt;/robots.txt&gt; file to ensure that they are welcomed
and they should not make requests too frequently.

But before you consider writing a robot, take a look at
L&lt;http://www.robotstxt.org/&gt;.

When you use an I&lt;LWP::RobotUA&gt; object as your user agent, then you do not
really have to think about these things yourself; C&lt;robots.txt&gt; files
are automatically consulted and obeyed, the server isn't queried
too rapidly, and so on.  Just send requests
as you do when you are using a normal I&lt;LWP::UserAgent&gt;
object (using C&lt;&lt; $ua-&gt;get(...) &gt;&gt;, C&lt;&lt; $ua-&gt;head(...) &gt;&gt;,
C&lt;&lt; $ua-&gt;request(...) &gt;&gt;, etc.), and this
special agent will make sure you are nice.

=head1 METHODS

The LWP::RobotUA is a sub-class of L&lt;LWP::UserAgent&gt; and implements the
same methods. In addition the following methods are provided:

=head2 new

    my $ua = LWP::RobotUA-&gt;new( %options )
    my $ua = LWP::RobotUA-&gt;new( $agent, $from )
    my $ua = LWP::RobotUA-&gt;new( $agent, $from, $rules )

The LWP::UserAgent options C&lt;agent&gt; and C&lt;from&gt; are mandatory.  The
options C&lt;delay&gt;, C&lt;use_sleep&gt; and C&lt;rules&gt; initialize attributes
private to the RobotUA.  If C&lt;rules&gt; are not provided, then
L&lt;WWW::RobotRules&gt; is instantiated providing an internal database of
F&lt;robots.txt&gt;.

It is also possible to just pass the value of C&lt;agent&gt;, C&lt;from&gt; and
optionally C&lt;rules&gt; as plain positional arguments.

=head2 delay

    my $delay = $ua-&gt;delay;
    $ua-&gt;delay( $minutes );

Get/set the minimum delay between requests to the same server, in
I&lt;minutes&gt;.  The default is C&lt;1&gt; minute.  Note that this number doesn't
have to be an integer; for example, this sets the delay to C&lt;10&gt; seconds:

    $ua-&gt;delay(10/60);

=head2 use_sleep

    my $bool = $ua-&gt;use_sleep;
    $ua-&gt;use_sleep( $boolean );

Get/set a value indicating whether the UA should C&lt;sleep&gt; if
requests arrive too fast, defined as C&lt;&lt; $ua-&gt;delay &gt;&gt; minutes not passed since
last request to the given server.  The default is true.  If this value is
false then an internal C&lt;SERVICE_UNAVAILABLE&gt; response will be generated.
It will have a C&lt;Retry-After&gt; header that indicates when it is OK to
send another request to this server.

=head2 rules

    my $rules = $ua-&gt;rules;
    $ua-&gt;rules( $rules );

Set/get which I&lt;WWW::RobotRules&gt; object to use.

=head2 no_visits

    my $num = $ua-&gt;no_visits( $netloc )

Returns the number of documents fetched from this server host. Yeah I
know, this method should probably have been named C&lt;num_visits&gt; or
something like that. :-(

=head2 host_wait

    my $num = $ua-&gt;host_wait( $netloc )

Returns the number of I&lt;seconds&gt; (from now) you must wait before you can
make a new request to this host.

=head2 as_string

    my $string = $ua-&gt;as_string;

Returns a string that describes the state of the UA.
Mainly useful for debugging.

=head1 SEE ALSO

L&lt;LWP::UserAgent&gt;, L&lt;WWW::RobotRules&gt;

=head1 COPYRIGHT

Copyright 1996-2004 Gisle Aas.

This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.

=cut