From 12a0dafbaa1677b15138757d04ecb0fcc57a810a Mon Sep 17 00:00:00 2001
From: davehome
Date: Thu, 14 Jul 2011 11:33:28 -0600
Subject: [PATCH] New package: perl-WWW-RobotRules-6.01.

---
 srcpkgs/perl-WWW-RobotRules/template | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 srcpkgs/perl-WWW-RobotRules/template

diff --git a/srcpkgs/perl-WWW-RobotRules/template b/srcpkgs/perl-WWW-RobotRules/template
new file mode 100644
index 00000000000..2587da892ab
--- /dev/null
+++ b/srcpkgs/perl-WWW-RobotRules/template
@@ -0,0 +1,26 @@
+# Template build file for 'perl-WWW-RobotRules'.
+pkgname=perl-WWW-RobotRules
+version=6.01
+wrksrc="WWW-RobotRules-$version"
+distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
+build_style=perl_module
+short_desc="WWW::RobotRules - database of robots.txt-derived permissions"
+maintainer="davehome "
+homepage="http://search.cpan.org/~gaas/WWW-RobotRules-6.01/lib/WWW/RobotRules.pm"
+license="GPL-2"
+checksum=f817e3e982c9d869c7796bcb5737c3422c2272355424acd162d0f3b132bec9d3
+long_desc="
+ This module parses /robots.txt files as specified in
+ A Standard for Robot Exclusion, at http://www.robotstxt.org/wc/norobots.html
+ Webmasters can use the /robots.txt file to forbid conforming robots from
+ accessing parts of their web site.
+
+ The parsed files are kept in a WWW::RobotRules object, and this object
+ provides methods to check if access to a given URL is prohibited. The same
+ WWW::RobotRules object can be used for one or more parsed /robots.txt files
+ on any number of hosts."
+
+noarch=yes
+
+Add_dependency full perl-URI
+Add_dependency full perl
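
For reviewers unfamiliar with the module packaged here: the long_desc above
summarizes its documented interface (a WWW::RobotRules object that stores
parsed /robots.txt data and answers per-URL permission checks). The sketch
below is adapted from that documented usage; the robot name and the
example.org URLs are placeholders, and LWP::Simple is used only to fetch the
robots.txt text for the example.

    #!/usr/bin/perl
    # Minimal sketch of the documented WWW::RobotRules usage; robot name,
    # host, and URLs are placeholder values, not part of this package.
    use strict;
    use warnings;
    use WWW::RobotRules;
    use LWP::Simple qw(get);

    # One rules object can hold parsed /robots.txt data for many hosts.
    my $rules = WWW::RobotRules->new('MyRobot/1.0');

    my $robots_url = 'http://example.org/robots.txt';
    my $robots_txt = get($robots_url);
    $rules->parse($robots_url, $robots_txt) if defined $robots_txt;

    # allowed() consults the stored rules for the URL's host.
    my $url = 'http://example.org/private/page.html';
    if ($rules->allowed($url)) {
        print "Fetching allowed: $url\n";
    } else {
        print "Blocked by robots.txt: $url\n";
    }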