From a3eb919fb4621b94d56522a661b6564d391196e8 Mon Sep 17 00:00:00 2001
From: Andreas Enge
Date: Sun, 8 Dec 2013 22:18:58 +0100
Subject: gnu: Add perl-www-robotrules.

* gnu/packages/web.scm (perl-www-robotrules): New variable.
---
 gnu/packages/web.scm | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

(limited to 'gnu/packages')

diff --git a/gnu/packages/web.scm b/gnu/packages/web.scm
index 013efc1790..d9821d87ec 100644
--- a/gnu/packages/web.scm
+++ b/gnu/packages/web.scm
@@ -295,3 +295,28 @@ the selection of a preferred content representation based upon attributes
 of the negotiable variants and the value of the various Accept* header
 fields in the request.")
     (home-page "http://search.cpan.org/~gaas/HTTP-Negotiate/")))
+
+(define-public perl-www-robotrules
+  (package
+    (name "perl-www-robotrules")
+    (version "6.02")
+    (source (origin
+              (method url-fetch)
+              (uri (string-append
+                    "mirror://cpan/authors/id/G/GA/GAAS/WWW-RobotRules-"
+                    version ".tar.gz"))
+              (sha256
+               (base32
+                "07m50dp5n5jxv3m93i55qvnd67a6g7cvbvlik115kmc8lbkh5da6"))))
+    (build-system perl-build-system)
+    (inputs
+     `(("perl-uri" ,perl-uri)))
+    (license (package-license perl))
+    (synopsis "Perl database of robots.txt-derived permissions")
+    (description
+     "The WWW::RobotRules module parses /robots.txt files as specified in
+\"A Standard for Robot Exclusion\", at
+<http://www.robotstxt.org/wc/norobots.html>.  Webmasters can use the
+/robots.txt file to forbid conforming robots from accessing parts of
+their web site.")
+    (home-page "http://search.cpan.org/~gaas/WWW-RobotRules/")))
--
cgit 1.4.1
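
Usage note (not part of the patch above): once a Guix tree containing this
commit is in use, the new variable can be exercised by its package name from
the command line. A minimal check, assuming a working Guix installation that
includes gnu/packages/web.scm with this change:

    # Build the newly added package; guix prints the resulting store path.
    guix build perl-www-robotrules

    # Optionally install it into the current user's profile.
    guix package -i perl-www-robotrules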