From 22bf775f9c84d9c6853964bc6d6685da4195d187 Mon Sep 17 00:00:00 2001
From: Juan RP
Date: Sat, 22 Feb 2014 09:20:30 +0100
Subject: [PATCH] perl-WWW-RobotRules: remove long_desc.

---
 srcpkgs/perl-WWW-RobotRules/template | 14 ++------------
 1 file changed, 2 insertions(+), 12 deletions(-)

diff --git a/srcpkgs/perl-WWW-RobotRules/template b/srcpkgs/perl-WWW-RobotRules/template
index 4c66d3e169d..95c9df32ceb 100644
--- a/srcpkgs/perl-WWW-RobotRules/template
+++ b/srcpkgs/perl-WWW-RobotRules/template
@@ -1,8 +1,8 @@
 # Template build file for 'perl-WWW-RobotRules'.
 pkgname=perl-WWW-RobotRules
 version=6.02
-revision=1
-wrksrc="WWW-RobotRules-$version"
+revision=2
+wrksrc="${pkgname/perl-/}-${version}"
 build_style=perl-module
 hostmakedepends="perl>=5.18"
 makedepends="${hostmakedepends} perl-URI"
@@ -14,13 +14,3 @@ homepage="http://search.cpan.org/dist/WWW-RobotRules"
 license="GPL-2"
 distfiles="${CPAN_SITE}/WWW/WWW-RobotRules-$version.tar.gz"
 checksum=46b502e7a288d559429891eeb5d979461dd3ecc6a5c491ead85d165b6e03a51e
-long_desc="
- This module parses /robots.txt files as specified in
- A Standard for Robot Exclusion, at http://www.robotstxt.org/wc/norobots.html
- Webmasters can use the /robots.txt file to forbid conforming robots from
- accessing parts of their web site.
-
- The parsed files are kept in a WWW::RobotRules object, and this object
- provides methods to check if access to a given URL is prohibited. The same
- WWW::RobotRules object can be used for one or more parsed /robots.txt files
- on any number of hosts."
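
Note on the wrksrc change: the new value relies on bash pattern substitution
rather than hardcoding the distribution name. A minimal sketch of how the
expansion works, using the variable values from the template above (the echo
is illustrative only, not part of the template):

    #!/bin/bash
    pkgname=perl-WWW-RobotRules
    version=6.02

    # ${pkgname/perl-/} strips the first occurrence of "perl-" from
    # $pkgname, yielding the CPAN distribution name, so wrksrc matches
    # the directory the tarball unpacks to.
    wrksrc="${pkgname/perl-/}-${version}"
    echo "$wrksrc"   # prints: WWW-RobotRules-6.02

Deriving wrksrc from pkgname this way keeps the template consistent if the
package is ever renamed, and matches the ${pkgname}/${version} style already
used in the distfiles line.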