From 29c0d80c40d44eb77e3320c5c883357eb6b6f604 Mon Sep 17 00:00:00 2001
From: Jesse Luehrs
Date: Sat, 28 Nov 2009 15:40:24 -0600
Subject: make plugins a bit more flexible

---
 lib/Crawl/Bot.pm               |  31 ++++-------
 lib/Crawl/Bot/Mantis.pm        | 113 -----------------------------------------
 lib/Crawl/Bot/Plugin.pm        |  18 +++++++
 lib/Crawl/Bot/Plugin/Mantis.pm | 106 ++++++++++++++++++++++++++++++++++++++
 lib/Crawl/Bot/Plugin/Wiki.pm   |  52 +++++++++++++++++++
 lib/Crawl/Bot/Wiki.pm          |  60 ----------------------
 6 files changed, 185 insertions(+), 195 deletions(-)
 delete mode 100644 lib/Crawl/Bot/Mantis.pm
 create mode 100644 lib/Crawl/Bot/Plugin.pm
 create mode 100644 lib/Crawl/Bot/Plugin/Mantis.pm
 create mode 100644 lib/Crawl/Bot/Plugin/Wiki.pm
 delete mode 100644 lib/Crawl/Bot/Wiki.pm

diff --git a/lib/Crawl/Bot.pm b/lib/Crawl/Bot.pm
index b6d956e..7d34403 100644
--- a/lib/Crawl/Bot.pm
+++ b/lib/Crawl/Bot.pm
@@ -4,6 +4,10 @@ use MooseX::NonMoose;
 extends 'Bot::BasicBot';
 
 use File::Path;
+use Module::Pluggable (
+    instantiate => 'new',
+    sub_name => 'create_plugins',
+);
 
 has [qw(username name)] => (
     # don't need (or want) accessors, just want to initialize the hash slot
@@ -26,33 +30,17 @@ has update_time => (
     default => 300,
 );
 
-has mantis => (
-    is => 'ro',
-    isa => 'Crawl::Bot::Mantis',
-    lazy => 1,
-    default => sub {
-        my $self = shift;
-        require Crawl::Bot::Mantis;
-        Crawl::Bot::Mantis->new(bot => $self);
-    },
-);
-
-has wiki => (
+has plugins => (
     is => 'ro',
-    isa => 'Crawl::Bot::Wiki',
+    isa => 'ArrayRef[Crawl::Bot::Plugin]',
     lazy => 1,
-    default => sub {
-        my $self = shift;
-        require Crawl::Bot::Wiki;
-        Crawl::Bot::Wiki->new(bot => $self);
-    },
+    default => sub { [__PACKAGE__->create_plugins(bot => shift)] },
 );
 
 sub BUILD {
     my $self = shift;
     File::Path::mkpath($self->data_dir);
-    $self->mantis;
-    $self->wiki;
+    $self->plugins;
 }
 
 before say => sub {
@@ -63,8 +51,7 @@ before say => sub {
 
 sub tick {
     my $self = shift;
-    $self->mantis->tick;
-    $self->wiki->tick;
+    $_->tick for @{ $self->plugins };
     return $self->update_time;
 }
 
diff --git a/lib/Crawl/Bot/Mantis.pm b/lib/Crawl/Bot/Mantis.pm
deleted file mode 100644
index 8f4b937..0000000
--- a/lib/Crawl/Bot/Mantis.pm
+++ /dev/null
@@ -1,113 +0,0 @@
-package Crawl::Bot::Mantis;
-use Moose;
-
-use autodie;
-use File::Spec;
-use XML::RAI;
-
-has bot => (
-    is => 'ro',
-    isa => 'Crawl::Bot',
-    required => 1,
-    weak_ref => 1,
-    handles => [qw(say_all data_dir)],
-);
-
-has rss_feed => (
-    is => 'ro',
-    isa => 'Str',
-    lazy => 1,
-    default => 'http://crawl.develz.org/mantis/issues_rss.php',
-);
-
-has _cache_file => (
-    is => 'ro',
-    isa => 'Str',
-    lazy => 1,
-    default => 'mantis_issue_cache',
-);
-
-sub cache_file {
-    my $self = shift;
-    return File::Spec->catfile($self->data_dir, $self->_cache_file);
-}
-
-has issues => (
-    traits => ['Hash'],
-    isa => 'HashRef',
-    lazy => 1,
-    default => sub { { } },
-    handles => {
-        has_issue => 'exists',
-        issues => 'keys',
-        _add_issue => 'set',
-    },
-);
-
-sub BUILD {
-    my $self = shift;
-    my $file = $self->cache_file;
-    if (-r $file) {
-        warn "Updating seen issue list from the cache...";
-        open my $fh, '<', $file;
-        while (<$fh>) {
-            chomp;
-            $self->add_issue($_);
-            warn " got issue $_";
-        }
-    }
-    else {
-        warn "Updating seen issue list from a fresh copy of the feed...";
-        $self->each_issue(sub {
-            my $issue = shift;
-            my $link = $issue->identifier;
-            (my $id = $link) =~ s/.*=(\d+)$/$1/;
-            $self->add_issue($id);
-            warn " got issue $id";
-        });
-        $self->save_cache;
-    }
-}
-
-sub save_cache {
-    my $self = shift;
-    warn "Saving cache state to " . $self->cache_file;
-    open my $fh, '>', $self->cache_file;
-    $fh->print("$_\n") for $self->issues;
-}
-
-sub add_issue {
-    my $self = shift;
-    $self->_add_issue($_[0], 1);
-}
-
-sub each_issue {
-    my $self = shift;
-    my ($code) = @_;
-    my $rss = XML::RAI->parse_uri($self->rss_feed);
-    for my $issue (@{ $rss->items }) {
-        $code->($issue);
-    }
-}
-
-sub tick {
-    my $self = shift;
-    warn "Checking for new issues...";
-    $self->each_issue(sub {
-        my $issue = shift;
-        (my $id = $issue->identifier) =~ s/.*=(\d+)$/$1/;
-        return if $self->has_issue($id);
-        warn "New issue! ($id)";
-        (my $title = $issue->title) =~ s/\d+: //;
-        my $link = $issue->link;
-        (my $user = $issue->creator) =~ s/ <.*?>$//;
-        $self->say_all("$title ($link) by $user");
-        $self->add_issue($id);
-    });
-    $self->save_cache;
-}
-
-__PACKAGE__->meta->make_immutable;
-no Moose;
-
-1;
diff --git a/lib/Crawl/Bot/Plugin.pm b/lib/Crawl/Bot/Plugin.pm
new file mode 100644
index 0000000..c70e102
--- /dev/null
+++ b/lib/Crawl/Bot/Plugin.pm
@@ -0,0 +1,18 @@
+package Crawl::Bot::Plugin;
+use Moose;
+
+has bot => (
+    is => 'ro',
+    isa => 'Crawl::Bot',
+    required => 1,
+    weak_ref => 1,
+    handles => [qw(say_all data_dir)],
+);
+
+# not all plugins require a tick method
+sub tick { }
+
+__PACKAGE__->meta->make_immutable;
+no Moose;
+
+1;
diff --git a/lib/Crawl/Bot/Plugin/Mantis.pm b/lib/Crawl/Bot/Plugin/Mantis.pm
new file mode 100644
index 0000000..9312237
--- /dev/null
+++ b/lib/Crawl/Bot/Plugin/Mantis.pm
@@ -0,0 +1,106 @@
+package Crawl::Bot::Plugin::Mantis;
+use Moose;
+extends 'Crawl::Bot::Plugin';
+
+use autodie;
+use File::Spec;
+use XML::RAI;
+
+has rss_feed => (
+    is => 'ro',
+    isa => 'Str',
+    lazy => 1,
+    default => 'http://crawl.develz.org/mantis/issues_rss.php',
+);
+
+has _cache_file => (
+    is => 'ro',
+    isa => 'Str',
+    lazy => 1,
+    default => 'mantis_issue_cache',
+);
+
+sub cache_file {
+    my $self = shift;
+    return File::Spec->catfile($self->data_dir, $self->_cache_file);
+}
+
+has issues => (
+    traits => ['Hash'],
+    isa => 'HashRef',
+    lazy => 1,
+    default => sub { { } },
+    handles => {
+        has_issue => 'exists',
+        issues => 'keys',
+        _add_issue => 'set',
+    },
+);
+
+sub BUILD {
+    my $self = shift;
+    my $file = $self->cache_file;
+    if (-r $file) {
+        warn "Updating seen issue list from the cache...";
+        open my $fh, '<', $file;
+        while (<$fh>) {
+            chomp;
+            $self->add_issue($_);
+            warn " got issue $_";
+        }
+    }
+    else {
+        warn "Updating seen issue list from a fresh copy of the feed...";
+        $self->each_issue(sub {
+            my $issue = shift;
+            my $link = $issue->identifier;
+            (my $id = $link) =~ s/.*=(\d+)$/$1/;
+            $self->add_issue($id);
+            warn " got issue $id";
+        });
+        $self->save_cache;
+    }
+}
+
+sub save_cache {
+    my $self = shift;
+    warn "Saving cache state to " . $self->cache_file;
+    open my $fh, '>', $self->cache_file;
+    $fh->print("$_\n") for $self->issues;
+}
+
+sub add_issue {
+    my $self = shift;
+    $self->_add_issue($_[0], 1);
+}
+
+sub each_issue {
+    my $self = shift;
+    my ($code) = @_;
+    my $rss = XML::RAI->parse_uri($self->rss_feed);
+    for my $issue (@{ $rss->items }) {
+        $code->($issue);
+    }
+}
+
+sub tick {
+    my $self = shift;
+    warn "Checking for new issues...";
+    $self->each_issue(sub {
+        my $issue = shift;
+        (my $id = $issue->identifier) =~ s/.*=(\d+)$/$1/;
+        return if $self->has_issue($id);
+        warn "New issue! ($id)";
+        (my $title = $issue->title) =~ s/\d+: //;
+        my $link = $issue->link;
+        (my $user = $issue->creator) =~ s/ <.*?>$//;
+        $self->say_all("$title ($link) by $user");
+        $self->add_issue($id);
+    });
+    $self->save_cache;
+}
+
+__PACKAGE__->meta->make_immutable;
+no Moose;
+
+1;
diff --git a/lib/Crawl/Bot/Plugin/Wiki.pm b/lib/Crawl/Bot/Plugin/Wiki.pm
new file mode 100644
index 0000000..9e08159
--- /dev/null
+++ b/lib/Crawl/Bot/Plugin/Wiki.pm
@@ -0,0 +1,52 @@
+package Crawl::Bot::Plugin::Wiki;
+use Moose;
+
+use XML::RPC;
+
+has xmlrpc_location => (
+    is => 'ro',
+    isa => 'Str',
+    lazy => 1,
+    default => 'http://crawl.develz.org/wiki/lib/exe/xmlrpc.php',
+);
+
+has wiki_base => (
+    is => 'ro',
+    isa => 'Str',
+    lazy => 1,
+    default => 'http://crawl.develz.org/wiki/doku.php?id=',
+);
+
+has last_checked => (
+    is => 'rw',
+    isa => 'Int',
+);
+
+sub tick {
+    my $self = shift;
+    my $last_checked = $self->last_checked;
+    $self->last_checked(time);
+    return unless $last_checked;
+
+    my $xmlrpc = XML::RPC->new($self->xmlrpc_location);
+    warn "Getting recent wiki changes...";
+    my $changes = $xmlrpc->call('wiki.getRecentChanges', $last_checked);
+    for my $change (@$changes) {
+        warn "Page $change->{name} changed";
+        my $history = $xmlrpc->call('wiki.getPageVersions', $change->{name}, 0);
+        next if @$history;
+        warn "Page $change->{name} is new!";
+        my $name = $change->{name};
+        my $page = $xmlrpc->call('wiki.getPage', $change->{name});
+        if ($page =~ /(===?=?=?=?) (.*) \1/) {
+            $name = $2;
+        }
+        $self->say_all("$change->{author} created page $name at "
+                     . $self->wiki_base . "$change->{name}");
+    }
+}
+
+__PACKAGE__->meta->make_immutable;
+no Moose;
+
+1;
diff --git a/lib/Crawl/Bot/Wiki.pm b/lib/Crawl/Bot/Wiki.pm
deleted file mode 100644
index bffc925..0000000
--- a/lib/Crawl/Bot/Wiki.pm
+++ /dev/null
@@ -1,60 +0,0 @@
-package Crawl::Bot::Wiki;
-use Moose;
-
-use XML::RPC;
-
-has bot => (
-    is => 'ro',
-    isa => 'Crawl::Bot',
-    required => 1,
-    weak_ref => 1,
-    handles => [qw(say_all)],
-);
-
-has xmlrpc_location => (
-    is => 'ro',
-    isa => 'Str',
-    lazy => 1,
-    default => 'http://crawl.develz.org/wiki/lib/exe/xmlrpc.php',
-);
-
-has wiki_base => (
-    is => 'ro',
-    isa => 'Str',
-    lazy => 1,
-    default => 'http://crawl.develz.org/wiki/doku.php?id=',
-);
-
-has last_checked => (
-    is => 'rw',
-    isa => 'Int',
-);
-
-sub tick {
-    my $self = shift;
-    my $last_checked = $self->last_checked;
-    $self->last_checked(time);
-    return unless $last_checked;
-
-    my $xmlrpc = XML::RPC->new($self->xmlrpc_location);
-    warn "Getting recent wiki changes...";
-    my $changes = $xmlrpc->call('wiki.getRecentChanges', $last_checked);
-    for my $change (@$changes) {
-        warn "Page $change->{name} changed";
-        my $history = $xmlrpc->call('wiki.getPageVersions', $change->{name}, 0);
-        next if @$history;
-        warn "Page $change->{name} is new!";
-        my $name = $change->{name};
-        my $page = $xmlrpc->call('wiki.getPage', $change->{name});
-        if ($page =~ /(===?=?=?=?) (.*) \1/) {
-            $name = $2;
-        }
-        $self->say_all("$change->{author} created page $name at "
-                     . $self->wiki_base . "$change->{name}");
-    }
-}
-
-__PACKAGE__->meta->make_immutable;
-no Moose;
-
-1;
--
cgit v1.2.3-54-g00ecf
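
Note on the new layout: Module::Pluggable's create_plugins (see the instantiate => 'new' and sub_name => 'create_plugins' options above) finds every module under the Crawl::Bot::Plugin:: namespace and constructs it with bot => $self, and Crawl::Bot::tick then calls tick on each instance every update_time seconds. The sketch below shows what a new plugin could look like under those assumptions; the package name Crawl::Bot::Plugin::Heartbeat and its message are invented for illustration, while bot, say_all, data_dir, and the default no-op tick all come from the Crawl::Bot::Plugin base class added here. (The Wiki plugin as committed omits the extends line that the ArrayRef[Crawl::Bot::Plugin] constraint on plugins expects; the sketch follows the Mantis plugin and includes it.)

    package Crawl::Bot::Plugin::Heartbeat;   # hypothetical example, not part of this commit
    use Moose;
    extends 'Crawl::Bot::Plugin';            # provides the weak 'bot' attribute and a no-op tick

    # Crawl::Bot::tick calls this on every plugin, every update_time (default 300) seconds.
    sub tick {
        my $self = shift;
        # say_all and data_dir are delegated to the bot via the 'handles'
        # list in Crawl::Bot::Plugin.
        $self->say_all("heartbeat: plugin state lives under " . $self->data_dir);
    }

    __PACKAGE__->meta->make_immutable;
    no Moose;

    1;

Dropping a file like this into lib/Crawl/Bot/Plugin/ should be all that is needed; no changes to Crawl::Bot itself are required, which is the point of the refactor.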