-- tarquin.
# Proposed replacement call for wiki.cgi: read every URL parameter here in the
# caller and pass them all explicitly to GetFullLinkList, instead of having
# GetFullLinkList fetch them from the URL itself (see discussion below).
print &PrintLinkList(&GetFullLinkList( &GetParam("unique", 1), &GetParam("sort", 1), &GetParam("page", 1), &GetParam("inter", 0), &GetParam("url", 0), &GetParam("exists", 2), &GetParam("empty", 0), &GetParam("search", "") ));
Purists may want to rename "PrintLinkList" to "GetLinkList?" ;-)
GetFullLinkList is going to have to be hacked too... at the moment it grabs parameters from the URL. However, I want to pass it parameters from another call. Since it is currently only called from one place, we'll grab URL parameters in the wiki.cgi caller (ugly nested GetParams? in the call... )
my ($unique, $sort, $pagelink, $interlink, $urllink, $exists, $empty, $search)= @_ ; my ($name, $link ); # foreach iterators (though why not use $_ ?) #my ($name, $unique, $sort, $exists, $empty, $link, $search); #my ($pagelink, $interlink, $urllink); my (@found, @links, @newlinks, @pglist, %pgExists, %seen);
#$unique = &GetParam("unique", 1); #$sort = &GetParam("sort", 1); #$pagelink = &GetParam("page", 1); #$interlink = &GetParam("inter", 0); #$urllink = &GetParam("url", 0); #$exists = &GetParam("exists", 2); #$empty = &GetParam("empty", 0); #$search = &GetParam("search", "");
And the second hack for this sub makes it treat subpages correctly:
1. change the input line to
my ($unique, $sort, $pagelink, $interlink, $urllink, $exists, $empty, $search, $fixSubpages )= @_ ;
2. and add this little block as shown:
@links = &GetPageLinks($name, $pagelink, $interlink, $urllink); # patch - tarq - handle subpages correctly # so MagicWantedPages? works correctly and $fixSubpages my $parentPage; if( $fixSubpages and $UseSubpage ) { # extract parent name: whatever comes before the /. This can be self. ( $parentPage ) = $name =~ m{^([^/]+)}; } # end patch
foreach $link (@links) {
{
    # Wanted_Pages: a MagicPage that lists pages which are linked to but do
    # not yet exist, sorted by how many existing pages request them.
    # NOTE(review): the trailing "?" on identifiers in the wiki rendering
    # (e.g. "GenerateContent?") is a wiki wanted-link artifact, not code;
    # it has been stripped here so the block is valid Perl.
    package Wanted_Pages;
    @ISA = qw(MagicPage);

    Wanted_Pages->register();

    ###########################
    # Build the page body: a <DL> of wanted pages (most-requested first),
    # each with an edit link and the list of pages that request it.
    sub GenerateContent {
        my $text = '';
        my @links;
        my $throttle = 1;   # only show pages wanted by more than this many requesters
        my $testoutput;     # leftover debugging accumulator (only used if uncommented below)

        # Grab the list of links from UseModWiki::GetFullLinkList, which
        # spits out an array of lines: "{existing page} {wanted links}".
        # On the way, kill any items which are just one pagename, i.e.
        # pages that make no requests.
        @links = grep { !/^\w+\s*$/ } UseModWiki::GetFullLinkList(1,1,1,0,0,0,0,'',1);

        # Reset the currently open page, so the referrer is correct in the
        # edit links made below.
        $UseModWiki::OpenPageName = 'Wanted_Pages';

        my %requesters;     # wanted page name => array ref of requesting pages

        ##################
        # Data extraction
        foreach (@links) {
            # We are chomping through each line at a time.
            # Strip the leading page name into $head; $_ is then just the
            # list of links wanted by page $head.
            my $head;
            s[^(\S+)\s*]{ $head = $1; ''; }eg;

            # convert "headless" subpages by adding the head
            # s[\B/(\w*)][$head/$1]g;   # not needed: GetFullLinkList is fixed

            while( m[(\S+)\s*]g ) {
                push @{ $requesters{$1} }, $head;
            }
        }

        ##################
        # Data output: %requesters keys are wanted pages, values are arrays
        # of requesters.
        my $wantedpage;
        foreach $wantedpage (
            # Sort by size of the associated array (descending), then name,
            # AND only let through the arrays larger than $throttle.
            sort { @{$requesters{$b}} <=> @{$requesters{$a}} || $a cmp $b }
            grep { @{$requesters{$_}} > $throttle } keys %requesters ) {
            $text .= '<dt>'
                # get an edit link for the wanted page
                . $wantedpage
                . &UseModWiki::GetEditLink($wantedpage,"?")
                #. &UseModWiki::GetEditLink($wantedpage,$wantedpage)
                # give number of requests (array in scalar context = count)
                . ' ('. @{$requesters{$wantedpage}} . ' requests)</dt><dd>Requested by: '
                # list the requesters, as page links
                . join(', ', map {UseModWiki::GetPageLink($_)} @{$requesters{$wantedpage}} )
                . '</dd>';
        }

        #return $testoutput . '<HR>';
        return qq[<style type="text/css">
\#Wanted_Pages dd {font-size:smaller; margin:1px 0px 1.5ex 4em;}
\#Wanted_Pages dl {margin:0px;}
\#Wanted_Pages dt {font-weight:normal;}
</style>
<DL>$text</DL>];
    }
}
{
    # WantedPages: earlier, simpler MagicPage variant that reuses the stock
    # PrintLinkList output and counts wanted links with a regex scan.
    # NOTE(review): trailing "?" on identifiers and the doubled "??" in the
    # URLs were wiki wanted-link artifacts in the rendering; a single "?"
    # URL separator is assumed here — confirm against the original source.
    package WantedPages;
    @ISA = qw(MagicPage);

    WantedPages->register();

    ###########################
    # Build the page body: an <LI> list of wanted pages with request counts,
    # followed by the raw stripped link dump.
    sub GenerateContent {
        my $text  = '';
        my $links = '';

        # Not bad, but we want "names=0" and we can't set that from here...
        # can't be bothered to hack wiki.cgi any more, so just post-process:
        $links .= UseModWiki::PrintLinkList(UseModWiki::GetFullLinkList(1,1,1,0,0,0,0,''));
        $links =~ s[<a.*?/a> ?][]g;     # strip links (existing pages) and maybe a space after
        $links =~ s[\w*/\w*][]g;        # remove subpages since not nicely handled below

        my %counts;     # wanted page name => number of requests
        while( $links =~ /(\w+)\s/g ) {
            $counts{$1}++;
            # the trick would be to preserve the names at the front,
            # and shove those into an array in the hash
        }

        # Most-requested first; ties broken case-insensitively by name.
        # NOTE(review): added "my" on $page — the original leaked a package
        # global, which would fail under "use strict".
        foreach my $page (sort { $counts{$b} <=> $counts{$a} or lc $a cmp lc $b } keys %counts) {
            #$text .= qq[<LI><A HREF="$WikiBaseAddress?action=edit&id=$page">$page</A> (<A HREF="$WikiBaseAddress?back=$page">$counts{$page}</A>)</LI>\n];
            $text .= '<LI>'
                . &UseModWiki::GetEditLink($page,$page)
                . qq[</A> (<A HREF="$WikiBaseAddress?back=$page">$counts{$page}</A>)</LI>\n];
        }

        $text .= "<HR>$links";
        return $text;
    }
}