#include <hallo.h> Alexey Petrov wrote on Tue May 29, 2001 at 10:53:44PM:
> apt-get install urlview

I hacked up yet another script for this. It collects all the URLs, puts them into my personal bookmark file and files the mail away in an archive.

#!/usr/bin/perl
# Written by Eduard Bloch ([EMAIL PROTECTED]), Licence: GPL
# lsort is a small script used to catch all URLs from an article, add
# them to an html file and save the original article as reference. The
# input data should come from a pipe. In mutt and slrn it can be used by
# typing "|lsort"; you can also define a macro and bind a key to it,
# consult the manuals of these programs.
# If exactly one argument is given, it is used as the target html file.

$num = "/doku/postings/".(time);
#^ define here the directory for stored postings
$target = "/ziele.html";
#^ choose the target html file
$target = $ARGV[0] if($#ARGV == 0);

while(<STDIN>) {
    push @posting, $_;
    chomp;
    if($_ =~ /Subject: /){ $_ =~ s/Subject: //g; $subject = $_ }
    $_ =~ s/^X-PGP-Key.*//gi;
    push(@words, split(/( +)|\t/, $_));
}

# save the original article for reference
open(postings, ">$num") or die "cannot write $num: $!";
print postings @posting;
close(postings);

open(ziel, ">>$target") or die "cannot append to $target: $!";
foreach(@words){
    # fix this ugly stuff produced by the people
    $_ =~ s/,|\"|\>|\<|\)|\(|\.$//g;
    $_ =~ s/^[^:]*(:|=)(http|ftp|www)/$2/i;
    if(/(^http:|^www\.|^ftp(:|\.))/){
        # turn bare www./ftp. hostnames into complete URLs
        $_ =~ s/^www/http:\/\/www/;
        $_ =~ s/^ftp\./ftp:\/\/ftp./;
        print ziel '<br>Aus <a href="', $num, '">', $subject, '</a>: <a href="', $_, '">', $_, '</a>', "\n";
    }
}
close(ziel);
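If you want the key binding mentioned in the header comment, something like the following in ~/.muttrc should do it. This is only a sketch: it assumes the script above is saved as "lsort" somewhere in your PATH and that Ctrl-B is not already bound to anything else.

macro index,pager \Cb "<pipe-message>lsort<enter>" "collect URLs into the bookmark file"

slrn can pipe the current article to an external command in a similar way; see its documentation for the exact syntax.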