From 0693dcb548569f118d1d7eefdf2874ef56334eb2 Mon Sep 17 00:00:00 2001
From: "B. Watson"
Date: Sat, 22 Sep 2018 17:18:44 -0400
Subject: sbosrcarch: slimjet hack

---
 sbosrcarch | 64 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 63 insertions(+), 1 deletion(-)

diff --git a/sbosrcarch b/sbosrcarch
index e7f1a2b..f5da6d4 100755
--- a/sbosrcarch
+++ b/sbosrcarch
@@ -15,6 +15,10 @@ our %url_filename_collisions = (
 	'https://ftp.mirrorservice.org/sites/download.salixos.org/x86_64/extra-14.2/source/libraries/p4api/p4api.tgz' => 'p4api.tgz.x86_64'
 );
 
+our %url_rewrite_hacks = (
+	'network/slimjet' => \&slimjet_hack
+);
+
 # TODO create_mode stats are wrong
 
 # TODO based on feedback from ttkp and pink_mist on IRC:
@@ -1112,6 +1116,58 @@ sub store_file {
 	}
 }
 
+# Unless/until upstream fixes their shit...
+# slimjet has a really fast release cycle, sometimes 2 or 3 per week,
+# and of course SBo only updates once per week.
+# Their download URL doesn't change (unversioned), causing md5sum
+# mismatches more often than not.
+# However, for all versions *but* the latest release, there's also
+# an archive URL with the version number in the path.
+# So slimjet_hack() will read VERSION from the slimjet.info file, see
+# if the archive URL exists (via HTTP HEAD), and if so, return that
+# instead of the real URL. If it's not found, just return the real
+# URL we were passed (which might or might not work OK).
+sub slimjet_hack {
+	my $url = shift;
+	my $file = shift || "network/slimjet.info";
+	my $ver;
+
+	open my $f, "<$file";
+	if(!$f) {
+		print "slimjet_hack(): $file: $!\n";
+		return $url;
+	}
+
+	while(<$f>) {
+		if(/^\s*VERSION\s*=\s*"?([^"]+)"?/) {
+			$ver = $1;
+			last;
+		}
+	}
+
+	if(!$ver) {
+		print "slimjet_hack(): couldn't extract VERSION from $file\n";
+		return $url;
+	}
+
+	my $newurl = $url;
+	$newurl =~ s,.*/,,;
+	$newurl = "https://www.slimjet.com/release/archive/$ver/$newurl";
+	print "slimjet_hack(): \$newurl: $newurl\n";
+
+	my $cmd = "$curl $curlopts --silent --head --fail --max-time 60 $newurl >/dev/null";
+	my $result = system($cmd);
+
+	if($result) {
+		print "slimjet_hack(): \$newurl not found\n";
+	} else {
+		$url = $newurl;
+	}
+
+	print "slimjet_hack(): return value: $url\n";
+	return $url;
+}
+
 # handle_info_file() is used as the 'wanted' sub for File::Find, but
 # it's also called from add and update modes, so it doesn't use any of
 # the File::Find stuff. Call while cd'ed to $sbogitdir, with $_ set to
@@ -1136,6 +1192,10 @@ sub handle_info_file {
 	my $filename = url_to_filename($url);
 	print ": $url\n";
 
+	if(exists($url_rewrite_hacks{"$category/$prgnam"})) {
+		$url = $url_rewrite_hacks{"$category/$prgnam"}->($url);
+	}
+
 	if(already_exists($filename, $category, $prgnam, $md5)) {
 		print " already in archive, OK\n";
 		$archivecount++;
@@ -1898,10 +1958,11 @@
 Usage: $self
   create
   update
+  status
   purge
   trim
   check
-  add [ ...]
+  add [] [ ...]
   rm
 
 For full documentation try:
@@ -1926,6 +1987,7 @@ for ($ARGV[0]) {
 	/check/ && do { check_mode(0); };
 	/status/ && do { check_mode(1); };
 	/bwlist/ && do { bwlist_mode(); };
+	#/slimjet_hack/ && do { $url_rewrite_hacks{'network/slimjet'}->('https://www.slimjetbrowser.com/release/slimjet_i386.tar.xz', '/tmp/slimjet.info'); exit 0; };
 	usage();
 }
 
-- 
cgit v1.2.3
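
Editor's note: below is a minimal standalone sketch (not part of the patch) of the
rewrite-hook dispatch and HEAD-check fallback that slimjet_hack() implements. The
%hacks table, the versioned_archive_or_original() helper, the example version
number, and the use of HTTP::Tiny are assumptions for illustration only; the real
script shells out to curl and reads VERSION from the slackbuild's .info file.

#!/usr/bin/perl
# Illustrative sketch only: mimics the patch's %url_rewrite_hacks idea with a
# hypothetical %hacks table and HTTP::Tiny instead of curl.
use strict;
use warnings;
use HTTP::Tiny;

# Hypothetical hook table, keyed by "category/prgnam" like %url_rewrite_hacks.
my %hacks = (
	'network/slimjet' => \&versioned_archive_or_original,
);

# Return the versioned archive URL if it answers an HTTP HEAD request,
# otherwise fall back to the original (unversioned) URL.
sub versioned_archive_or_original {
	my ($url, $ver) = @_;
	(my $basename = $url) =~ s,.*/,,;
	my $newurl = "https://www.slimjet.com/release/archive/$ver/$basename";
	my $resp = HTTP::Tiny->new(timeout => 60)->head($newurl);
	return $resp->{success} ? $newurl : $url;
}

# Example dispatch, the same shape handle_info_file() uses in the patch.
my ($category, $prgnam) = ('network', 'slimjet');
my $url = 'https://www.slimjetbrowser.com/release/slimjet_i386.tar.xz';
if(exists $hacks{"$category/$prgnam"}) {
	$url = $hacks{"$category/$prgnam"}->($url, '10.0.5.0');  # version made up for the example
}
print "$url\n";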