aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorB. Watson <yalhcru@gmail.com>2018-09-22 17:18:44 -0400
committerB. Watson <yalhcru@gmail.com>2018-09-22 17:18:44 -0400
commit0693dcb548569f118d1d7eefdf2874ef56334eb2 (patch)
treedea54d7f7d6d260160e0f8d708dc6e9e57b42625
parent0348a3112ca460e3afdd5babf9251033f114b12f (diff)
downloadsbostuff-0693dcb548569f118d1d7eefdf2874ef56334eb2.tar.gz
sbosrcarch: slimjet hack
-rwxr-xr-xsbosrcarch64
1 file changed, 63 insertions, 1 deletion
diff --git a/sbosrcarch b/sbosrcarch
index e7f1a2b..f5da6d4 100755
--- a/sbosrcarch
+++ b/sbosrcarch
@@ -15,6 +15,10 @@ our %url_filename_collisions = (
'https://ftp.mirrorservice.org/sites/download.salixos.org/x86_64/extra-14.2/source/libraries/p4api/p4api.tgz' => 'p4api.tgz.x86_64'
);
+our %url_rewrite_hacks = (
+ 'network/slimjet' => \&slimjet_hack
+);
+
# TODO create_mode stats are wrong
# TODO based on feedback from ttkp and pink_mist on IRC:
@@ -1112,6 +1116,58 @@ sub store_file {
}
}
+# Unless/until upstream fixes their shit...
+# slimjet has a really fast release cycle, sometimes 2 or 3 per week,
+# and of course SBo only updates once per week.
+# Their download URL doesn't change (unversioned), causing md5sum
+# mismatches more often than not.
+# However, for all versions *but* the latest release, there's also
+# an archive URL with the version number in the path.
+# So slimjet_hack() will read VERSION from the slimjet.info file, see
+# if the archive URL exists (via HTTP HEAD), and if so, return that
+# instead of the real URL. If it's not found, just return the real
+# URL we were passed (which might or might not work OK).
+sub slimjet_hack {
+ my $url = shift;
+ my $file = shift || "network/slimjet.info";
+ my $ver;
+ # three-arg open: with the old two-arg form, mode characters in $file could change the open mode
+ open my $f, '<', $file;
+
+ if(!$f) {
+ print "slimjet_hack(): $file: $!\n";
+ return $url;
+ }
+
+ while(<$f>) {
+ if(/^\s*VERSION\s*=\s*"?([^"]+)"?/) {
+ $ver = $1;
+ last;
+ }
+ }
+
+ if(!$ver) {
+ print "slimjet_hack(): couldn't extract VERSION from $file\n";
+ return $url;
+ }
+
+ my $newurl = $url;
+ $newurl =~ s,.*/,,;
+ $newurl = "https://www.slimjet.com/release/archive/$ver/$newurl";
+ print "slimjet_hack(): \$newurl: $newurl\n";
+
+ # single-quote the URL so shell metacharacters in a hostile VERSION can't inject commands
+ my $cmd = "$curl $curlopts --silent --head --fail --max-time 60 '$newurl' >/dev/null";
+ my $result = system($cmd);
+
+ if($result) {
+ print "slimjet_hack(): \$newurl not found\n";
+ } else {
+ $url = $newurl;
+ }
+ print "slimjet_hack(): return value: $url\n";
+
+ return $url;
+}
+
# handle_info_file() is used as the 'wanted' sub for File::Find, but
# it's also called from add and update modes, so it doesn't use any of
# the File::Find stuff. Call while cd'ed to $sbogitdir, with $_ set to
@@ -1136,6 +1192,10 @@ sub handle_info_file {
my $filename = url_to_filename($url);
print ": $url\n";
+ # key must be quoted: bare $category/$prgnam is numeric division (fatal divide-by-zero
+ # for non-numeric names), not the "category/prgnam" string key
+ if(exists($url_rewrite_hacks{"$category/$prgnam"})) {
+ $url = $url_rewrite_hacks{"$category/$prgnam"}->($url);
+ }
+
if(already_exists($filename, $category, $prgnam, $md5)) {
print " already in archive, OK\n";
$archivecount++;
@@ -1898,10 +1958,11 @@ Usage: $self <mode>
create
update
+ status
purge
trim
check
- add <category/prgname> [<file> ...]
+ add [<category/prgname>] <file> [<file> ...]
rm <category/prgname>
For full documentation try:
@@ -1926,6 +1987,7 @@ for ($ARGV[0]) {
/check/ && do { check_mode(0); };
/status/ && do { check_mode(1); };
/bwlist/ && do { bwlist_mode(); };
+ #/slimjet_hack/ && do { $url_rewrite_hacks{'network/slimjet'}->('https://www.slimjetbrowser.com/release/slimjet_i386.tar.xz', '/tmp/slimjet.info'); exit 0; };
usage();
}