author     B. Watson <yalhcru@gmail.com>  2015-10-25 05:00:53 -0400
committer  B. Watson <yalhcru@gmail.com>  2015-10-25 05:00:53 -0400
commit     201ba5945d468c2894c26cc7664ad9b147d3b1b8 (patch)
tree       7dcb5d9d1a678f6c7d0a33eadd00fc2d442d8a0a /sbofindsrc
parent     af740e9ab544748610e363ac25f6eae54c1974f3 (diff)
download   sbostuff-201ba5945d468c2894c26cc7664ad9b147d3b1b8.tar.gz
sbofindsrc: add filewatcher
Diffstat (limited to 'sbofindsrc')
-rwxr-xr-x  sbofindsrc  49
1 file changed, 45 insertions(+), 4 deletions(-)
diff --git a/sbofindsrc b/sbofindsrc
index 552e27c..5dae40a 100755
--- a/sbofindsrc
+++ b/sbofindsrc
@@ -9,12 +9,30 @@ SELF=$( basename $0 )
# failure otherwise. order isn't important here, it's randomized on
# every run.
-repos="wayback sbosrcarch macports fedora naptime pldattic tld ponce sfdirect gentoo netbsd freebsd debian"
-
-# mirror(s) to use for sbosrcarch, one or more, space-separated.
+repos="
+filewatcher
+wayback
+sbosrcarch
+macports
+fedora
+naptime
+pldattic
+tld
+ponce
+sfdirect
+gentoo
+netbsd
+freebsd
+debian
+"
+
+# mirror(s) to use for sbosrcarch, one or more, space or newline-separated.
# these are tried in the order listed.
# leave off the trailing / (shouldn't really matter, but...)
-sbosrcarch_mirrors="http://urchlay.naptime.net/~urchlay/sbosrc"
+sbosrcarch_mirrors="
+http://slackware.org.uk/sbosrcarch
+http://urchlay.naptime.net/~urchlay/sbosrc
+"
usage() {
cat <<EOF
@@ -200,6 +218,29 @@ macports_download() {
do_wget "http://distfiles.macports.org/$pkgname/$dlfile"
}
+# http://www.filewatcher.com/_/?q=Lirc-Client-2.00.tar.gz
+
+# for some reason, wget's getting the content gzipped. The
+# server appears to violate the HTTP/1.1 spec: it ignores
+# "Accept-Encoding: identity" or "Accept-Encoding:" with no arg,
+# and always sends gzipped content with "Content-encoding: gzip"
+
+# We have to do HTML scraping :(
+
+# TODO: CGI parameter escaping?
+
+filewatcher_download() {
+ fwurl="$(
+ wget -O- "http://www.filewatcher.com/_/?q=$dlfile" | \
+ zcat 2>/dev/null |
+ grep '<a *href *= *"*/m/' | \
+ sed 's,.*href *= *"\(\(ht\|f\)tp:[^"]*\)".*,\1,' | \
+ head -1
+ )"
+
+ [ -n "$fwurl" ] && do_wget "$fwurl"
+}
+
check_file() {
if [ ! -f "$dlfile" ]; then
echo "Nothing downloaded"