about summary refs log tree commit diff
path: root/bin/urlc
diff options
context:
space:
mode:
authorTom Ryder <tom@sanctum.geek.nz>2016-06-22 10:13:10 +1200
committerTom Ryder <tom@sanctum.geek.nz>2016-06-22 10:13:10 +1200
commitc83042d6701f275bd792fb15c889e780deddb14a (patch)
treecc6f6615267ce1ce8aae52e59477d8cea4dd8e73 /bin/urlc
parentRemove reference to GitHub dotfiles pages (diff)
downloaddotfiles-c83042d6701f275bd792fb15c889e780deddb14a.tar.gz
dotfiles-c83042d6701f275bd792fb15c889e780deddb14a.zip
Rename bin scripts more tersely
Diffstat (limited to 'bin/urlc')
-rwxr-xr-x bin/urlc | 69
1 file changed, 69 insertions(+), 0 deletions(-)
diff --git a/bin/urlc b/bin/urlc
new file mode 100755
index 00000000..49f41082
--- /dev/null
+++ b/bin/urlc
@@ -0,0 +1,69 @@
#!/usr/bin/env bash

#
# Given a list of files or stdin containing a newline-separated list of URLs,
# try to find erroneous, redirecting, or insecure URLs with working secure
# alternatives.
#
# Author: Tom Ryder <tom@sanctum.geek.nz>
# Copyright: 2016
# License: Public domain
#

# Name self for diagnostic messages
self=urlc

# cURL request timeout in seconds, overridable from the environment
tm=${URLCHECK_TIMEOUT:-8}

# Create temporary files for headers and body content; bail out if mktemp
# fails (mktemp prints its own error to stderr)
head=$(mktemp) || exit
body=$(mktemp) || exit

# Remove the temporary files on any exit path
cleanup() {
  rm -f -- "$head" "$body"
}
trap cleanup EXIT

# Error count; initialise explicitly to zero rather than relying on the
# arithmetic context defaulting an unset variable to 0
declare -i errc=0
+
# Iterate through input; read's default IFS handling strips leading/trailing
# whitespace from each line
while read -r url ; do

  # Skip anything that doesn't start with HTTP
  [[ $url == 'http'* ]] || continue

  # Make initial request, log head and body to files, cry and skip on error.
  # Flags are -f -L -s -S; the original's clustered "-fHLsS" was a bug: -H
  # takes an argument, so it swallowed "LsS" as a bogus header value and -L,
  # -s, -S were never applied (compare the second curl call below).
  if ! curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$url" ; then
    printf '%s: %s raises error\n' \
      "$self" "$url" >&2
    errc=$((errc + 1))
    continue
  fi

  # Iterate through the header file, cry about the first redirect we find.
  # Header field names are case-insensitive (RFC 7230), so compare in
  # lowercase; curl's raw dump is CRLF-terminated, so strip the trailing CR
  # from the value before printing it.
  while IFS=': ' read -r header value ; do
    [[ ${header,,} == 'location' ]] || continue
    value=${value%$'\r'}
    printf '%s: %s redirects to %s\n' \
      "$self" "$url" "$value" >&2
    errc=$((errc + 1))
    break
  done < "$head"

  # Skip anything that's already secure
  [[ $url == 'https:'* ]] && continue

  # Form a naïve attempt at a possible secure URL and try to request it,
  # point it out if it actually works
  securl=${url/http:/https:}
  if curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$securl" 2>/dev/null ; then
    printf '%s: %s has a working secure version at %s\n' \
      "$self" "$url" "$securl" >&2
    errc=$((errc + 1))
  fi

# cat handles "all named files or stdin" uniformly; the disable directive
# must precede the offending line with a single "#" to be recognised
# shellcheck disable=SC2002
done < <(cat -- "${@:-/dev/stdin}")

# Exit non-zero if any errors were found, zero otherwise
exit "$((errc > 0))"