#!/usr/bin/env bash

#
# Given a list of files or stdin containing a newline-separated list of URLs,
# try to find erroneous, redirecting, or insecure URLs with working secure
# alternatives.
#
# Author: Tom Ryder
# Copyright: 2016
# License: Public domain
#

# Name self
self=urlc

# cURL request timeout in seconds, overridable via the environment
tm=${URLCHECK_TIMEOUT:-8}

# Create temporary files for headers and body content
head=$(mktemp) || exit
body=$(mktemp) || exit

# Set up cleanup function to remove temporary files on exit
cleanup() {
    rm -f -- "$head" "$body"
}
trap cleanup EXIT

# Error count
declare -i errc=0

# Iterate through input; read -r ignores leading/trailing whitespace
# shellcheck disable=SC2002
while read -r url ; do

    # Skip anything that doesn't start with HTTP
    [[ $url == 'http'* ]] || continue

    # Make initial request, log head and body to files, cry and skip on error
    if ! curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$url" ; then
        printf '%s: %s raises error\n' \
            "$self" "$url" >&2
        ((errc++))
        continue
    fi

    # Iterate through header file, cry about the first redirect we find;
    # strip the trailing carriage return curl leaves on each header value
    while IFS=': ' read -r header value ; do
        [[ $header == 'Location' ]] || continue
        printf '%s: %s redirects to %s\n' \
            "$self" "$url" "${value%$'\r'}" >&2
        ((errc++))
        break
    done < "$head"

    # Skip anything that's already secure
    [[ $url == 'https:'* ]] && continue

    # Form a naïve attempt at a possible secure URL and try to request it,
    # point it out if it actually works
    securl=${url/http:/https:}
    if curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$securl" 2>/dev/null ; then
        printf '%s: %s has a working secure version at %s\n' \
            "$self" "$url" "$securl" >&2
        ((errc++))
    fi

done < <(cat -- "${@:-/dev/stdin}")

# Exit nonzero if any errors were found
exit "$((errc > 0))"
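
# Example invocations, assuming the script is saved as "urlc" somewhere on
# PATH and marked executable (any filename works; only the error-message
# prefix in $self is fixed):
#
#     urlc urls.txt                              # URLs listed in a file
#     urlc a.txt b.txt                           # several files at once
#     printf 'http://example.com/\n' | urlc      # URLs piped on stdin
#
# The exit status is 0 if every URL checked out, 1 otherwise.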