#!/usr/bin/env bash
#
# Given a list of files or stdin containing a newline-separated list of URLs,
# try to find erroneous, redirecting, or insecure URLs with working secure
# alternatives.
#
# Author: Tom Ryder <tom@sanctum.geek.nz>
# Copyright: 2016
# License: Public domain
#
# Name self, for use in diagnostic messages
self=urlcheck

# cURL request timeout in seconds; overridable via the environment
tm=${URLCHECK_TIMEOUT:-8}

# Create temporary files for headers and body content; bail out on failure
head=$(mktemp) || exit
body=$(mktemp) || exit

# Set up cleanup function to remove temporary files on exit
cleanup() {
  rm -f -- "$head" "$body"
}
trap cleanup EXIT

# Error count; initialise explicitly to zero so the final arithmetic exit
# status is well-defined even if no URL is ever processed
declare -i errc=0
# Iterate through input; ignore leading/trailing whitespace.  The
# "|| [[ -n $url ]]" clause still processes a final line that lacks a
# trailing newline.
while read -r url || [[ -n $url ]] ; do

  # Skip anything that doesn't start with HTTP
  [[ $url == 'http'* ]] || continue

  # Make initial request, log head and body to files, cry and skip on error.
  # Flags: -f fail on HTTP errors, -L follow redirects, -sS silent except
  # for errors.  (Note: not -fHLsS — -H takes an argument and would consume
  # "LsS", disabling the other three flags.)
  if ! curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$url" ; then
    printf '%s: %s raises error\n' \
      "$self" "$url" >&2
    ((errc++))
    continue
  fi

  # Iterate through header file, cry about the first redirect we find.
  # Header names are case-insensitive (HTTP/2 sends lowercase "location"),
  # and header lines end in CRLF, so strip the trailing carriage return
  # from the value before printing it.
  while IFS=': ' read -r header value ; do
    [[ ${header,,} == 'location' ]] || continue
    value=${value%$'\r'}
    printf '%s: %s redirects to %s\n' \
      "$self" "$url" "$value" >&2
    ((errc++))
    break
  done < "$head"

  # Skip anything that's already secure
  [[ $url == 'https:'* ]] && continue

  # Form a naïve attempt at a possible secure URL and try to request it,
  # point it out if it actually works
  securl=${url/http:/https:}
  if curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$securl" 2>/dev/null ; then
    printf '%s: %s has a working secure version at %s\n' \
      "$self" "$url" "$securl" >&2
    ((errc++))
  fi

# cat(1) is deliberate here: it concatenates multiple file operands (or
# standard input when no arguments are given) into the single stream the
# loop reads
done < <(cat -- "${@:-/dev/stdin}")

# Exit 1 if any errors were counted, 0 otherwise
exit "$((errc > 0))"