From c83042d6701f275bd792fb15c889e780deddb14a Mon Sep 17 00:00:00 2001
From: Tom Ryder
Date: Wed, 22 Jun 2016 10:13:10 +1200
Subject: Rename binscripts more tersely

---
 bin/getmails             | 36 -------------------------
 bin/gms                  | 36 +++++++++++++++++++++++++
 bin/htmlurls             | 25 ------------------
 bin/hurl                 | 25 ++++++++++++++++++
 bin/mdurls               | 18 -------------
 bin/murl                 | 18 +++++++++++++
 bin/plenv-modules-update | 34 ------------------------
 bin/plmu                 | 34 ++++++++++++++++++++++++
 bin/scatter              | 34 ------------------------
 bin/shoal                | 37 --------------------------
 bin/shock                | 34 ------------------------
 bin/sls                  | 37 ++++++++++++++++++++++++++
 bin/sra                  | 34 ++++++++++++++++++++++++
 bin/sta                  | 34 ++++++++++++++++++++++++
 bin/urlc                 | 69 ++++++++++++++++++++++++++++++++++++++++++++++++
 bin/urlcheck             | 69 ------------------------------------------------
 16 files changed, 287 insertions(+), 287 deletions(-)
 delete mode 100755 bin/getmails
 create mode 100755 bin/gms
 delete mode 100755 bin/htmlurls
 create mode 100755 bin/hurl
 delete mode 100755 bin/mdurls
 create mode 100755 bin/murl
 delete mode 100755 bin/plenv-modules-update
 create mode 100755 bin/plmu
 delete mode 100755 bin/scatter
 delete mode 100755 bin/shoal
 delete mode 100755 bin/shock
 create mode 100755 bin/sls
 create mode 100755 bin/sra
 create mode 100755 bin/sta
 create mode 100755 bin/urlc
 delete mode 100755 bin/urlcheck

diff --git a/bin/getmails b/bin/getmails
deleted file mode 100755
index d8383564..00000000
--- a/bin/getmails
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Run getmail(1) over every getmailrc.* file in ~/.getmail (I didn't like the
-# included getmails(1) script).
-#
-# Author: Tom Ryder
-# Copyright: 2016
-#
-self=getmails
-
-# Check for existence of needed commands
-hash flock getmail try || exit
-
-# Create a directory for our lockfiles if need be; we'll just leave it there
-lockdir=${TMPDIR:-/tmp}/getmail-$UID
-if ! mkdir -p -- "$lockdir" ; then
-    printf "%s: Could not create lockdir %s\n" \
-        "$self" "$lockdir" >&2
-    exit 1
-fi
-
-# Iterate through the getmailrc.* files in $GETMAIL if defined, or
-# $HOME/.getmail if not
-for rcfile in "${GETMAIL:-$HOME/.getmail}"/getmailrc.* ; do
-
-    # Run the current rcfile with getmail quietly, using its basename with
-    # .lock appended as the lockfile in the lock directory
-    (
-        flock -n 9 || exit 1
-        try -i 15 -n 3 getmail --rcfile "$rcfile" "$@"
-    ) 9>"$lockdir"/"${rcfile##*/}".lock &
-done
-
-# Wait for all of the enqueued tasks to finish
-wait
diff --git a/bin/gms b/bin/gms
new file mode 100755
index 00000000..7ab865f2
--- /dev/null
+++ b/bin/gms
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+#
+# Run getmail(1) over every getmailrc.* file in ~/.getmail (I didn't like the
+# included getmails(1) script).
+#
+# Author: Tom Ryder
+# Copyright: 2016
+#
+self=gms
+
+# Check for existence of needed commands
+hash flock getmail try || exit
+
+# Create a directory for our lockfiles if need be; we'll just leave it there
+lockdir=${TMPDIR:-/tmp}/getmail-$UID
mkdir -p -- "$lockdir" ; then + printf "%s: Could not create lockdir %s\n" \ + "$self" "$lockdir" >&2 + exit 1 +fi + +# Iterate through the getmailrc.* files in $GETMAIL if defined, or +# $HOME/.getmail if not +for rcfile in "${GETMAIL:-$HOME/.getmail}"/getmailrc.* ; do + + # Run the current rcfile with getmail quietly, using its basename with + # .lock appended as the lockfile in the lock directory + ( + flock -n 9 || exit 1 + try -i 15 -n 3 getmail --rcfile "$rcfile" "$@" + ) 9>"$lockdir"/"${rcfile##*/}".lock & +done + +# Wait for all of the enqueued tasks to finish +wait diff --git a/bin/htmlurls b/bin/htmlurls deleted file mode 100755 index 23dc7dcc..00000000 --- a/bin/htmlurls +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash - -# -# Extract URLs from an HTML document or documents. -# -# Author: Tom Ryder -# Copyright: 2016 -# License: Public domain -# - -# Set a sensible locale so that sort(1) doesn't act dumbly -LANG=C.UTF-8 -export LANG - -# Check we have the programs we need -hash pup || exit - -# Emit the content of the args, or stdin -cat -- "${@:-/dev/stdin}" | ## shellcheck disable=SC2002 - -# Pipe it through a pup filter to get all the values of the a href elements -pup 'a attr{href}' | - -# Sort it uniquely -sort | uniq diff --git a/bin/hurl b/bin/hurl new file mode 100755 index 00000000..23dc7dcc --- /dev/null +++ b/bin/hurl @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +# +# Extract URLs from an HTML document or documents. +# +# Author: Tom Ryder +# Copyright: 2016 +# License: Public domain +# + +# Set a sensible locale so that sort(1) doesn't act dumbly +LANG=C.UTF-8 +export LANG + +# Check we have the programs we need +hash pup || exit + +# Emit the content of the args, or stdin +cat -- "${@:-/dev/stdin}" | ## shellcheck disable=SC2002 + +# Pipe it through a pup filter to get all the values of the a href elements +pup 'a attr{href}' | + +# Sort it uniquely +sort | uniq diff --git a/bin/mdurls b/bin/mdurls deleted file mode 100755 index a3d522ca..00000000 --- a/bin/mdurls +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -# -# Format markdown and pass it to htmlurls to extract URLs from it. -# -# Author: Tom Ryder -# Copyright: 2016 -# License: Public domain -# - -# Check we have the programs we need -hash pandoc htmlurls || exit - -# Pipe the output of pandoc(1) on our args ... -pandoc -f markdown -t html -- "${@:-/dev/stdin}" | - -# ... into our own htmlurls -htmlurls diff --git a/bin/murl b/bin/murl new file mode 100755 index 00000000..7b5dc050 --- /dev/null +++ b/bin/murl @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# +# Format markdown and pass it to hurl to extract URLs from it. +# +# Author: Tom Ryder +# Copyright: 2016 +# License: Public domain +# + +# Check we have the programs we need +hash pandoc hurl || exit + +# Pipe the output of pandoc(1) on our args ... +pandoc -f markdown -t html -- "${@:-/dev/stdin}" | + +# ... 
+# ... into our own hurl
+hurl
diff --git a/bin/plenv-modules-update b/bin/plenv-modules-update
deleted file mode 100755
index 668c4667..00000000
--- a/bin/plenv-modules-update
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-# Export a sensible locale so that sort(1) behaves
-LANG=C.UTF-8
-export LANG
-
-# Check we have required programs
-hash cpanm plenv || exit
-
-# Create required temporary files
-mf=$(mktemp) || exit
-ef=$(mktemp) || exit
-cf=$(mktemp) || exit
-
-# Clean up temporary files on exit
-cleanup() {
-    rm -f -- "$mf" "$ef" "$cf"
-}
-trap cleanup EXIT
-
-# Get the list of modules, sort them, write them to a file
-plenv list-modules | sort > "$mf"
-
-# Sort the non-CPAN modules from ~/.plenv and write them to a file
-sort -- "$HOME"/.plenv/non-cpanm-modules > "$ef"
-
-# Write out the list of modules that appear in the first file, but not the
-# second
-comm -23 -- "$mf" "$ef" > "$cf"
-
-# Read the list of modules to upgrade and upgrade them one by one
-while read -r module ; do
-    cpanm --from http://cpan.inspire.net.nz --notest --quiet -- "$module"
-done < "$cf"
diff --git a/bin/plmu b/bin/plmu
new file mode 100755
index 00000000..668c4667
--- /dev/null
+++ b/bin/plmu
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Export a sensible locale so that sort(1) behaves
+LANG=C.UTF-8
+export LANG
+
+# Check we have required programs
+hash cpanm plenv || exit
+
+# Create required temporary files
+mf=$(mktemp) || exit
+ef=$(mktemp) || exit
+cf=$(mktemp) || exit
+
+# Clean up temporary files on exit
+cleanup() {
+    rm -f -- "$mf" "$ef" "$cf"
+}
+trap cleanup EXIT
+
+# Get the list of modules, sort them, write them to a file
+plenv list-modules | sort > "$mf"
+
+# Sort the non-CPAN modules from ~/.plenv and write them to a file
+sort -- "$HOME"/.plenv/non-cpanm-modules > "$ef"
+
+# Write out the list of modules that appear in the first file, but not the
+# second
+comm -23 -- "$mf" "$ef" > "$cf"
+
+# Read the list of modules to upgrade and upgrade them one by one
+while read -r module ; do
+    cpanm --from http://cpan.inspire.net.nz --notest --quiet -- "$module"
+done < "$cf"
diff --git a/bin/scatter b/bin/scatter
deleted file mode 100755
index bab4ec32..00000000
--- a/bin/scatter
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# scatter(1) -- Run a command on every hostname returned by shoal(1) and print
-# both stdout and stderr, including allocating a pty with -t.
-#
-# Author: Tom Ryder
-# Copyright: 2014
-# License: Public domain
-#
-
-# Name self
-self=scatter
-
-# Handle ^C interrupts
-trap 'trap - INT; kill -INT $$' INT
-
-# Bail if we couldn't find shoal(1)
-hash shoal || exit
-
-# Exit with usage method if no arguments given
-if ! (($#)) ; then
-    printf 'USAGE: %s \n' "$self" >&2
-    exit 1
-fi
-
-# Execute command, print both stdout and stderr, and use file descriptor 3 to
-# avoid clobbering any of the standard streams
-while read -r hostname <&3 ; do
-    printf '%s: %s\n' "$self" "$hostname"
-    # shellcheck disable=SC2029
-    ssh -qt -- "$hostname" "$@"
-    printf '\n'
-done 3< <(shoal)
diff --git a/bin/shoal b/bin/shoal
deleted file mode 100755
index f503d858..00000000
--- a/bin/shoal
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# shoal(1) -- Print all the non-wildcard Host names (first one per line) from
-# an ssh_config(5) file, defaulting to $HOME/.ssh/config.
-#
-# Author: Tom Ryder
-# Copyright: 2014
-# License: Public domain
-#
-
-# Start by assuming we should parse all hosts
-declare -i shoal
-shoal=1
-
-# Iterate through the config
-while read -r option value _ ; do
-
-    # "### shoal" and "### noshoal" toggles parsing
-    case $option in
-        '###')
-            case $value in
-                noshoal)
-                    shoal=0
-                    ;;
-                shoal)
-                    shoal=1
-                    ;;
-            esac
-            ;;
-        'Host')
-            if ((shoal)) && [[ $value != *[^[:alnum:]_-]* ]] ; then
-                printf '%s\n' "$value"
-            fi
-            ;;
-    esac
-done < "${1:-$HOME/.ssh/config}"
diff --git a/bin/shock b/bin/shock
deleted file mode 100755
index c578b624..00000000
--- a/bin/shock
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# shock(1) -- Run a command on every hostname returned by shoal(1) and print
-# the hostname if the command's return value was zero. Discard stdout, but do
-# print stderr.
-#
-# Author: Tom Ryder
-# Copyright: 2014
-# License: Public domain
-#
-
-# Name self
-self=shock
-
-# Handle ^C interrupts
-trap 'trap - INT; kill -INT $$' INT
-
-# Bail if we couldn't find shoal(1)
-hash shoal || exit
-
-# Exit with usage method if no command given
-if ! (($#)) ; then
-    printf 'USAGE: %s \n' "$self" >&2
-    exit 1
-fi
-
-# Execute command, print hostname if it returns zero
-while read -r hostname ; do
-    # shellcheck disable=SC2029
-    if ssh -nq -- "$hostname" "$@" >/dev/null ; then
-        printf '%s\n' "$hostname"
-    fi
-done < <(shoal)
diff --git a/bin/sls b/bin/sls
new file mode 100755
index 00000000..eb376cbc
--- /dev/null
+++ b/bin/sls
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+#
+# sls(1) -- Print all the non-wildcard Host names (first one per line) from
+# an ssh_config(5) file, defaulting to $HOME/.ssh/config.
+#
+# Author: Tom Ryder
+# Copyright: 2014
+# License: Public domain
+#
+
+# Start by assuming we should parse all hosts
+declare -i sls
+sls=1
+
+# Iterate through the config
+while read -r option value _ ; do
+
+    # "### sls" and "### nosls" toggles parsing
+    case $option in
+        '###')
+            case $value in
+                nosls)
+                    sls=0
+                    ;;
+                sls)
+                    sls=1
+                    ;;
+            esac
+            ;;
+        'Host')
+            if ((sls)) && [[ $value != *[^[:alnum:]_-]* ]] ; then
+                printf '%s\n' "$value"
+            fi
+            ;;
+    esac
+done < "${1:-$HOME/.ssh/config}"
diff --git a/bin/sra b/bin/sra
new file mode 100755
index 00000000..64ccb2ae
--- /dev/null
+++ b/bin/sra
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# sra(1) -- Run a command on every hostname returned by sls(1) and print
+# both stdout and stderr, including allocating a pty with -t.
+#
+# Author: Tom Ryder
+# Copyright: 2014
+# License: Public domain
+#
+
+# Name self
+self=sra
+
+# Handle ^C interrupts
+trap 'trap - INT; kill -INT $$' INT
+
+# Bail if we couldn't find sls(1)
+hash sls || exit
+
+# Exit with usage method if no arguments given
+if ! (($#)) ; then
+    printf 'USAGE: %s \n' "$self" >&2
+    exit 1
+fi
+
+# Execute command, print both stdout and stderr, and use file descriptor 3 to
+# avoid clobbering any of the standard streams
+while read -r hostname <&3 ; do
+    printf '%s: %s\n' "$self" "$hostname"
+    # shellcheck disable=SC2029
+    ssh -qt -- "$hostname" "$@"
+    printf '\n'
+done 3< <(sls)
diff --git a/bin/sta b/bin/sta
new file mode 100755
index 00000000..ced20cef
--- /dev/null
+++ b/bin/sta
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# sta(1) -- Run a command on every hostname returned by sls(1) and print
+# the hostname if the command's return value was zero. Discard stdout, but do
+# print stderr.
+#
+# Author: Tom Ryder
+# Copyright: 2014
+# License: Public domain
+#
+
+# Name self
+self=sta
+
+# Handle ^C interrupts
+trap 'trap - INT; kill -INT $$' INT
+
+# Bail if we couldn't find sls(1)
+hash sls || exit
+
+# Exit with usage method if no command given
+if ! (($#)) ; then
+    printf 'USAGE: %s \n' "$self" >&2
+    exit 1
+fi
+
+# Execute command, print hostname if it returns zero
+while read -r hostname ; do
+    # shellcheck disable=SC2029
+    if ssh -nq -- "$hostname" "$@" >/dev/null ; then
+        printf '%s\n' "$hostname"
+    fi
+done < <(sls)
diff --git a/bin/urlc b/bin/urlc
new file mode 100755
index 00000000..49f41082
--- /dev/null
+++ b/bin/urlc
@@ -0,0 +1,69 @@
+#!/usr/bin/env bash
+
+#
+# Given a list of files or stdin containing a newline-separated list of URLs,
+# try to find erroneous, redirecting, or insecure URLs with working secure
+# alternatives.
+#
+# Author: Tom Ryder
+# Copyright: 2016
+# License: Public domain
+#
+
+# Name self
+self=urlc
+
+# cURL request timeout
+tm=${URLCHECK_TIMEOUT:-8}
+
+# Create temporary files for headers and body content
+head=$(mktemp) || exit
+body=$(mktemp) || exit
+
+# Set up cleanup function to remove temporary files on exit
+cleanup() {
+    rm -f -- "$head" "$body"
+}
+trap cleanup EXIT
+
+# Error count
+declare -i errc
+
+# Iterate through input; ignore leading/trailing whitespace
+while read -r url ; do
+
+    # Skip anything that doesn't start with HTTP
+    [[ $url == 'http'* ]] || continue
+
+    # Make initial request, log head and body to files, cry and skip on error
+    if ! curl -fHLsS -D "$head" -m "$tm" -o "$body" -- "$url" ; then
+        printf '%s: %s raises error\n' \
+            "$self" "$url" >&2
+        ((errc++))
+        continue
+    fi
+
+    # Iterate through header file, cry about the first redirect we find
+    while IFS=': ' read -r header value ; do
+        [[ $header == 'Location' ]] || continue
+        printf '%s: %s redirects to %s\n' \
+            "$self" "$url" "$value" >&2
+        ((errc++))
+        break
+    done < "$head"
+
+    # Skip anything that's already secure
+    [[ $url == 'https:'* ]] && continue
+
+    # Form a naïve attempt at a possible secure URL and try to request it,
+    # point it out if it actually works
+    securl=${url/http:/https:}
+    if curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$securl" 2>/dev/null ; then
+        printf '%s: %s has a working secure version at %s\n' \
+            "$self" "$url" "$securl" >&2
+        ((errc++))
+    fi
+done < <(cat -- "${@:-/dev/stdin}") ## shellcheck disable=SC2002
+
+# Exit if any errors
+exit "$((errc > 0))"
diff --git a/bin/urlcheck b/bin/urlcheck
deleted file mode 100755
index 5692a892..00000000
--- a/bin/urlcheck
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Given a list of files or stdin containing a newline-separated list of URLs,
-# try to find erroneous, redirecting, or insecure URLs with working secure
-# alternatives.
-#
-# Author: Tom Ryder
-# Copyright: 2016
-# License: Public domain
-#
-
-# Name self
-self=urlcheck
-
-# cURL request timeout
-tm=${URLCHECK_TIMEOUT:-8}
-
-# Create temporary files for headers and body content
-head=$(mktemp) || exit
-body=$(mktemp) || exit
-
-# Set up cleanup function to remove temporary files on exit
-cleanup() {
-    rm -f -- "$head" "$body"
-}
-trap cleanup EXIT
-
-# Error count
-declare -i errc
-
-# Iterate through input; ignore leading/trailing whitespace
-while read -r url ; do
-
-    # Skip anything that doesn't start with HTTP
-    [[ $url == 'http'* ]] || continue
-
-    # Make initial request, log head and body to files, cry and skip on error
-    if ! curl -fHLsS -D "$head" -m "$tm" -o "$body" -- "$url" ; then
-        printf '%s: %s raises error\n' \
-            "$self" "$url" >&2
-        ((errc++))
-        continue
-    fi
-
-    # Iterate through header file, cry about the first redirect we find
-    while IFS=': ' read -r header value ; do
-        [[ $header == 'Location' ]] || continue
-        printf '%s: %s redirects to %s\n' \
-            "$self" "$url" "$value" >&2
-        ((errc++))
-        break
-    done < "$head"
-
-    # Skip anything that's already secure
-    [[ $url == 'https:'* ]] && continue
-
-    # Form a naïve attempt at a possible secure URL and try to request it,
-    # point it out if it actually works
-    securl=${url/http:/https:}
-    if curl -fLsS -D "$head" -m "$tm" -o "$body" -- "$securl" 2>/dev/null ; then
-        printf '%s: %s has a working secure version at %s\n' \
-            "$self" "$url" "$securl" >&2
-        ((errc++))
-    fi
-done < <(cat -- "${@:-/dev/stdin}") ## shellcheck disable=SC2002
-
-# Exit if any errors
-exit "$((errc > 0))"
--