#!/bin/bash
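# Download the URLs listed in a file (one per line), several at a time.
#
# Options:
#   --url-file filename       file containing the URLs to fetch (required)
#   --skip-preflight-check    skip scanning the URL file for suspicious characters
#   --help, -h                print usage and exit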

url_file=""
do_preflight_check=True

function usage
{
    retval="$1"
    case "$retval" in
        0)
            ;;
        *)
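            # A nonzero status means usage is being shown because of an
            # error, so send it to stderr instead of stdout.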
            exec 1>&2
            ;;
    esac

    echo "Usage: $0 --url-file filename [--skip-preflight-check] [--help|-h]"
    echo
    echo "Download the URLs listed in filename, one per line"

    exit "$retval"
}

while [ "$#" -ge 1 ]
do
    case "$1" in
        --skip-preflight-check)
            do_preflight_check=False
            ;;
        --url-file)
            url_file="$2"
            shift
            ;;
        --help|-h)
            usage 0
            ;;
        *)
            echo "$0: Unrecognized option: $1" 1>&2
            usage 1
            ;;
    esac
    shift
done
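
# Everything below reads the URL file, so fail fast with a clear message
# if it was not supplied or cannot be read.
if [ -z "$url_file" ] || [ ! -r "$url_file" ]
then
    echo "$0: --url-file must name a readable file" 1>&2
    usage 1
fi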

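# Preflight check: warn about characters in the URL file that commonly
# signal malformed or risky URLs, before any downloads start.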
case "$do_preflight_check" in
    True)
        # shellcheck disable=SC1003
        if grep '\\' "$url_file"
        then
            echo "$0: one or more backslashes found in $url_file" 1>&2
        fi
        if grep ':.*:' "$url_file"
        then
            echo "$0: two or more colons found on a line in $url_file" 1>&2
        fi
        ;;
    False)
        ;;
    *)
        echo "$0: internal error: \$do_preflight_check not True or False: $do_preflight_check" 1>&2
        exit 1
        ;;
esac

# No single file should need an hour to transfer; if one does, something is
# wrong, so cap each download at one hour (3600 seconds).
# sed turns each URL into a quoted wget command, and looper runs up to
# 4 of those commands at a time.
looper \
    --verbosity 2 \
    --max-concurrency 4 \
    --maxtime $((60*60)) \
    --commands-file <(sed 's/^\(.*\)$/wget "\1"/' < "$url_file")