Last ditch effort to correct a wrong shasum.
Some users on corporate networks are finding that cloud images appear in their proxies before the matching SHA256SUMS files do. When the checksum fails, we now force any upstream proxies to revalidate their copy and retry the check, rather than failing immediately. We also add proper argument parsing to cache-url, with the associated help output, since the script has now grown its first option.

Change-Id: I44d44b62db8d1df990606fdf087ec1b837f491f6
Closes-Bug: #1336067
parent 61c192ea3c
commit a65d74f171
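The forced-revalidation path below works by adding HTTP no-cache headers to the curl request so that intermediate proxies refetch the file from upstream instead of answering from a stale cached copy. A rough standalone sketch of the same idea (the URL and output path are placeholders, not values from this change):

    curl -v -L \
        -H 'Pragma: no-cache, must-revalidate' \
        -H 'Cache-Control: no-cache, must-revalidate' \
        -o /tmp/SHA256SUMS -w '%{http_code}' \
        https://example.com/current/SHA256SUMS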
@@ -21,9 +21,48 @@ set -o pipefail
 # Download a URL to a local cache
 # e.g. cache-url http://.../foo ~/.cache/image-create/foo
 
+SCRIPT_NAME=$(basename $0)
+SCRIPT_HOME=$(dirname $0)
+FORCE_REVALIDATE=0
+
+function show_options () {
+    echo "Usage: $SCRIPT_NAME [options] <url> <destination_file>"
+    echo
+    echo "Download a URL and cache it to a specified location."
+    echo "Subsequent requests will compare the last modified date"
+    echo "of the upstream file to determine whether it needs to be"
+    echo "downloaded again."
+    echo
+    echo "Options:"
+    echo "    -f -- force upstream caches to fetch a new copy of the file"
+    echo "    -h -- show this help"
+    echo
+    exit $1
+}
+
+TEMP=$(getopt -o hf -n $SCRIPT_NAME -- "$@")
+if [ $? != 0 ] ; then echo "Terminating..." >&2 ; exit 1 ; fi
+
+# Note the quotes around `$TEMP': they are essential!
+eval set -- "$TEMP"
+
+while true ; do
+    case "$1" in
+        -h|"-?") show_options 0;;
+        -f) FORCE_REVALIDATE=1; shift 1;;
+        --) shift; break;;
+        *) echo "Error: unsupported option $1." ; exit 1 ;;
+    esac
+done
+
 url=$1
 dest=$2
 time_cond=
+curl_opts=""
+
+if [ -z $url -o -z $dest ] ; then
+    show_options 1
+fi
 
 if [ -p $dest ]; then
     type="fifo"
@@ -34,14 +73,17 @@ else
     tmp=$(mktemp $(dirname $dest)/.download.XXXXXXXX)
 fi
 
-if [ -f $dest -a -s $dest ] ; then
+if [ "$FORCE_REVALIDATE" = "1" ]; then
+    curl_opts="-H 'Pragma: no-cache, must-revalidate' -H 'Cache-Control: no-cache, must-revalidate'"
+    success="Downloaded and cached $url, having forced upstream caches to revalidate"
+elif [ -f $dest -a -s $dest ] ; then
     time_cond="-z $dest"
     success="Server copy has changed. Using server version of $url"
 else
     success="Downloaded and cached $url for the first time"
 fi
 
-rcode=$(curl -L -o $tmp -w '%{http_code}' $url $time_cond)
+rcode=$(eval curl -v -L -o $tmp -w '%{http_code}' $curl_opts $url $time_cond)
 if [ "$rcode" == "200" -o "${url:0:7}" == "file://" ] ; then
     # In cases where servers ignore the Modified time,
     # curl cancels the download, outputs a 200 and leaves
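With the argument parsing above in place, cache-url keeps its existing two-argument form and gains -f for a forced refresh; an illustrative invocation following the usage line in show_options (the URL and cache path are placeholders):

    # Normal behaviour: re-download only if the upstream copy is newer than the cached one
    cache-url https://example.com/current/SHA256SUMS ~/.cache/image-create/SHA256SUMS

    # Forced behaviour: send no-cache headers so upstream proxies revalidate their copy first
    cache-url -f https://example.com/current/SHA256SUMS ~/.cache/image-create/SHA256SUMS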
@@ -14,15 +14,21 @@ DIB_RELEASE=${DIB_RELEASE:-trusty}
 BASE_IMAGE_FILE=${BASE_IMAGE_FILE:-$DIB_RELEASE-server-cloudimg-$ARCH-root.tar.gz}
 SHA256SUMS=${SHA256SUMS:-https://${DIB_CLOUD_IMAGES##http?(s)://}/$DIB_RELEASE/current/SHA256SUMS}
 CACHED_FILE=$DIB_IMAGE_CACHE/$BASE_IMAGE_FILE
+CACHED_SUMS=$DIB_IMAGE_CACHE/SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH
 
 if [ -n "$DIB_OFFLINE" -a -f "$CACHED_FILE" ] ; then
     echo "Not checking freshness of cached $CACHED_FILE."
 else
     echo "Fetching Base Image"
-    $TMP_HOOKS_PATH/bin/cache-url $SHA256SUMS $DIB_IMAGE_CACHE/SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH
+    $TMP_HOOKS_PATH/bin/cache-url $SHA256SUMS $CACHED_SUMS
     $TMP_HOOKS_PATH/bin/cache-url $DIB_CLOUD_IMAGES/$DIB_RELEASE/current/$BASE_IMAGE_FILE $CACHED_FILE
     pushd $DIB_IMAGE_CACHE
-    grep "$BASE_IMAGE_FILE" SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH | sha256sum --check -
+    if ! grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check - ; then
+        # It is likely that an upstream http(s) proxy has given us a new image, but stale sha256sums
+        # Try again. Harder.
+        $TMP_HOOKS_PATH/bin/cache-url -f $SHA256SUMS $CACHED_SUMS
+        grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check -
+    fi
     popd
 fi
 # Extract the base image (use --numeric-owner to avoid UID/GID mismatch between