Merge "Last ditch effort to correct a wrong shasum."

Authored by Jenkins on 2014-07-07 23:49:21 +00:00, committed by Gerrit Code Review
commit 8a077a0b13
2 changed files with 52 additions and 4 deletions


@@ -21,9 +21,48 @@ set -o pipefail
# Download a URL to a local cache
# e.g. cache-url http://.../foo ~/.cache/image-create/foo
SCRIPT_NAME=$(basename $0)
SCRIPT_HOME=$(dirname $0)
FORCE_REVALIDATE=0
function show_options () {
echo "Usage: $SCRIPT_NAME [options] <url> <destination_file>"
echo
echo "Download a URL and cache it to a specified location."
echo "Subsequent requests will compare the last modified date"
echo "of the upstream file to determine whether it needs to be"
echo "downloaded again."
echo
echo "Options:"
echo " -f -- force upstream caches to fetch a new copy of the file"
echo " -h -- show this help"
echo
exit $1
}
TEMP=$(getopt -o hf -n $SCRIPT_NAME -- "$@")
if [ $? != 0 ] ; then echo "Terminating..." >&2 ; exit 1 ; fi
# Note the quotes around `$TEMP': they are essential!
eval set -- "$TEMP"
while true ; do
case "$1" in
-h|"-?") show_options 0;;
-f) FORCE_REVALIDATE=1; shift 1;;
--) shift; break;;
*) echo "Error: unsupported option $1." ; exit 1 ;;
esac
done
url=$1
dest=$2
time_cond=
curl_opts=""
if [ -z $url -o -z $dest ] ; then
    show_options 1
fi
if [ -p $dest ]; then
type="fifo"
@@ -34,14 +73,17 @@ else
    tmp=$(mktemp $(dirname $dest)/.download.XXXXXXXX)
fi
if [ -f $dest -a -s $dest ] ; then
if [ "$FORCE_REVALIDATE" = "1" ]; then
curl_opts="-H 'Pragma: no-cache, must-revalidate' -H 'Cache-Control: no-cache, must-revalidate'"
success="Downloaded and cached $url, having forced upstream caches to revalidate"
elif [ -f $dest -a -s $dest ] ; then
time_cond="-z $dest"
success="Server copy has changed. Using server version of $url"
else
success="Downloaded and cached $url for the first time"
fi
rcode=$(curl -L -o $tmp -w '%{http_code}' $url $time_cond)
rcode=$(eval curl -v -L -o $tmp -w '%{http_code}' $curl_opts $url $time_cond)
if [ "$rcode" == "200" -o "${url:0:7}" == "file://" ] ; then
    # In cases where servers ignore the Modified time,
    # curl cancels the download, outputs a 200 and leaves

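Illustration (not part of the commit): with the new -f flag, a caller can ask any upstream HTTP caches to hand back a revalidated copy rather than a stale one. Assuming the script is on PATH and using a placeholder URL and cache path, a forced fetch looks like the first line below, and the eval'd download in that path works out to roughly the curl call that follows (no -z time condition, since the cache-busting headers take its place):

# Hypothetical invocation; URL and destination are placeholders.
cache-url -f http://example.com/current/SHA256SUMS ~/.cache/image-create/SHA256SUMS

# Approximate expansion of the eval'd command in the forced path; the -o
# target stands in for the mktemp file the script actually creates.
curl -v -L -o /tmp/.download.XXXXXXXX -w '%{http_code}' \
    -H 'Pragma: no-cache, must-revalidate' \
    -H 'Cache-Control: no-cache, must-revalidate' \
    http://example.com/current/SHA256SUMS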

@@ -14,15 +14,21 @@ DIB_RELEASE=${DIB_RELEASE:-trusty}
BASE_IMAGE_FILE=${BASE_IMAGE_FILE:-$DIB_RELEASE-server-cloudimg-$ARCH-root.tar.gz}
SHA256SUMS=${SHA256SUMS:-https://${DIB_CLOUD_IMAGES##http?(s)://}/$DIB_RELEASE/current/SHA256SUMS}
CACHED_FILE=$DIB_IMAGE_CACHE/$BASE_IMAGE_FILE
CACHED_SUMS=$DIB_IMAGE_CACHE/SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH
if [ -n "$DIB_OFFLINE" -a -f "$CACHED_FILE" ] ; then
echo "Not checking freshness of cached $CACHED_FILE."
else
echo "Fetching Base Image"
$TMP_HOOKS_PATH/bin/cache-url $SHA256SUMS $DIB_IMAGE_CACHE/SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH
$TMP_HOOKS_PATH/bin/cache-url $SHA256SUMS $CACHED_SUMS
$TMP_HOOKS_PATH/bin/cache-url $DIB_CLOUD_IMAGES/$DIB_RELEASE/current/$BASE_IMAGE_FILE $CACHED_FILE
pushd $DIB_IMAGE_CACHE
grep "$BASE_IMAGE_FILE" SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH | sha256sum --check -
if ! grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check - ; then
# It is likely that an upstream http(s) proxy has given us a new image, but stale sha256sums
# Try again. Harder.
$TMP_HOOKS_PATH/bin/cache-url -f $SHA256SUMS $CACHED_SUMS
grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check -
fi
popd
fi
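The new retry amounts to a verify, force-revalidate, re-verify pattern: if the tarball no longer matches the cached SHA256SUMS (typically because a proxy served a fresh image alongside stale sums), only the sums file is re-fetched with -f and the checksum is tried once more. A minimal standalone sketch of the same idea, with placeholder file names rather than the element's real paths, and assuming the surrounding script runs under set -e so a second mismatch aborts the build:

# Sketch only; file names and URL are placeholders.
if ! grep "image-root.tar.gz" SHA256SUMS.cached | sha256sum --check - ; then
    # Stale checksum list suspected: force upstream caches to revalidate it.
    cache-url -f https://example.com/current/SHA256SUMS SHA256SUMS.cached
    # Second, authoritative check; a non-zero exit here stops the run under set -e.
    grep "image-root.tar.gz" SHA256SUMS.cached | sha256sum --check -
fi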
# Extract the base image (use --numeric-owner to avoid UID/GID mismatch between