Lock around ubuntu tarball download

Make this safe for parallel builds.

Change-Id: I578fa38e1d44bc205b70d087d161b471cd260fa1
commit 94535ebb21
parent a030387d6b
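The change wraps the existing download-and-verify logic in a get_ubuntu_tarball() function and runs it under flock(1), so builds that share $DIB_IMAGE_CACHE take turns on a per-tarball lock file instead of racing on the same download. A minimal sketch of the same idiom, with a placeholder lock path and critical section (not part of this change):

    (
        # The "9>" redirection below opens file descriptor 9 on the lock file;
        # flock -w 1200 blocks on that descriptor for up to 20 minutes.
        if ! flock -w 1200 9 ; then
            echo "could not acquire /tmp/example.lock" >&2
            exit 1
        fi
        # critical section: fetch and extract exactly once per cache directory
    ) 9> /tmp/example.lock

Taking the lock on a file descriptor owned by the subshell (rather than wrapping a single command in flock) keeps it held for the whole critical section and releases it automatically when the subshell exits, even on error.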
@@ -14,33 +14,48 @@ DIB_RELEASE=${DIB_RELEASE:-trusty}
 BASE_IMAGE_FILE=${BASE_IMAGE_FILE:-$DIB_RELEASE-server-cloudimg-$ARCH-root.tar.gz}
 SHA256SUMS=${SHA256SUMS:-https://${DIB_CLOUD_IMAGES##http?(s)://}/$DIB_RELEASE/current/SHA256SUMS}
 CACHED_FILE=$DIB_IMAGE_CACHE/$BASE_IMAGE_FILE
+CACHED_FILE_LOCK=$DIB_IMAGE_CACHE/$BASE_IMAGE_FILE.lock
 CACHED_SUMS=$DIB_IMAGE_CACHE/SHA256SUMS.ubuntu.$DIB_RELEASE.$ARCH
 
-if [ -n "$DIB_OFFLINE" -a -f "$CACHED_FILE" ] ; then
-    echo "Not checking freshness of cached $CACHED_FILE."
-else
-    echo "Fetching Base Image"
-    $TMP_HOOKS_PATH/bin/cache-url $SHA256SUMS $CACHED_SUMS
-    $TMP_HOOKS_PATH/bin/cache-url $DIB_CLOUD_IMAGES/$DIB_RELEASE/current/$BASE_IMAGE_FILE $CACHED_FILE
-    pushd $DIB_IMAGE_CACHE
-    if ! grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check - ; then
-        # It is likely that an upstream http(s) proxy has given us a skewed
-        # result - either a cached SHA file or a cached image. Use cache-busting
-        # to get (as long as caches are compliant...) fresh files.
-        # Try the sha256sum first, just in case that is the stale one (avoiding
-        # downloading the larger image), and then if the sums still fail retry
-        # the image.
-        $TMP_HOOKS_PATH/bin/cache-url -f $SHA256SUMS $CACHED_SUMS
-        if ! grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check - ; then
-            $TMP_HOOKS_PATH/bin/cache-url -f $DIB_CLOUD_IMAGES/$DIB_RELEASE/current/$BASE_IMAGE_FILE $CACHED_FILE
-            grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check -
-        fi
-    fi
-    popd
-fi
+function get_ubuntu_tarball() {
+    if [ -n "$DIB_OFFLINE" -a -f "$CACHED_FILE" ] ; then
+        echo "Not checking freshness of cached $CACHED_FILE."
+    else
+        echo "Fetching Base Image"
+        $TMP_HOOKS_PATH/bin/cache-url $SHA256SUMS $CACHED_SUMS
+        $TMP_HOOKS_PATH/bin/cache-url \
+            $DIB_CLOUD_IMAGES/$DIB_RELEASE/current/$BASE_IMAGE_FILE $CACHED_FILE
+        pushd $DIB_IMAGE_CACHE
+        if ! grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check - ; then
+            # It is likely that an upstream http(s) proxy has given us a skewed
+            # result - either a cached SHA file or a cached image. Use cache-busting
+            # to get (as long as caches are compliant...) fresh files.
+            # Try the sha256sum first, just in case that is the stale one (avoiding
+            # downloading the larger image), and then if the sums still fail retry
+            # the image.
+            $TMP_HOOKS_PATH/bin/cache-url -f $SHA256SUMS $CACHED_SUMS
+            if ! grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check - ; then
+                $TMP_HOOKS_PATH/bin/cache-url -f \
+                    $DIB_CLOUD_IMAGES/$DIB_RELEASE/current/$BASE_IMAGE_FILE $CACHED_FILE
+                grep "$BASE_IMAGE_FILE" $CACHED_SUMS | sha256sum --check -
+            fi
+        fi
+        popd
+    fi
 
-# Extract the base image (use --numeric-owner to avoid UID/GID mismatch between
-# image tarball and host OS e.g. when building Ubuntu image on an openSUSE host)
-sudo tar -C $TARGET_ROOT --numeric-owner -xzf $DIB_IMAGE_CACHE/$BASE_IMAGE_FILE
-if [ -e "$TARGET_ROOT/lost+found" ]; then
-    sudo rmdir $TARGET_ROOT/lost+found
-fi
+    # Extract the base image (use --numeric-owner to avoid UID/GID mismatch between
+    # image tarball and host OS e.g. when building Ubuntu image on an openSUSE host)
+    sudo tar -C $TARGET_ROOT --numeric-owner -xzf $DIB_IMAGE_CACHE/$BASE_IMAGE_FILE
+    if [ -e "$TARGET_ROOT/lost+found" ]; then
+        sudo rmdir $TARGET_ROOT/lost+found
+    fi
+}
+
+(
+    echo "Getting $CACHED_FILE_LOCK: $(date)"
+    # Wait up to 20 minutes for another process to download
+    if ! flock -w 1200 9 ; then
+        echo "Did not get $CACHED_FILE_LOCK: $(date)"
+        exit 1
+    fi
+    get_ubuntu_tarball
+) 9> $CACHED_FILE_LOCK
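With the lock in place, parallel builds sharing one cache serialize the tarball download: the second process waits on $BASE_IMAGE_FILE.lock (up to 20 minutes) and then re-runs the freshness check against the now-populated cache. An illustrative way to exercise this; the output names, element list, and cache path below are examples only, not part of the change:

    # Two builds sharing one image cache; the second blocks on the
    # per-tarball lock while the first downloads.
    export DIB_IMAGE_CACHE=$HOME/.cache/image-create
    disk-image-create -o ubuntu-a ubuntu vm &
    disk-image-create -o ubuntu-b ubuntu vm &
    wait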