forked from sig_core/toolkit
stop using fpsync
This commit is contained in:
parent 282eef11f9
commit 9f87266007
@@ -12,6 +12,7 @@ parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
 # All of our options
 parser.add_argument('--release', type=str, help="Major Release Version or major-type (eg 9-beta)", required=True)
 parser.add_argument('--arch', type=str, help="Architecture")
+parser.add_argument('--fpsync', type=str, help="Use fpsync instead of rsync")
 parser.add_argument('--logger', type=str)

 # Parse them

@@ -28,6 +29,7 @@ a = RepoSync(
         config,
         major=major,
         arch=results.arch,
+        fpsync=results.fpsync,
         logger=results.logger,
 )

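The new flag is declared with type=str, so argparse hands RepoSync a string when --fpsync is passed and None when it is omitted; the constructor's fpsync: bool = False default and the later "and self.fpsync" checks only ever truth-test it, so any non-empty value enables fpsync. A minimal sketch of that behaviour, assuming nothing beyond the argparse lines shown above (the store_true line is a hypothetical alternative, not part of this commit):

import argparse

parser = argparse.ArgumentParser(description="Peridot Sync and Compose")
# As committed: a string-valued option, truth-tested downstream.
parser.add_argument('--fpsync', type=str, help="Use fpsync instead of rsync")
# Hypothetical alternative for a real on/off switch (not in this commit):
# parser.add_argument('--fpsync', action='store_true', help="Use fpsync instead of rsync")

print(bool(parser.parse_args([]).fpsync))                   # False: flag omitted -> None
print(bool(parser.parse_args(['--fpsync', 'yes']).fpsync))  # True: any non-empty string
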
@@ -63,6 +63,7 @@ class RepoSync:
             just_pull_everything: bool = False,
             extra_dnf_args=None,
             reposync_clean_old: bool = False,
+            fpsync: bool = False,
             logger=None
     ):
         self.nofail = nofail

@@ -76,6 +77,7 @@ class RepoSync:
         self.repoclosure = repoclosure
         self.refresh_extra_files = refresh_extra_files
         self.refresh_treeinfo = refresh_treeinfo
+        self.fpsync = fpsync
         # Enables podman syncing, which should effectively speed up operations
         self.parallel = parallel
         # This makes it so every repo is synced at the same time.

@@ -1406,7 +1408,7 @@ class RepoSync:
         # Standard ISOs
         self.log.info(Color.INFO + 'Starting to sync ISOs to compose')

-        if os.path.exists('/usr/bin/fpsync'):
+        if os.path.exists('/usr/bin/fpsync') and self.fpsync:
             self.log.info(Color.INFO + 'Starting up fpsync')
             message, ret = Shared.fpsync_method(iso_root, sync_iso_root, tmp_dir)
         elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):

@@ -1430,7 +1432,7 @@ class RepoSync:
         if os.path.exists(live_root):
             self.log.info(Color.INFO + 'Starting to sync live images to compose')

-            if os.path.exists('/usr/bin/fpsync'):
+            if os.path.exists('/usr/bin/fpsync') and self.fpsync:
                 message, ret = Shared.fpsync_method(live_root, sync_live_root, tmp_dir)
             elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
                 message, ret = Shared.rsync_method(live_root, sync_live_root)

@@ -1444,7 +1446,7 @@ class RepoSync:
         if os.path.exists(images_root):
             self.log.info(Color.INFO + 'Starting to sync cloud images to compose')

-            if os.path.exists('/usr/bin/fpsync'):
+            if os.path.exists('/usr/bin/fpsync') and self.fpsync:
                 message, ret = Shared.fpsync_method(images_root, sync_images_root, tmp_dir)
             elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
                 message, ret = Shared.rsync_method(images_root, sync_images_root)

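All three compose-sync blocks above now share the same fallback order: fpsync runs only when /usr/bin/fpsync exists and the new self.fpsync flag is truthy, otherwise GNU parallel plus rsync is tried when both binaries are present. A minimal standalone sketch of that selection, with a hypothetical helper name standing in for the calls to Shared.fpsync_method and Shared.rsync_method:

import os

def pick_sync_tool(use_fpsync):
    # Mirrors the gating order shown in the hunks above (helper name is hypothetical).
    if os.path.exists('/usr/bin/fpsync') and use_fpsync:
        return 'fpsync'
    elif os.path.exists('/usr/bin/parallel') and os.path.exists('/usr/bin/rsync'):
        return 'parallel+rsync'
    return 'no sync tool available'
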
@@ -564,12 +564,29 @@ class Shared:
         """
         find_cmd = '/usr/bin/find'
         parallel_cmd = '/usr/bin/parallel'
-        rsync_cmd = '/usr/bin/rsync'
+        cmd = '/usr/bin/rsync'
         switches = '-av --chown=10004:10005 --progress --relative --human-readable'
+        rsync_cmd = '{} {} {}/ {}'.format(cmd, switches, src, dest)

-        os.makedirs(dest, exist_ok=True)
+        #os.makedirs(dest, exist_ok=True)
+        process = subprocess.call(
+                shlex.split(rsync_cmd),
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL,
+        )
+        if process != 0:
+            message = 'Syncing (rsync) failed'
+            retval = process
+            return message, retval

-        return 'Not available', 1
+        if os.path.exists(dest):
+            message = 'Syncing (rsync) succeeded'
+            retval = process
+        else:
+            message = 'Path synced does not seem to exist for some reason.'
+            retval = 1
+
+        return message, retval

     @staticmethod
     def s3_determine_latest(s3_bucket, release, arches, filetype, name, logger):
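The rewritten Shared.rsync_method builds one rsync invocation with fixed ownership and verbosity switches, runs it with its output silenced, and reports a (message, return code) pair; since os.makedirs(dest, exist_ok=True) is now commented out, the destination directory is expected to exist beforehand. A hedged usage sketch, assuming the module already imports os, shlex, and subprocess as the new body requires (the paths below are placeholders, not taken from the commit):

# Hypothetical caller; source and destination paths are illustrative only.
message, ret = Shared.rsync_method('/mnt/compose/isos', '/mnt/repos-staging/isos')
if ret != 0:
    print(message)   # e.g. 'Syncing (rsync) failed'
else:
    print(message)   # 'Syncing (rsync) succeeded'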