add s3 uploader

This commit is contained in:
Louis Abel 2023-08-24 15:22:16 -07:00
parent 70a9af6b10
commit 46ad89c403
Signed by: label
GPG Key ID: 2A6975660E424560
4 changed files with 73 additions and 7 deletions

Binary file not shown.

View File

@ -12,6 +12,7 @@ import datetime
from pv2.util import gitutil, fileutil, rpmutil, processor, generic from pv2.util import gitutil, fileutil, rpmutil, processor, generic
from pv2.util import error as err from pv2.util import error as err
from pv2.util import constants as const from pv2.util import constants as const
from pv2.util import uploader as upload
#try: #try:
# import gi # import gi
@ -193,6 +194,16 @@ class Import:
shutil.move(src=source_path, dst=dest_path) shutil.move(src=source_path, dst=dest_path)
if os.path.exists('/usr/sbin/restorecon'): if os.path.exists('/usr/sbin/restorecon'):
processor.run_proc_foreground_shell(f'/usr/sbin/restorecon {dest_path}') processor.run_proc_foreground_shell(f'/usr/sbin/restorecon {dest_path}')
@staticmethod
def upload_to_s3(repo_path, file_dict: dict, bucket, aws_key_id: str, aws_secret_key: str):
    """
    Push each lookaside source found in file_dict to the given s3 bucket.

    file_dict maps a source file name (relative to repo_path) to its
    checksum; each file is stored in the bucket under that checksum.
    Credentials are passed straight through to the uploader helper.
    """
    for file_name, checksum in file_dict.items():
        upload.upload_to_s3(
                f'{repo_path}/{file_name}',
                bucket,
                aws_key_id,
                aws_secret_key,
                dest_name=checksum
        )
@staticmethod @staticmethod
def import_lookaside_peridot_cli( def import_lookaside_peridot_cli(
@ -328,7 +339,10 @@ class SrpmImport(Import):
git_user: str = 'git', git_user: str = 'git',
org: str = 'rpms', org: str = 'rpms',
dest_lookaside: str = '/var/www/html/sources', dest_lookaside: str = '/var/www/html/sources',
verify_signature: bool = False verify_signature: bool = False,
aws_access_key_id: str = '',
aws_access_key: str = '',
aws_bucket: str = ''
): ):
""" """
Init the class. Init the class.
@ -365,6 +379,10 @@ class SrpmImport(Import):
self.__branch = f'c{release}' self.__branch = f'c{release}'
print(f'Warning: Branch name not specified, defaulting to {self.__branch}') print(f'Warning: Branch name not specified, defaulting to {self.__branch}')
self.__aws_access_key_id = aws_access_key_id
self.__aws_access_key = aws_access_key
self.__aws_bucket = aws_bucket
def __get_srpm_release_version(self): def __get_srpm_release_version(self):
""" """
Gets the release version from the srpm Gets the release version from the srpm
@ -378,7 +396,7 @@ class SrpmImport(Import):
return None return None
# pylint: disable=too-many-locals # pylint: disable=too-many-locals
def pkg_import(self, skip_lookaside: bool = False): def pkg_import(self, skip_lookaside: bool = False, s3_upload: bool = False):
""" """
Actually perform the import Actually perform the import
@ -452,6 +470,19 @@ class SrpmImport(Import):
self.import_lookaside(git_repo_path, self.rpm_name, branch, self.import_lookaside(git_repo_path, self.rpm_name, branch,
sources, self.dest_lookaside) sources, self.dest_lookaside)
if s3_upload:
# I don't want to blatantly blow up here yet.
if len(self.__aws_access_key_id) == 0 or len(self.__aws_access_key) == 0 or len(self.__aws_bucket) == 0:
print('WARNING: No access key, ID, or bucket was provided. Skipping upload.')
else:
self.upload_to_s3(
git_repo_path,
sources,
self.__aws_bucket,
self.__aws_access_key_id,
self.__aws_access_key,
)
# Temporary hack like with git. # Temporary hack like with git.
dest_gitignore_file = f'{git_repo_path}/.gitignore' dest_gitignore_file = f'{git_repo_path}/.gitignore'
if os.path.exists(dest_gitignore_file): if os.path.exists(dest_gitignore_file):
@ -579,7 +610,10 @@ class GitImport(Import):
distprefix: str = 'el', distprefix: str = 'el',
source_git_user: str = 'git', source_git_user: str = 'git',
dest_git_user: str = 'git', dest_git_user: str = 'git',
dest_org: str = 'rpms' dest_org: str = 'rpms',
aws_access_key_id: str = '',
aws_access_key: str = '',
aws_bucket: str = ''
): ):
""" """
Init the class. Init the class.
@ -604,6 +638,9 @@ class GitImport(Import):
self.__upstream_lookaside = upstream_lookaside self.__upstream_lookaside = upstream_lookaside
self.__upstream_lookaside_url = self.get_lookaside_template_path(upstream_lookaside) self.__upstream_lookaside_url = self.get_lookaside_template_path(upstream_lookaside)
self.__alternate_spec_name = alternate_spec_name self.__alternate_spec_name = alternate_spec_name
self.__aws_access_key_id = aws_access_key_id
self.__aws_access_key = aws_access_key
self.__aws_bucket = aws_bucket
if len(dest_branch) > 0: if len(dest_branch) > 0:
self.__dest_branch = dest_branch self.__dest_branch = dest_branch
@ -612,7 +649,7 @@ class GitImport(Import):
raise err.ConfigurationError(f'{upstream_lookaside} is not valid.') raise err.ConfigurationError(f'{upstream_lookaside} is not valid.')
# pylint: disable=too-many-locals, too-many-statements, too-many-branches # pylint: disable=too-many-locals, too-many-statements, too-many-branches
def pkg_import(self, skip_lookaside: bool = False): def pkg_import(self, skip_lookaside: bool = False, s3_upload: bool = False):
""" """
Actually perform the import Actually perform the import
@ -765,6 +802,19 @@ class GitImport(Import):
self.import_lookaside(dest_git_repo_path, self.rpm_name, dest_branch, self.import_lookaside(dest_git_repo_path, self.rpm_name, dest_branch,
sources, self.dest_lookaside) sources, self.dest_lookaside)
if s3_upload:
# I don't want to blatantly blow up here yet.
if len(self.__aws_access_key_id) == 0 or len(self.__aws_access_key) == 0 or len(self.__aws_bucket) == 0:
print('WARNING: No access key, ID, or bucket was provided. Skipping upload.')
else:
self.upload_to_s3(
dest_git_repo_path,
sources,
self.__aws_bucket,
self.__aws_access_key_id,
self.__aws_access_key,
)
# This is a temporary hack. There are cases that the .gitignore that's # This is a temporary hack. There are cases that the .gitignore that's
# provided by upstream erroneously keeps out certain sources, despite # provided by upstream erroneously keeps out certain sources, despite
# the fact that they were pushed before. We're killing off any # the fact that they were pushed before. We're killing off any

View File

@ -24,6 +24,10 @@ rpm_parser.add_argument('--verify-signature', action='store_true')
rpm_parser.add_argument('--skip-lookaside-upload', rpm_parser.add_argument('--skip-lookaside-upload',
action='store_true', action='store_true',
help='Set this flag to skip uploading to /var/www/html/sources esque lookaside') help='Set this flag to skip uploading to /var/www/html/sources esque lookaside')
rpm_parser.add_argument('--upload-to-s3', action='store_true')
rpm_parser.add_argument('--aws-access-key-id', type=str, required=False, default='')
rpm_parser.add_argument('--aws-access-key', type=str, required=False, default='')
rpm_parser.add_argument('--aws-bucket', type=str, required=False, default='')
git_parser.add_argument('--name', type=str, required=True) git_parser.add_argument('--name', type=str, required=True)
git_parser.add_argument('--source-gituser', type=str, required=False, default='git') git_parser.add_argument('--source-gituser', type=str, required=False, default='git')
@ -47,6 +51,10 @@ git_parser.add_argument('--alternate-spec-name',
git_parser.add_argument('--skip-lookaside-upload', git_parser.add_argument('--skip-lookaside-upload',
action='store_true', action='store_true',
help='Set this flag to skip uploading to /var/www/html/sources esque lookaside') help='Set this flag to skip uploading to /var/www/html/sources esque lookaside')
git_parser.add_argument('--upload-to-s3', action='store_true')
git_parser.add_argument('--aws-access-key-id', type=str, required=False, default='')
git_parser.add_argument('--aws-access-key', type=str, required=False, default='')
git_parser.add_argument('--aws-bucket', type=str, required=False, default='')
results = parser.parse_args() results = parser.parse_args()
command = parser.parse_args().cmd command = parser.parse_args().cmd
@ -66,8 +74,12 @@ def main():
org=results.gitorg, org=results.gitorg,
dest_lookaside=results.dest_lookaside, dest_lookaside=results.dest_lookaside,
verify_signature=results.verify_signature, verify_signature=results.verify_signature,
aws_access_key_id=results.aws_access_key_id,
aws_access_key=results.aws_access_key,
aws_bucket=results.aws_bucket,
) )
classy.pkg_import(skip_lookaside=results.skip_lookaside_upload) classy.pkg_import(skip_lookaside=results.skip_lookaside_upload,
s3_upload=results.upload_to_s3)
elif command == 'git': elif command == 'git':
classy = importutil.GitImport( classy = importutil.GitImport(
package=results.name, package=results.name,
@ -84,8 +96,12 @@ def main():
distprefix=results.distprefix, distprefix=results.distprefix,
alternate_spec_name=results.alternate_spec_name, alternate_spec_name=results.alternate_spec_name,
dest_lookaside=results.dest_lookaside, dest_lookaside=results.dest_lookaside,
aws_access_key_id=results.aws_access_key_id,
aws_access_key=results.aws_access_key,
aws_bucket=results.aws_bucket,
) )
classy.pkg_import(skip_lookaside=results.skip_lookaside_upload) classy.pkg_import(skip_lookaside=results.skip_lookaside_upload,
s3_upload=results.upload_to_s3)
else: else:
print('Unknown command') print('Unknown command')

View File

@ -54,7 +54,7 @@ def upload_to_s3(
dest_name = os.path.basename(input_file) dest_name = os.path.basename(input_file)
if s3 is None: if s3 is None:
err.UploadError('s3 module is not available') raise err.UploadError('s3 module is not available')
s3_client = boto3.client( s3_client = boto3.client(
's3', 's3',