mirror of
https://github.com/rocky-linux/createhdds.git
synced 2024-12-22 18:18:32 +00:00
cbe7769748
This handles scheduling of jobs for more than one type of image; currently we'll run tests for Workstation live as well. It requires some cleverness to run some tests for *all* images (currently just default_boot_and_install) but run all the tests that can be run with any non-live installer image with the best image available for the compose. We introduce a special (openQA, not fedfind) 'flavor' called 'universal'; we run a couple of checks to find the best image in the compose for running the universal tests, and schedule tests for the 'universal' flavor with that image. The 'best' image is a server or 'generic' DVD if possible, and if not, a server or 'generic' boot.iso. ISO files have the compose's version identifier prepended to their names. Otherwise they retain their original names, which should usually be unique within a given compose, except for boot.iso files, which have their payload and arch added into their names to ensure they don't overwrite each other. This also adds a mechanism for TESTCASES (in conf_test_suites) to define a callback which will be called with the flavor of the image being tested; the result of the callback will be used as the 'test name' for relval result reporting purposes. This allows us to report results against the correct 'test instance' for the image being tested, for tests like Boot_default_install which have 'test instances' for each image. We can extend this general approach in future for other cases where we have multiple 'test instances' for a single test case.
330 lines
12 KiB
Python
Executable File
#!/usr/bin/env python
|
|
|
|
import json
|
|
import urllib2
|
|
import re
|
|
import urlgrabber
|
|
import os.path
|
|
import sys
|
|
import subprocess
|
|
import argparse
|
|
import datetime
|
|
# We can at least find images and run OpenQA jobs without wikitcms
|
|
try:
|
|
import wikitcms.wiki
|
|
except:
|
|
pass
|
|
import fedfind.release
|
|
|
|
from report_job_results import report_results
|
|
|
|
PERSISTENT = "/var/tmp/openqa_watcher.json"
|
|
ISO_PATH = "/var/lib/openqa/factory/iso/"
|
|
RUN_COMMAND = "/var/lib/openqa/script/client isos post ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=%s ARCH=%s BUILD=%s"
|
|
VERSIONS = ['i386', 'x86_64']
|
|
|
|
# read last tested version from file
|
|
def read_last():
    """Read the last-tested version for each arch from the persistent
    state file.

    Returns a (result, json_parsed) tuple: result maps each arch in
    VERSIONS to its last-tested version (or None), json_parsed is the
    full decoded state dict, so callers can update it and write it
    back. Both are empty if the state file is missing, unreadable, or
    not valid JSON.
    """
    result = {}
    try:
        # 'with' guarantees the file is closed even if the read or the
        # JSON decode raises.
        with open(PERSISTENT, "r") as statefile:
            json_parsed = json.loads(statefile.read())
    except IOError:
        # No state file yet (e.g. first run): nothing has been tested.
        return result, {}
    except ValueError:
        # State file exists but is corrupt; treat as empty rather than
        # crashing, so the next successful run rewrites it cleanly.
        return result, {}

    for version in VERSIONS:
        result[version] = json_parsed.get(version, None)
    return result, json_parsed
|
|
|
|
def download_image(image):
    """Download a given image with a name that should be unique.

    Returns the filename of the image (not the path).
    """
    version_tag = image.version.replace(' ', '_')
    if image.imagetype == 'boot':
        # All boot.iso files share a name; add payload and arch so they
        # don't overwrite each other.
        isoname = "_".join((version_tag, image.payload, image.arch,
                            "boot.iso"))
    else:
        isoname = "_".join((version_tag, image.filename))
    target = os.path.join(ISO_PATH, isoname)
    if os.path.isfile(target):
        # Already downloaded on a previous run.
        return isoname
    print("Downloading {0} ({1}) to {2}...".format(
        image.url, image.desc, target))
    # Icky hack around a urlgrabber bug:
    # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=715416
    urlgrabber.urlgrab(image.url.replace('https', 'http'), target)
    return isoname
|
|
|
|
def run_openqa_jobs(isoname, flavor, arch, build):
    """Run an openQA 'isos' job on the given ISO name, with the given
    flavor, arch and BUILD string. **NOTE**: the version passed to
    openQA as BUILD is parsed back into the 'relval report-auto'
    arguments by report_job_results.py; it is expected to be in the
    form of a 3-tuple on which join('_') has been run, and the three
    elements will be passed as --release, --compose and --milestone.
    Returns a list of scheduled job IDs (empty if none could be parsed
    from the openQA client output).
    """
    command = RUN_COMMAND % (isoname, flavor, arch, build)

    # starts OpenQA jobs
    output = subprocess.check_output(command.split())

    # openQA reports the scheduled IDs as either a single ID or an
    # inclusive range: "ids => [1]" or "ids => [1 .. 5]".
    r = re.compile(r'ids => \[(?P<from>\d+)( \.\. (?P<to>\d+))?\]')
    match = r.search(output)
    if not match:
        return []
    from_i = int(match.group('from'))
    if match.group('to'):
        # list() so all branches consistently return a list, whatever
        # range() returns on the running interpreter.
        return list(range(from_i, int(match.group('to')) + 1))
    return [from_i]
|
|
|
|
def jobs_from_current(wiki):
    """Schedule jobs against the 'current' release validation event
    (according to wikitcms) if we have not already. Returns a tuple,
    first value is the job list, second is the current event (None if
    wikitcms is unavailable).
    """
    if not wiki:
        print("python-wikitcms is required for current validation event "
              "discovery.")
        return ([], None)
    last_versions, json_parsed = read_last()
    currev = wiki.current_event
    print("Current event: {0}".format(currev.version))
    runarches = []
    for arch in VERSIONS:
        last_version = last_versions.get(arch, None)
        if last_version and last_version >= currev.sortname:
            # Already ran this event for this arch.
            print("Skipped: {0}".format(arch))
        else:
            runarches.append(arch)
            json_parsed[arch] = currev.sortname

    # write info about latest versions; 'with' ensures the file is
    # closed even if the write fails.
    with open(PERSISTENT, "w") as statefile:
        statefile.write(json.dumps(json_parsed))

    jobs = jobs_from_fedfind(currev.ff_release, runarches)

    return (jobs, currev)
|
|
|
|
def _score_universal_image(img):
    """Score an image's suitability for the 'universal' tests: a DVD
    beats a boot.iso/netinst, and generic payload beats server beats
    workstation.
    """
    score = 10 if img.imagetype == 'dvd' else 1
    score += {'generic': 5, 'server': 3, 'workstation': 1}.get(img.payload, 0)
    return score


def jobs_from_fedfind(ff_release, arches=VERSIONS):
    """Given a fedfind.Release object, find the ISOs we want and run
    jobs on them. arches is an iterable of arches to run on, if not
    specified, we'll use our constant. Returns the list of scheduled
    job IDs.
    """
    # Find currently-testable images for our arches.
    jobs = []
    queries = (
        fedfind.release.Query('imagetype', ('boot', 'live')),
        fedfind.release.Query('arch', arches),
        fedfind.release.Query('payload', ('server', 'generic', 'workstation')))
    images = ff_release.find_images(queries)

    # Now schedule jobs. First, let's get the BUILD value for openQA.
    build = '_'.join(
        (ff_release.release, ff_release.milestone, ff_release.compose))

    # Next let's schedule the 'universal' tests.
    # We have different images in different composes: nightlies only
    # have a generic boot.iso, TC/RC builds have Server netinst/boot
    # and DVD. We always want to run *some* tests -
    # default_boot_and_install at least - for all images we find, then
    # we want to run all the tests that are not image-dependent on
    # just one image. So we have a special 'universal' flavor and
    # product in openQA; all the image-independent test suites run for
    # that product. Here, we find the 'best' image we can for the
    # compose we're running on (a DVD if possible, a boot.iso or
    # netinst if not), and schedule the 'universal' jobs on that
    # image.
    # NOTE(review): the imagetype query above only requests 'boot' and
    # 'live' images, so 'dvd'/'netinst' can never appear here - confirm
    # whether the query should also include them.
    for arch in arches:
        okimgs = (img for img in images if img.arch == arch and
                  img.imagetype in ('dvd', 'boot', 'netinst'))
        bestscore = 0
        bestimg = None
        for img in okimgs:
            score = _score_universal_image(img)
            if score > bestscore:
                bestimg = img
                bestscore = score
        if not bestimg:
            # BUGFIX: placeholder was '{0)' (mismatched brace), which
            # made format() raise ValueError instead of printing.
            print("No universal tests image found for {0}!".format(arch))
            continue
        print("Running universal tests for {0} with {1}!".format(
            arch, bestimg.desc))
        isoname = download_image(bestimg)
        job_ids = run_openqa_jobs(isoname, 'universal', arch, build)
        jobs.extend(job_ids)

    # Now schedule per-image jobs.
    for image in images:
        isoname = download_image(image)
        flavor = '_'.join((image.payload, image.imagetype))
        job_ids = run_openqa_jobs(isoname, flavor, image.arch, build)
        jobs.extend(job_ids)
    return jobs
|
|
|
|
## SUB-COMMAND FUNCTIONS
|
|
|
|
def run_current(args, wiki):
    """run OpenQA for current release validation event, if we have
    not already done it.
    """
    jobs, _ = jobs_from_current(wiki)
    # wait for jobs to finish and display results
    if jobs:
        # BUGFIX: was the Python-2-only statement 'print jobs',
        # inconsistent with the print() call style used everywhere
        # else in this file.
        print(jobs)
        report_results(jobs)
    sys.exit()
|
|
|
|
def run_compose(args, wiki=None):
    """run OpenQA on a specified compose, optionally reporting results
    if a matching wikitcms ValidationEvent is found by relval/wikitcms
    """
    # get the fedfind release object
    try:
        ff_release = fedfind.release.get_release(
            release=args.release, milestone=args.milestone,
            compose=args.compose)
    except ValueError as err:
        # Exit with fedfind's error message. str(err) rather than
        # err[0]: indexing an exception is a fragile Python-2-only
        # idiom.
        sys.exit(str(err))

    print("Running on compose: {0}".format(ff_release.version))
    if args.arch:
        jobs = jobs_from_fedfind(ff_release, [args.arch])
    else:
        jobs = jobs_from_fedfind(ff_release)
    print(jobs)
    if args.submit_results:
        report_results(jobs)
    sys.exit()
|
|
|
|
def run_all(args, wiki=None):
    """Do everything we can: test current validation event compose if
    it's new, and test both Rawhide and Branched nightlies if they
    exist and aren't the same as the 'current' compose.
    """
    skip = ''

    # Run for 'current' validation event.
    (jobs, currev) = jobs_from_current(wiki)
    print("Jobs from current validation event: {0}".format(jobs))

    utcdate = datetime.datetime.utcnow()
    if args.yesterday:
        utcdate = utcdate - datetime.timedelta(days=1)
    if currev and currev.compose == utcdate.strftime('%Y%m%d'):
        # Don't schedule tests for the same compose as both "today's
        # nightly" and "current validation event"
        skip = currev.milestone

    # Run for day's Rawhide nightly (if not same as current event.)
    if skip.lower() != 'rawhide':
        try:
            rawhide_ffrel = fedfind.release.get_release(
                release='Rawhide', compose=utcdate)
            rawjobs = jobs_from_fedfind(rawhide_ffrel)
            print("Jobs from {0}: {1}".format(rawhide_ffrel.version, rawjobs))
            jobs.extend(rawjobs)
        except ValueError as err:
            print("Rawhide image discovery failed: {0}".format(err))

    # Run for day's Branched nightly (if not same as current event.)
    # We must guess a release for Branched, fedfind cannot do so. Best
    # guess we can make is the same as the 'current' validation event
    # compose (this is why we have jobs_from_current return currev).
    # BUGFIX: guard on currev - when wikitcms is unavailable currev is
    # None, and currev.release raised an AttributeError the ValueError
    # handler did not catch.
    if currev and skip.lower() != 'branched':
        try:
            branched_ffrel = fedfind.release.get_release(
                release=currev.release, milestone='Branched', compose=utcdate)
            branchjobs = jobs_from_fedfind(branched_ffrel)
            print("Jobs from {0}: {1}".format(branched_ffrel.version,
                                              branchjobs))
            jobs.extend(branchjobs)
        except ValueError as err:
            print("Branched image discovery failed: {0}".format(err))
    if jobs:
        report_results(jobs)
    sys.exit()
|
|
|
|
if __name__ == "__main__":
    # Shared help text for the --test flag every sub-command accepts.
    test_help = "Operate on the staging wiki (for testing)"
    parser = argparse.ArgumentParser(description=(
        "Run OpenQA tests for a release validation test event."))
    subparsers = parser.add_subparsers()

    # 'current' sub-command: run for the current validation event only
    # if it has not been run already (dispatches to run_current).
    parser_current = subparsers.add_parser(
        'current', description="Run for the current event, if needed.")
    parser_current.add_argument(
        '-t', '--test', help=test_help, required=False, action='store_true')
    parser_current.set_defaults(func=run_current)

    # 'compose' sub-command: run for one explicitly identified compose
    # (dispatches to run_compose).
    parser_compose = subparsers.add_parser(
        'compose', description="Run for a specific compose (TC/RC or nightly)."
        " If a matching release validation test event can be found and "
        "--submit-results is passed, results will be reported.")
    parser_compose.add_argument(
        '-r', '--release', type=int, required=False, choices=range(12, 100),
        metavar="12-99", help="Release number of a specific compose to run "
        "against. Must be passed for validation event discovery to succeed.")
    parser_compose.add_argument(
        '-m', '--milestone', help="The milestone to operate on (Alpha, Beta, "
        "Final, Branched, Rawhide). Must be specified for a TC/RC; for a "
        "nightly, will be guessed if not specified", required=False,
        choices=['Alpha', 'Beta', 'Final', 'Branched', 'Rawhide'])
    parser_compose.add_argument(
        '-c', '--compose', help="The version to run for; either the compose "
        "(for a TC/RC), or the date (for a nightly build)", required=False,
        metavar="{T,R}C1-19 or YYYYMMDD")
    parser_compose.add_argument(
        '-a', '--arch', help="The arch to run for", required=False,
        choices=('x86_64', 'i386'))
    parser_compose.add_argument(
        '-s', '--submit-results', help="Submit the results to the release "
        "validation event for this compose, if possible", required=False,
        action='store_true')
    parser_compose.add_argument(
        '-t', '--test', help=test_help, required=False, action='store_true')
    parser_compose.set_defaults(func=run_compose)

    # 'all' sub-command: current event plus today's (or yesterday's)
    # Rawhide and Branched nightlies (dispatches to run_all).
    parser_all = subparsers.add_parser(
        'all', description="Run for the current validation event (if needed) "
        "and today's Rawhide and Branched nightly's (if found). 'Today' is "
        "calculated for the UTC time zone, no matter the system timezone.")
    parser_all.add_argument(
        '-y', '--yesterday', help="Run on yesterday's nightlies, not today's",
        required=False, action='store_true')
    parser_all.add_argument(
        '-t', '--test', help=test_help, required=False, action='store_true')
    parser_all.set_defaults(func=run_all)

    args = parser.parse_args()

    # Build the wikitcms Wiki object if the module imported at the top
    # of the file; a NameError here means the import failed, in which
    # case we proceed without event discovery / result reporting.
    wiki = None
    if args.test:
        # --test targets the staging wiki instead of production.
        try:
            wiki = wikitcms.wiki.Wiki(('https', 'stg.fedoraproject.org'),
                                      '/w/')
        except NameError:
            pass
    else:
        try:
            wiki = wikitcms.wiki.Wiki(('https', 'fedoraproject.org'), '/w/')
        except NameError:
            pass
    args.func(args, wiki)
|