From 0fd12da6dedee7acd37f33cca1c1bff5efd8ad6f Mon Sep 17 00:00:00 2001 From: Niels Breet Date: Thu, 17 Aug 2017 12:50:25 +0300 Subject: [PATCH 1/2] Sync production changes back to git --- participants/bz.py | 7 +- participants/check_has_relevant_changelog.py | 6 +- participants/check_mentions_bug.py | 4 +- participants/check_package_built_at_source.py | 8 +- participants/check_package_is_complete.py | 13 +- participants/check_valid_changes.py | 17 +- participants/defineimage.py | 163 ++++++++++++++++++ participants/do_build_trial.py | 119 +++++++++++++ participants/get_build_results.py | 3 +- participants/get_notify_recipients_obs.py | 10 +- participants/get_package_boss_conf.py | 4 +- participants/get_provides.py | 8 +- participants/get_relevant_changelog.py | 95 +++++++--- participants/is_repo_published.py | 40 +++-- participants/notify_irc.py | 10 +- participants/qareports.py | 33 ++-- participants/select_test_packages.py | 9 +- participants/setup_build_trial.py | 15 +- participants/update_meta.py | 18 +- participants/update_patterns.py | 1 + participants/wait_for_repo_published.py | 159 +++++++++++++++++ 21 files changed, 647 insertions(+), 95 deletions(-) mode change 100644 => 100755 participants/bz.py mode change 100644 => 100755 participants/check_has_relevant_changelog.py mode change 100644 => 100755 participants/check_mentions_bug.py mode change 100644 => 100755 participants/check_package_built_at_source.py mode change 100644 => 100755 participants/check_package_is_complete.py mode change 100644 => 100755 participants/check_valid_changes.py create mode 100755 participants/defineimage.py create mode 100755 participants/do_build_trial.py mode change 100644 => 100755 participants/get_build_results.py mode change 100644 => 100755 participants/get_notify_recipients_obs.py mode change 100644 => 100755 participants/get_package_boss_conf.py mode change 100644 => 100755 participants/get_provides.py mode change 100644 => 100755 participants/get_relevant_changelog.py mode change 100644 => 100755 participants/notify_irc.py mode change 100644 => 100755 participants/qareports.py mode change 100644 => 100755 participants/select_test_packages.py mode change 100755 => 100644 participants/setup_build_trial.py mode change 100644 => 100755 participants/update_patterns.py create mode 100644 participants/wait_for_repo_published.py diff --git a/participants/bz.py b/participants/bz.py old mode 100644 new mode 100755 index 3d800a6..04cddd8 --- a/participants/bz.py +++ b/participants/bz.py @@ -66,6 +66,7 @@ https://wiki.mozilla.org/index.php?title=Bugzilla:REST_API:Methods """ +import os import re from urllib2 import HTTPError import datetime @@ -241,7 +242,10 @@ def handle_mentioned_bug(bugzilla, bugnum, extra_data, wid, trigger): if wid.params.comment: comment = wid.params.comment elif wid.params.template: - with open(wid.params.template) as fileobj: + with open(os.path.join(bugzilla["template_store"], wid.params.template)) as fileobj: + comment = prepare_comment(fileobj.read(), wid.fields.as_dict(), extra_data) + elif wid.fields.reports and wid.fields.reports.bz_comment_template: + with open(os.path.join(bugzilla["template_store"], wid.fields.reports.bz_comment_template)) as fileobj: comment = prepare_comment(fileobj.read(), wid.fields.as_dict(), extra_data) elif bugzilla['template']: comment = prepare_comment(bugzilla["template"], wid.fields.as_dict(), extra_data) @@ -374,6 +378,7 @@ def handle_action(self, action, wid, bugs): matches = set([(match.group(), match.group('key')) for match in 
bugzilla['compiled_re'].finditer(entry)]) for remote_re in bugzilla['remote_tags_re']: for match in remote_re.finditer(entry): + print match.group() tracking_bugs = bugzilla['interface'].tracking_bugs(match.group()) for tracker in tracking_bugs[match.group()]: matches.add((match.group(), str(tracker))) diff --git a/participants/check_has_relevant_changelog.py b/participants/check_has_relevant_changelog.py old mode 100644 new mode 100755 index 61cef70..53e70d0 --- a/participants/check_has_relevant_changelog.py +++ b/participants/check_has_relevant_changelog.py @@ -63,8 +63,12 @@ def handle_wi(self, wid): if not actions: raise RuntimeError("Missing mandatory field 'ev.actions'") - result = True + # skip requests marked as revert + if "revert" in wid.fields.ev.description.lower(): + wid.result = True + return + result = True # Assert each package being submitted has relevant changelog entries. for action in actions: pkg_result, _ = contains_relevant_changelog(action, wid) diff --git a/participants/check_mentions_bug.py b/participants/check_mentions_bug.py old mode 100644 new mode 100755 index 294bf56..6a9321a --- a/participants/check_mentions_bug.py +++ b/participants/check_mentions_bug.py @@ -110,7 +110,7 @@ def handle_wi(self, wi): result = False f.msg.append("No bugs mentioned in relevant changelog of "\ "package %s, please refer to a bug using: "\ - "%s" % (package, - bugzilla['regexp'])) + "%s or %s" % (package, + bugzilla['regexp'], bugzilla['remote_tags'])) wi.result = result diff --git a/participants/check_package_built_at_source.py b/participants/check_package_built_at_source.py old mode 100644 new mode 100755 index 13ad74d..9fe060b --- a/participants/check_package_built_at_source.py +++ b/participants/check_package_built_at_source.py @@ -90,12 +90,12 @@ def quality_check(self, action, wid): if status != "succeeded": msg.append("Build status is '%s' for %s (against %s)." 
% (status, source_build, build_against)) - if status != "excluded": + if status not in ["excluded", "disabled"]: result = False - if targets: - msg.append("Not build against %s" % ", ".join(targets)) - result = False + #if targets: + # msg.append("Not build against %s" % ", ".join(targets)) + # result = False if msg: return result, " ".join(msg) diff --git a/participants/check_package_is_complete.py b/participants/check_package_is_complete.py old mode 100644 new mode 100755 index 3bdadfe..9eb5166 --- a/participants/check_package_is_complete.py +++ b/participants/check_package_is_complete.py @@ -117,15 +117,17 @@ def get_rpm_sources(self, action, filelist): spec = self.obs.getFile(action["sourceproject"], action["sourcepackage"], spec_name, action["sourcerevision"]) - print spec except Exception, exobj: raise SourceError("Failed to fetch spec file %s/%s/%s rev %s: %s" % (action["sourceproject"], action["sourcepackage"], spec_name, action["sourcerevision"], exobj)) + import hashlib + print "Spec file retrieved from", action["sourceproject"], action["sourcepackage"], action["sourcerevision"], ": ", hashlib.md5(spec).hexdigest() try: - tmp_spec = NamedTemporaryFile(mode="w") + tmp_spec = NamedTemporaryFile(mode="w", delete=False) tmp_spec.file.write(spec) tmp_spec.file.flush() + print "Parsing spec file from", tmp_spec.name spec_obj = parse_spec(tmp_spec.name) sources = [os.path.basename(name) for name, _, _ in spec_obj.sources] @@ -169,7 +171,6 @@ def check_source_files(self, action, _wid, filelist): msg = "" try: sources.update(self.get_rpm_sources(action, filelist)) - print sources except SourceError, exobj: msg += str(exobj) try: @@ -177,6 +178,8 @@ def check_source_files(self, action, _wid, filelist): except SourceError, exobj: msg += str(exobj) extras = [] + print sources + print filelist for name in filelist: if name.startswith("_service"): name = name.split(":")[-1] @@ -187,9 +190,13 @@ def check_source_files(self, action, _wid, filelist): if name not in sources: if name.endswith("-rpmlintrc") and not name == "%s-rpmlintrc" % action["sourcepackage"]: continue + if name == "_src": + continue extras.append(name) else: sources.remove(name) + if "_src" in sources: + sources.remove("_src") if extras: msg += "\nExtra source files: %s. 
" % ", ".join(extras) if sources: diff --git a/participants/check_valid_changes.py b/participants/check_valid_changes.py old mode 100644 new mode 100755 index 2869a18..ce2ddc0 --- a/participants/check_valid_changes.py +++ b/participants/check_valid_changes.py @@ -47,7 +47,7 @@ import re import time import rpm -from rpmUtils.miscutils import compareEVR +from rpmUtils.miscutils import compareEVR, stringToVersion from tempfile import NamedTemporaryFile try: @@ -203,7 +203,12 @@ def setup_obs(self, namespace): def _get_spec_file(self, prj, pkg, rev): - file_list = self.obs.getPackageFileList(prj, pkg, revision=rev) + file_list = [] + try: + file_list = self.obs.getPackageFileList(prj, pkg, revision=rev) + except: + pass + specs = [ fil for fil in file_list if fil.endswith(".spec")] if len(specs) > 1: @@ -220,7 +225,8 @@ def _get_spec_file(self, prj, pkg, rev): spec = self.obs.getFile(prj, pkg, fil, revision=rev) specob = None - + print fil + print spec with NamedTemporaryFile() as specf: specf.write(spec) specf.flush() @@ -248,12 +254,13 @@ def check_spec_version_match(self, version, prj, pkg, rev=None): def check_version_inc(self, version, prj, pkg): error, specob = self._get_spec_file(prj, pkg, None) if error: + print error #don't care if we can't get target package spec return None src_hdrs = [pkg for pkg in specob.packages if pkg.header.isSource()][0] - spec_version = src_hdrs.header[rpm.RPMTAG_VERSION] - version_comparison = compareEVR(('', version.split('-')[0], ''), ('', spec_version, '')) + spec_version = "%s-%s" % (src_hdrs.header[rpm.RPMTAG_VERSION], src_hdrs.header[rpm.RPMTAG_RELEASE]) + version_comparison = compareEVR(stringToVersion(version), stringToVersion(spec_version)) if version_comparison == 1: return None else: diff --git a/participants/defineimage.py b/participants/defineimage.py new file mode 100755 index 0000000..1fff029 --- /dev/null +++ b/participants/defineimage.py @@ -0,0 +1,163 @@ +#!/usr/bin/python +""" This participant is used to select extra packages that are added to an image +according to its definition. Currently only "testing" images can be defined. +To define a testing image for a set of packages (persumably being promoted) any +-tests binary package produced by them are selected. +In addition their reverse dependencies are worked out and then all +-tests binary packages produced by them are also selected. + +:term:`Workitem` fields IN + +:Parameters: + test_project(string): + The test project which contains the packages going to be tested + repository(string): + The repository in above said project + image.arch(string): + The arch of the image going to be built + image.packages(list): + Names of packages going to be tested + + +:term:`Workitem` params IN + +:Parameters: + image_type(string): + The type of image to be defined (only "testing" for now) + +:term:`Workitem` fields OUT + +:Returns: + result(boolean): + True if everything was OK, False otherwise. 
+ image.packages(list): + Extends the list of packages going to be included in the image + +""" +from buildservice import BuildService + +def select_subpkgs(subpkgs, package): + """This function implements selection of test packages from a list of + packages based on the packagename-tests naming convention + + :param subpkgs: the packages to select from + :type subpkgs: list + + :param package: the name of the package that is going to be tested + :type package: string + """ + + selected = [] + for bpk in subpkgs: + if bpk.endswith('-debuginfo'): + continue + if bpk.endswith('-devel'): + continue + if bpk.endswith('-doc'): + continue + if bpk.endswith('-tests'): + if package.endswith('-tests') and bpk == package: + selected.append(bpk) + continue + if bpk == package + '-tests': + selected.append(bpk) + continue + if bpk == package + '-unit-tests' : + selected.append(bpk) + continue + selected.append(bpk) + return selected + +class ParticipantHandler(object): + + """ Participant class as defined by the SkyNET API """ + + def __init__(self): + self.obs = None + self.oscrc = None + self.image_options = {} + + def handle_wi_control(self, ctrl): + """ job control thread """ + pass + + def handle_lifecycle_control(self, ctrl): + """ participant control thread """ + if ctrl.message == "start": + if ctrl.config.has_option("obs", "oscrc"): + self.oscrc = ctrl.config.get("obs", "oscrc") + if ctrl.config.has_option("defineimage", "imagetypes"): + image_types = ctrl.config.get("defineimage", + "imagetypes").split(',') + for itype in image_types: + self.image_options[itype] = dict(ctrl.config.items(itype)) + + def setup_obs(self, namespace): + """ setup the Buildservice instance using the namespace as an alias + to the apiurl """ + + self.obs = BuildService(oscrc=self.oscrc, apiurl=namespace) + + def handle_wi(self, wid): + """ actual job thread """ + wid.result = False + + itype = wid.params.image_type + + if not wid.fields.msg: + wid.fields.msg = [] + + if not itype: + raise RuntimeError("Missing mandatory parameter 'image_type'") + + if not itype in self.image_options.keys() : + raise RuntimeError("Invalid image_type parameter %s" % itype) + + missing = [name for name in ["test_project", "repository", "image"] + if not getattr(wid.fields, name, None)] + missing.extend(["image." 
+ name for name in ["packages", "arch"] + if not getattr(wid.fields.image, name, None)]) + if missing: + raise RuntimeError("Missing mandatory field(s): %s" % + ", ".join(missing)) + + self.setup_obs(wid.fields.ev.namespace) + + if itype == "testing": + prj = wid.fields.test_project + repo = wid.fields.repository + arch = wid.fields.image.arch + packages = wid.fields.image.packages + + # get reverse dependencies of each package + additional_pkgs = [] + for pkg in packages: + for pkg_revdep in self.obs.getPackageReverseDepends(prj, repo, + pkg, arch): + if pkg_revdep not in additional_pkgs: + additional_pkgs.append(pkg_revdep) + + packages.extend(additional_pkgs) + + # get subpackages of each package and select the ones we are + # interested in + selected = [] + for pkg in packages : + pkg_subpkgs = self.obs.getPackageSubpkgs(prj, repo, pkg, arch) + subpkgs = select_subpkgs(pkg_subpkgs, pkg) + for subpkg in subpkgs: + if subpkg not in selected: + selected.append(subpkg) + + if "always_include" in self.image_options[itype]: + for inc in self.image_options[itype]["always_include"]\ + .split(','): + if inc and inc not in selected: + selected.append(inc) + + wid.fields.image.packages.extend(selected) + + wid.fields.msg.append('Defined %s image includes %s' % + (itype, (", ").join(selected))) + + wid.result = True diff --git a/participants/do_build_trial.py b/participants/do_build_trial.py new file mode 100755 index 0000000..9b77914 --- /dev/null +++ b/participants/do_build_trial.py @@ -0,0 +1,119 @@ +#!/usr/bin/python +"""Copies the set of packages that are being promoted by a submit request to +a trial build area. + +.. warning:: + The OBS user configured in the oscrc file used needs to have maintainership + rights on the trial build project. + +:term:`Workitem` fields IN: + +:Parameters: + ev.actions(list): + Submit request data structure :term:`actions` + ev.id: + Submit request id + +:term:`Workitem` params IN + +:Parameters: + build_in: + The trial build area (project) + linked(Boolean): + Set it to True if the trial build area is a project Link to the + destination. This will cause any leftover binary packages to be wiped. + Read more about prj_links : + http://en.opensuse.org/openSUSE:Build_Service_Concept_project_linking + +:term:`Workitem` fields OUT: + +:Returns: + result(Boolean): + True if everything went OK, False otherwise. + +""" + +from buildservice import BuildService +from urllib2 import HTTPError + +class ParticipantHandler(object): + """Participant class as defined by the SkyNET API.""" + + def __init__(self): + self.oscrc = None + self.obs = None + + def handle_wi_control(self, ctrl): + """Job control thread.""" + pass + + def handle_lifecycle_control(self, ctrl): + """Participant control thread.""" + if ctrl.message == "start": + if ctrl.config.has_option("obs", "oscrc"): + self.oscrc = ctrl.config.get("obs", "oscrc") + + def setup_obs(self, namespace): + """Setup the Buildservice instance + + Using the namespace as an alias to the apiurl. + """ + + self.obs = BuildService(oscrc=self.oscrc, apiurl=namespace) + + def build_trial(self, wid): + """Copy packages from source to build_in: + + * Since we are not doing a cross instance copy the src and dst + apiurl are the same. + * The request is made from the source to the eventual destination. + So using the request_src project in the build_trial and copying to + the build_in will work. + * This also uses the specific version mentioned in the request. 
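The new defineimage participant is driven entirely by configuration: the [defineimage] section lists the known image types and each type has its own section whose options (for example always_include) end up in self.image_options. A small illustration of that lookup, assuming ctrl.config behaves like a plain Python 2 ConfigParser (which the has_option/get/items calls suggest); the option values here are made up:

    from ConfigParser import SafeConfigParser

    # Equivalent of a config file with a [defineimage] section plus one
    # section per image type.
    conf = SafeConfigParser()
    conf.add_section("defineimage")
    conf.set("defineimage", "imagetypes", "testing")
    conf.add_section("testing")
    conf.set("testing", "always_include", "testrunner-lite")

    image_options = {}
    for itype in conf.get("defineimage", "imagetypes").split(","):
        image_options[itype] = dict(conf.items(itype))

    # image_options == {'testing': {'always_include': 'testrunner-lite'}}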
+ * if build_in is a project link clean it from any possible left over + binaries. + + """ + + wid.result = False + rid = wid.fields.ev.id + actions = wid.fields.ev.actions + build_in = wid.params.build_in + + # wipeBinaries errors if there are no packages to wipe + #if wid.params.linked: + # pkgs = self.obs.getPackageList(build_in) + # if pkgs: + # self.obs.wipeBinaries(build_in) + # for pkg in pkgs: + # self.obs.deletePackage(build_in, pkg) + + for act in actions: + if act['type'] == 'submit': + self.obs.copyPackage(self.obs.apiurl, + act['sourceproject'], + act['sourcepackage'], + self.obs.apiurl, + build_in, + act['targetpackage'], + client_side_copy = False, + keep_maintainers = False, + keep_develproject = False, + expand = True, + revision = act['sourcerevision'], + comment = "Trial build for request %s" % rid) + # TODO: figure out a way to simulate deletions in the trial build + + self.log.info("Trial build for request %s" % rid) + wid.result = True + + def handle_wi(self, wid): + """Actual job thread.""" + + self.setup_obs(wid.fields.ev.namespace) + try: + self.build_trial(wid) + except HTTPError as err: + if err.code == 403: + self.log.info("Is the BOSS user (see /etc/skynet/oscrc) enabled as a maintainer in the relevant project") + raise err diff --git a/participants/get_build_results.py b/participants/get_build_results.py old mode 100644 new mode 100755 index b099584..8823b1a --- a/participants/get_build_results.py +++ b/participants/get_build_results.py @@ -53,7 +53,8 @@ def get_failures(results, archs): # If we succeed then continue to the next package. # In a link project, unbuilt packages from the link-source # are reported as 'excluded' (which is as good as success) - if results[arch][pkg] in [ "succeeded", "excluded"]: + # another OK state is 'disabled' + if results[arch][pkg] in [ "succeeded", "excluded", "disabled" ]: continue else: # a broken new package is also a new failure diff --git a/participants/get_notify_recipients_obs.py b/participants/get_notify_recipients_obs.py old mode 100644 new mode 100755 index 67a0194..6e8f773 --- a/participants/get_notify_recipients_obs.py +++ b/participants/get_notify_recipients_obs.py @@ -140,11 +140,11 @@ def handle_wi(self, wid): for action in wid.fields.ev.actions: found = False for entry in action.get("relevant_changelog", ""): - for addr in emailre.findall(entry): - addr = addr.replace("<","").replace(">","") - mailaddr.add(addr) - found = True - if found: break + for addr in emailre.findall(entry): + addr = addr.replace("<","").replace(">","") + mailaddr.add(addr) + found = True + if found: break else: raise RuntimeError("Unknown role token: %s" % role) diff --git a/participants/get_package_boss_conf.py b/participants/get_package_boss_conf.py old mode 100644 new mode 100755 index 8d4b38d..6edc054 --- a/participants/get_package_boss_conf.py +++ b/participants/get_package_boss_conf.py @@ -101,6 +101,7 @@ def _get_boss_conf(self, project, package, revision=None): boss.conf """ try: + self.log.info("getting %s %s" % (project, package)) contents = self.obs.getFile( project, package, "boss.conf", revision) except HTTPError, exobj: @@ -109,7 +110,8 @@ def _get_boss_conf(self, project, package, revision=None): contents = None else: # something else failed on OBS - raise + self.log.info("WTF!") + contents = None except Exception: # buildservice raises all kinds of weird exceptions self.log.info("Failed to get boss.conf for %s %s revision %s" % \ diff --git a/participants/get_provides.py b/participants/get_provides.py old mode 100644 
new mode 100755 index ab7c392..fe857d6 --- a/participants/get_provides.py +++ b/participants/get_provides.py @@ -126,8 +126,12 @@ def __get_provides(self, project, packages, targets, provide): binaries = self.obs.getBinaryList(project, target, package) for binary in binaries: self.log.info("Checking %s from %s in %s" % ( binary, package, target)) - bininfo = self.obs.getBinaryInfo(project, target, package, - binary) + try: + bininfo = self.obs.getBinaryInfo(project, target, + package, binary) + except Exception, exc: + print "Skipping %s:%s" % (package, exc) + if bininfo.get("arch", "src") == "src": continue for name in bininfo.get("provides", []): diff --git a/participants/get_relevant_changelog.py b/participants/get_relevant_changelog.py old mode 100644 new mode 100755 index 20b5830..462b1a7 --- a/participants/get_relevant_changelog.py +++ b/participants/get_relevant_changelog.py @@ -20,7 +20,13 @@ if "last_revision" the relevant changelog entries will be obtained by comparing to the previous revision of the file in the destination. Otherwise it compares the changes file from source to the destination. - + + project(string): + Project name to get package from. If specified then ev.actions is not needed + + package(string): + Package name to get changelog from. If specified then ev.actions is not needed + :term:`Workitem` fields OUT: :Returns: @@ -40,7 +46,26 @@ from buildservice import BuildService from urllib2 import HTTPError -_blankre = re.compile(r"^\W*$") +_blankre = re.compile(r"^\s*$") + + +def _diff_chlog(oldlogs, newlogs): + + chlog = [] + skip = False + for line in newlogs: + if line.startswith("*"): + if not line in oldlogs: + chlog.append(line) + skip = False + else: + skip = True + continue + else: + if not skip: + chlog.append(line) + + return chlog def get_relevant_changelog(src_chlog, dst_chlog, new_count=None): """ Diff two changelogs and return the list of lines that are only in @@ -63,23 +88,27 @@ def get_relevant_changelog(src_chlog, dst_chlog, new_count=None): count += 1 relchlog.append(line) else: + relchlog = _diff_chlog(dst_chlog.splitlines(), src_chlog.splitlines()) # Get relevant lines based on diff - diff_txt = difflib.unified_diff(dst_chlog.splitlines(), - src_chlog.splitlines()) + #diff_txt = difflib.unified_diff(dst_chlog.splitlines(), + # src_chlog.splitlines(), n=0) # Convert the diff text to a list of lines discarding the diff header - diff_list = list(diff_txt)[3:] - print diff_list + #diff_list = list(diff_txt)[3:] + #print diff_list # Logic to compare changelogs and extract relevant entries - for line in diff_list: - if line.startswith("+"): - entry = line.replace("+", "", 1) - relchlog.append(entry) - elif line and line[0] in ("-"): + #for line in diff_list: + # if line and line.startswith("+"): + # entry = line.replace("+", "", 1) + # relchlog.append(entry) + # elif line and line[0] in ("-"): # skip removed lines - continue - elif line and line[0] in (" "): + # continue + # elif line and line[0] in (" "): # As soon as we hit a matching line we are done - break + # entry = line.replace(" ", "", 1) + # relchlog.append(entry) + # elif line and line[0] in ("@"): + # break # Now take the list of lines and create a list of changelog # entries by splitting on blanks @@ -143,7 +172,20 @@ def get_relevant_changelogs(self, wid): wid.result = False if not wid.fields.ev.actions: - raise RuntimeError("Missing mandatory field 'ev.actions'") + if wid.params.project and wid.params.package: + action = { 'type' : 'submit', + 'targetproject' : wid.params.project, + 
'targetpackage' : wid.params.package, + 'sourceproject' : wid.params.project, + 'sourcepackage' : wid.params.package, + 'sourcerevision': "latest" + } + + wid.fields.ev.actions = [action] + else: + raise RuntimeError("Missing mandatory field 'ev.actions'") + + actions = wid.fields.ev.actions use_rev = False if wid.params.compare and wid.params.compare == "last_revision": @@ -156,7 +198,7 @@ def get_relevant_changelogs(self, wid): except ValueError, e: raise RuntimeError("Wrong optional field new_changelog_count, should be an integer") - for action in wid.fields.ev.actions: + for action in actions: if action['type'] != "submit": continue if action.get("target", None): @@ -174,27 +216,36 @@ def get_relevant_changelogs(self, wid): src_chlog = "" if use_rev: + # get commit history commit_log = self.obs.getCommitLog(target_project, target_package) + + rev = None # use the second last commit revision if available if len(commit_log) > 1 : - src_chlog = self.get_changes_file(target_project, - target_package, - str(commit_log[1][0])) + rev = str(commit_log[1][0]) + + dst_chlog = self.get_changes_file(target_project, + target_package, + rev) + + src_chlog = self.get_changes_file(target_project, + target_package) + else: src_chlog = self.get_changes_file(source_project, source_package, source_revision) - dst_chlog = self.get_changes_file(target_project, - target_package) + dst_chlog = self.get_changes_file(target_project, + target_package) rel_chlog = get_relevant_changelog(src_chlog, dst_chlog, new_count) print rel_chlog if rel_chlog: - action["relevant_changelog"] = rel_chlog + action["relevant_changelog"] = [ entry.decode('UTF-8', 'replace') for entry in rel_chlog ] wid.result = True diff --git a/participants/is_repo_published.py b/participants/is_repo_published.py index d450910..1d4aa08 100644 --- a/participants/is_repo_published.py +++ b/participants/is_repo_published.py @@ -21,6 +21,7 @@ True if repository(ies are) is published, False otherwise. 
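The _diff_chlog helper added to get_relevant_changelog.py above replaces the old unified-diff based extraction: it keeps every changelog entry whose header line (the line starting with "*") is not present in the destination changelog, together with that entry's body lines. A usage illustration with made-up changelog content:

    old_log = [
        "* Mon May 01 2017 Example Dev <dev@example.com> - 1.1.0",
        "- Initial packaging",
    ]
    new_log = [
        "* Wed Aug 16 2017 Example Dev <dev@example.com> - 1.2.0",
        "- Fix crash on startup (JB#12345)",
        "* Mon May 01 2017 Example Dev <dev@example.com> - 1.1.0",
        "- Initial packaging",
    ]

    _diff_chlog(old_log, new_log)
    # -> ['* Wed Aug 16 2017 Example Dev <dev@example.com> - 1.2.0',
    #     '- Fix crash on startup (JB#12345)']

get_relevant_changelog() then splits the returned lines into entries on blank lines, and the handler decodes each entry as UTF-8 (with replacement) before storing it on the action.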
""" +import sys, traceback from copy import copy import datetime from boss.obs import BuildServiceParticipant @@ -31,7 +32,7 @@ class State(object): def __init__(self, obs, project): self.checked = None #FIXME: make it configurable - self.lifetime = datetime.timedelta(seconds=60) + self.lifetime = datetime.timedelta(seconds=15) self._obs = obs self.project = project self._source_state = None @@ -48,7 +49,7 @@ def expired(self): def publish_states(self): """caching property representing the publish state of a project""" - if self.expired: + if self._publish_states is None or self.expired: print "refreshing publish state of %s" % self.project # Returns dict {"repo/arch" : "state"} publish_states = {} @@ -56,7 +57,8 @@ def publish_states(self): for repo_arch, state in all_states.items(): repo , arch = repo_arch.split("/") # unpublished means that repository publishing is disabled - publish_states[(repo, arch)] = state.endswith("published") or "broken" + print(repo, arch, state) + publish_states[(repo, arch)] = state.endswith("published") or state == "broken" self._publish_states = publish_states return self._publish_states @@ -65,7 +67,7 @@ def publish_states(self): def source_state(self): """caching property representing the source state of a project""" - if self.expired: + if self._source_state is None or self.expired: print "refreshing source state of %s" % self.project states = {} for package in self._obs.getPackageList(self.project): @@ -74,10 +76,21 @@ def source_state(self): states[package] = False try: - _ = self._obs.getPackageFileList(self.project, package) - states[package] = True + filelist = self._obs.getPackageFileList(self.project, package) + print filelist + if "_service" in filelist: + x = self._obs.getServiceState(self.project, package) + print x + if x == "succeeded": + states[package] = True + else: + states[package] = True except Exception, exc: + exc_type, exc_value, exc_traceback = sys.exc_info() + traceback.print_exc(file=sys.stdout) print exc + if "failed" in str(exc): + states[package] = True self._source_state = states @@ -111,6 +124,10 @@ def ready(self, repository=None, architecture=None, exclude_repos=None, # _ = len(self.source_state and self.source_state.keys()) # get reference to source_state dict source_state = copy(self.source_state) + if source_state is None: + _ = len(self.source_state and self.source_state.keys()) + source_state = copy(self.source_state) + # if not packages were specified care about all of them if not packages: packages = source_state.keys() @@ -165,16 +182,15 @@ def handle_lifecycle_control(self, ctrl): @BuildServiceParticipant.setup_obs def handle_wi(self, wid): """Actual job thread.""" - + wid.result = False # Decide which packages to care about when checking source state - packages = None + # empty list will mean checking all packages + # this is useful for checking trial build project + packages = set() # OBS request with actions if wid.fields.ev and wid.fields.ev.actions: - # empty list will mean checking all packages - # this is useful for checking trial build project - packages = set() for action in wid.fields.ev.actions: # only check submit actions if action["type"] == "submit": @@ -189,7 +205,7 @@ def handle_wi(self, wid): packages.add(action['sourcepackage']) state = self.registry.register(self.obs, wid.params.project) - print packages + print "packages is %s" % str(packages) wid.result = state.ready(wid.params.repository, wid.params.arch, wid.fields.exclude_repos, diff --git a/participants/notify_irc.py 
b/participants/notify_irc.py old mode 100644 new mode 100755 index be10ef4..5ea79b6 --- a/participants/notify_irc.py +++ b/participants/notify_irc.py @@ -7,8 +7,14 @@ def notify(self, msg=["No 'message' for notify_irc"], channel="#meego-boss", hig # It depends on a reachable supybot instance with the Notify plugin ircbot = socket.socket() ircbot.connect((self.ircbot_host, self.ircbot_port)) - for m in msg: - ircbot.send("%s %s%s\n" % (channel, highlight, m)) + for item in msg: + # split possible lines + mls = item.splitlines() + for m in mls: + try: + ircbot.send("%s %s%s\n" % (channel, highlight, m.strip())) + except: + ircbot.send("%s %sunable to send line, please check logs\n" % (channel, highlight)) ircbot.close() def handle_wi_control(self, ctrl): diff --git a/participants/qareports.py b/participants/qareports.py old mode 100644 new mode 100755 index 16004c3..6de5bb7 --- a/participants/qareports.py +++ b/participants/qareports.py @@ -1,10 +1,4 @@ -import os - - -from qarep.upload import ( - ReportUploader, get_results_files_list, move_results_dir -) - +from qarep.upload import * class ParticipantHandler(object): def handle_wi_control(self, ctrl): @@ -17,9 +11,7 @@ def handle_lifecycle_control(self, ctrl): user = ctrl.config.get("qareports", "user") password = ctrl.config.get("qareports", "password") realm = ctrl.config.get("qareports", "realm") - self.uploader = ReportUploader( - apiurl, auth_token, user, password, realm - ) + self.uploader = ReportUploader(apiurl, auth_token, user, password, realm) def handle_wi(self, wid): @@ -51,22 +43,17 @@ def handle_wi(self, wid): attachments.append((result, open(result).read())) if result_xmls: - url, msg = self.uploader.send_files( - result_xmls, - attachments, - hwproduct=hwproduct, - testtype=testtype, - target=target, - release_version=release_version, - build=build - ) + url, msg = self.uploader.send_files(result_xmls, + attachments, + hwproduct=hwproduct, + testtype=testtype, + target=target, + release_version = release_version, + build = build) wid.fields.qa.results.report_url = url if url and ((f.qa and f.qa.move_results) or (p.move_results)): - move_results_dir( - f.qa.results.results_dir, - os.path.basename(url) - ) + move_results_dir(f.qa.results.results_dir, os.path.basename(url)) if not wid.fields.msg: wid.fields.msg = [] diff --git a/participants/select_test_packages.py b/participants/select_test_packages.py old mode 100644 new mode 100755 index e67ef8d..f403b5a --- a/participants/select_test_packages.py +++ b/participants/select_test_packages.py @@ -36,8 +36,8 @@ (optional) OBS project repository architecture to limit the search using(list of strings): The selection convention to use. "name", "provides" or "pattern" - allow_recursive(bool) TODO: NOT YET IMPLEMENTED! 
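Two behaviours change in is_repo_published.py above: the cached publish and source states are refreshed when they are still empty as well as when they have expired, and the expression interpreting getRepoState() output is fixed (the removed line 'state.endswith("published") or "broken"' always evaluated truthy, because a non-empty string is true). The intended interpretation now boils down to the following; repository names and states in the example are illustrative:

    def publish_done(all_states):
        # all_states maps "repo/arch" to an OBS repository state string,
        # as returned by BuildService.getRepoState().
        done = {}
        for repo_arch, state in all_states.items():
            repo, arch = repo_arch.split("/")
            # "published" and "unpublished" (publishing disabled) both mean
            # the publish cycle has finished; "broken" is accepted as final
            # so the check does not wait forever on a broken repository.
            done[(repo, arch)] = state.endswith("published") or state == "broken"
        return done

    publish_done({"standard/i586": "published",
                  "standard/armv7hl": "publishing"})
    # -> {('standard', 'i586'): True, ('standard', 'armv7hl'): False}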
- shall the patterns expanding be recursive: True recusive, False do not + allow_recursive(bool): + shall the patterns expanding be recursive: True recusive, False do not expand patterns within the pattern :term:`Workitem` fields OUT @@ -69,6 +69,7 @@ def handle_lifecycle_control(self, ctrl): def select_bpkgs(self, project, package, target, using): binaries = self.obs.getBinaryList(project, target, package) + #self.log.info(binaries) selected = {} for binary in binaries: @@ -175,6 +176,8 @@ def handle_wi(self, wid): patterns = wid.fields.qa.test_patterns.as_dict() expanded_patterns = self.obs.expandPatterns(patterns, depth = 4) + print "expanded_patterns:" + print expanded_patterns for pattern, props in expanded_patterns.items(): for rpmpgk in props['requires']: selected.update({rpmpgk: props['provides']}) @@ -186,3 +189,5 @@ def handle_wi(self, wid): wid.result = True + #self.log.info(wid.dump()) + diff --git a/participants/setup_build_trial.py b/participants/setup_build_trial.py old mode 100755 new mode 100644 index d69bd99..2d01fae --- a/participants/setup_build_trial.py +++ b/participants/setup_build_trial.py @@ -305,6 +305,8 @@ def construct_trial(self, trial_project, actions, extra_path=None, extra_links=N for act in actions: # handle delete requests using build disable flags if act['type'] == 'delete' and act['deletepackage'] not in submits: + if not "build" in flags: + flags["build"] = etree.Element("build") flags["build"].append(etree.Element("disable", {"package" : act['deletepackage']})) if act['type'] == 'submit': @@ -346,23 +348,26 @@ def handle_wi(self, wid): trial_project = "%s:SR%s" % (prj_prefix, rid) actions = wid.fields.ev.actions build_trial_groups = wid.fields.build_trial.as_dict().get("groups", {}) - trial_map, trial_groups = self.get_trials(trial_project, build_trial_groups, wid.fields.build_trial.suffix or "") - print trial_map - print trial_groups + suffix = wid.fields.build_trial.suffix or "" + trial_map, trial_groups = self.get_trials(trial_project, build_trial_groups, suffix) + exclude_prjs = wid.fields.build_trial.exclude_prjs or [] + if suffix: + exclude_prjs = [prj + suffix for prj in exclude_prjs] exclude_repos = wid.fields.exclude_repos or [] exclude_archs = wid.fields.exclude_archs or [] wid.result = False self.cache = {} # first construct main trial project - main_actions = [act for act in actions if act["targetproject"] in trial_groups[trial_project]] + main_actions = [act for act in actions if act["targetproject"] in trial_groups[trial_project] and act["targetproject"] not in exclude_prjs] main_links = self.construct_trial(trial_project, main_actions, extra_path=wid.fields.build_trial.extra_path, extra_links=set(), exclude_repos=exclude_repos, exclude_archs=exclude_archs) main_links.add(trial_project) wid.fields.build_trial.project = trial_project # then construct trial sub projects for trial_sub_project, targets in trial_groups.items(): + print (trial_sub_project, targets) if trial_sub_project == trial_project: continue - sub_actions = [act for act in actions if act["targetproject"] in targets] + sub_actions = [act for act in actions if act["targetproject"] in targets and act["targetproject"] not in exclude_prjs] sub_links = self.construct_trial(trial_sub_project, sub_actions, extra_path=trial_project, extra_links=set(targets), exclude_repos=exclude_repos, exclude_archs=exclude_archs, exclude_links=main_links) wid.fields.build_trial.subprojects = _normalize(trial_groups) wid.result = True diff --git a/participants/update_meta.py 
b/participants/update_meta.py index 1aa5ba4..2d1e8ba 100644 --- a/participants/update_meta.py +++ b/participants/update_meta.py @@ -73,7 +73,7 @@ def handle_wi(self, wid): for upload in uploaded]) if errors: wid.fields.msg.extend(errors) - wid.result = False + # wid.result = False if wid.fields.prjmeta: prjmeta = wid.fields.prjmeta.as_dict() @@ -83,7 +83,7 @@ def handle_wi(self, wid): for upload in uploaded]) if errors: wid.fields.msg.extend(errors) - wid.result = False + # wid.result = False def __update_meta(self, project, providers, metatype): """Extracts a meta xml from rpm and uploads them to project. @@ -112,12 +112,22 @@ def __update_meta(self, project, providers, metatype): for xml in extract_rpm(lab.real_path(binary), lab.path, ["*.xml"]): meta = os.path.basename(xml) + submetatype = os.path.basename(os.path.dirname(xml)) + print(meta, metatype, submetatype) try: with open(lab.real_path(xml), 'r') as fd: metadata = [line.replace("@PROJECT@", project) for line in fd.readlines()] # Update meta - core.edit_meta(metatype, project, data=metadata) - uploaded.append(meta) + if submetatype == "aggregates": + pkgname = os.path.splitext(meta)[0] + core.edit_meta(metatype='pkg', path_args=(project, pkgname), template_args=({'name': pkgname, 'user': 'cibot'}), apiurl=self.obs.apiurl) + u = core.makeurl(self.obs.apiurl, ['source', project, pkgname, '_aggregate']) + print u + print metadata + core.http_PUT(u, data="\n".join(metadata)) + else: + core.edit_meta(metatype, project, data=metadata) + uploaded.append(metatype + '/' + meta) except HTTPError as exc: errors.append("Failed to upload %s:\nHTTP %s %s\n%s" % (meta, exc.code, exc.filename, diff --git a/participants/update_patterns.py b/participants/update_patterns.py old mode 100644 new mode 100755 index 0a0f56f..de1ac67 --- a/participants/update_patterns.py +++ b/participants/update_patterns.py @@ -117,6 +117,7 @@ def __update_patterns(self, project, package, target, binary): if errors: return uploaded, errors # Extract pattern (xml) files from the rpm + print lab.real_path(binary) for xml in extract_rpm(lab.real_path(binary), lab.path, ["*.xml"]): pattern = os.path.basename(xml) diff --git a/participants/wait_for_repo_published.py b/participants/wait_for_repo_published.py new file mode 100644 index 0000000..145e141 --- /dev/null +++ b/participants/wait_for_repo_published.py @@ -0,0 +1,159 @@ +#!/usr/bin/python +"""Checks the state of a project's repositories or single repository and +returns success if the repository has been published. + +:term:`Workitem` params IN + +:Parameters: + project(string): + The project to check + repository(string): + Optionally, the repository in above project. if not provided the state of + all repositories in the project are checked + arch(string): + Optionally, the arch in above repository. if not provided the state of + all archs in the repository are checked + +:term:`Workitem` fields OUT: + +:Returns: + result(Boolean): + True if repository(ies are) is published, False otherwise. 
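update_meta.py above now looks at the directory each extracted xml file came from: files under an aggregates/ directory are uploaded as the _aggregate of a package named after the file (creating the package meta first), while everything else still goes through core.edit_meta() as before. A trimmed sketch of just that dispatch; the paths are hypothetical:

    import os

    def classify_meta_xml(xml_path):
        meta = os.path.basename(xml_path)
        submetatype = os.path.basename(os.path.dirname(xml_path))
        if submetatype == "aggregates":
            # ends up as source/<project>/<pkgname>/_aggregate in OBS
            return "aggregate", os.path.splitext(meta)[0]
        return "meta", meta

    classify_meta_xml("/tmp/lab/aggregates/ce-repos.xml")  # ('aggregate', 'ce-repos')
    classify_meta_xml("/tmp/lab/patterns/groups.xml")      # ('meta', 'groups.xml')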
+ +""" + +import datetime +from boss.obs import BuildServiceParticipant + +class State(object): + """Represents a project source and publish state cached for a set time""" + + def __init__(self, obs, project): + self.checked = None + #FIXME: make it configurable + self.lifetime = datetime.timedelta(seconds=300) + self._obs = obs + self.project = project + self._source_state = None + self._publish_states = None + + @property + def expired(self): + """indicates whether this state is expired and should be refreshed""" + + return self.checked is None or \ + self.checked + self.lifetime < datetime.datetime.now() + + @property + def publish_states(self): + """caching property representing the publish state of a project""" + + if self.expired: + print "refreshing publish state of %s" % self.project + # Returns dict {"repo/arch" : "state"} + publish_states = {} + all_states = self._obs.getRepoState(self.project) + for repo_arch, state in all_states.items(): + repo , arch = repo_arch.split("/") + # unpublished means that repository publishing is disabled + publish_states[(repo, arch)] = state.endswith("published") + self._publish_states = publish_states + + return self._publish_states + + @property + def source_state(self): + """caching property representing the source state of a project""" + + if self.expired: + print "refreshing source state of %s" % self.project + result = True + for package in self._obs.getPackageList(self.project): + if package == '_pattern': + continue + try: + _ = self._obs.getPackageFileList(self.project, package) + except Exception, exc: + print exc + result = False + + if not result: + break + self._source_state = result + + return self._source_state + + def ready(self, repository=None, architecture=None, exclude_repos=None, + exclude_archs=None): + """Decides wether a project is ready to be used based on criteria""" + + ready = True + for repo_arch, state in self.publish_states.items(): + repo, arch = repo_arch + if repository and not repo == repository: + # skip unwanted repo + continue + if architecture and not arch == architecture: + # skip unwanted arch + continue + if exclude_repos and repo in exclude_repos: + continue + if exclude_archs and arch in exclude_archs: + continue + # At this point we have the repo/arch we want + ready = ready and state + + if ready: + ready = ready and self.source_state + + if self.expired: + self.checked = datetime.datetime.now() + print "state refreshed at %s, expires after %s" % \ + ( self.checked, self.lifetime ) + + return ready + +class StateRegistry(object): + """An in-memory registry of project states""" + + def __init__(self): + self._states = {} + + def register(self, obs, project): + """Register an obs project""" + + if not project in self._states: + print "registering %s" % project + self._states[project] = State(obs, project) + return self._states[project] + +class ParticipantHandler(BuildServiceParticipant): + """Participant class as defined by the SkyNET API.""" + + def __init__(self): + + BuildServiceParticipant.__init__(self) + # start with empty project state registry + self.registry = StateRegistry() + + def handle_wi_control(self, ctrl): + """Job control thread.""" + pass + + @BuildServiceParticipant.get_oscrc + def handle_lifecycle_control(self, ctrl): + """Participant control thread.""" + pass + + @BuildServiceParticipant.setup_obs + def handle_wi(self, wid): + """Actual job thread.""" + + wid.result = False + + state = self.registry.register(self.obs, wid.params.project) + wid.result = 
state.ready(wid.params.repository, + wid.params.arch, + wid.fields.exclude_repos, + wid.fields.exclude_archs) + From db2b7f30d9225f583e61c294785f835ba0d0437a Mon Sep 17 00:00:00 2001 From: Niels Breet Date: Thu, 17 Aug 2017 13:26:29 +0300 Subject: [PATCH 2/2] Add supervisor confs for new participants --- conf/supervisor/defineimage.conf | 22 ++++++++++++++++++++ conf/supervisor/do_build_trial.conf | 22 ++++++++++++++++++++ conf/supervisor/wait_for_repo_published.conf | 22 ++++++++++++++++++++ 3 files changed, 66 insertions(+) create mode 100644 conf/supervisor/defineimage.conf create mode 100644 conf/supervisor/do_build_trial.conf create mode 100644 conf/supervisor/wait_for_repo_published.conf diff --git a/conf/supervisor/defineimage.conf b/conf/supervisor/defineimage.conf new file mode 100644 index 0000000..1bd1248 --- /dev/null +++ b/conf/supervisor/defineimage.conf @@ -0,0 +1,22 @@ +[program:defineimage] +command = /usr/bin/skynet_exo /etc/supervisor/conf.d/defineimage.conf +process_name = %(program_name)s_%(process_num)s +numprocs = 1 +user = bossmaintainer +umask = 022 +autostart = true +autorestart = true +startsecs = 5 +startretries = 100 +stopwaitsecs = 10 +redirect_stderr = true +stdout_logfile = /var/log/supervisor/%(program_name)s_%(process_num)s.log +stderr_logfile = off +environment = PYTHONUNBUFFERED=1,HOME="/home/bossmaintainer",USER="bossmaintainer" + +[participant] +name = defineimage +queue = defineimage +regexp = defineimage +code = /usr/share/boss-skynet/defineimage.py + diff --git a/conf/supervisor/do_build_trial.conf b/conf/supervisor/do_build_trial.conf new file mode 100644 index 0000000..f61a3b4 --- /dev/null +++ b/conf/supervisor/do_build_trial.conf @@ -0,0 +1,22 @@ +[program:do_build_trial] +command = /usr/bin/skynet_exo /etc/supervisor/conf.d/do_build_trial.conf +process_name = %(program_name)s_%(process_num)s +numprocs = 1 +user = bossmaintainer +umask = 022 +autostart = true +autorestart = true +startsecs = 5 +startretries = 100 +stopwaitsecs = 10 +redirect_stderr = true +stdout_logfile = /var/log/supervisor/%(program_name)s_%(process_num)s.log +stderr_logfile = off +environment = PYTHONUNBUFFERED=1,HOME="/home/bossmaintainer",USER="bossmaintainer" + +[participant] +name = do_build_trial +queue = do_build_trial +regexp = do_build_trial +code = /usr/share/boss-skynet/do_build_trial.py + diff --git a/conf/supervisor/wait_for_repo_published.conf b/conf/supervisor/wait_for_repo_published.conf new file mode 100644 index 0000000..7c00451 --- /dev/null +++ b/conf/supervisor/wait_for_repo_published.conf @@ -0,0 +1,22 @@ +[program:wait_for_repo_published] +command = /usr/bin/skynet_exo /etc/supervisor/conf.d/wait_for_repo_published.conf +process_name = %(program_name)s_%(process_num)s +numprocs = 1 +user = bossmaintainer +umask = 022 +autostart = true +autorestart = true +startsecs = 5 +startretries = 100 +stopwaitsecs = 10 +redirect_stderr = true +stdout_logfile = /var/log/supervisor/%(program_name)s_%(process_num)s.log +stderr_logfile = off +environment = PYTHONUNBUFFERED=1,HOME="/home/bossmaintainer",USER="bossmaintainer" + +[participant] +name = wait_for_repo_published +queue = wait_for_repo_published +regexp = wait_for_repo_published +code = /usr/share/boss-skynet/wait_for_repo_published.py +
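Both repository watchers (is_repo_published.py and the new wait_for_repo_published.py) share the same caching design: one State object per project, kept in an in-memory StateRegistry, so repeated workitems polling the same project reuse the cached OBS answers until the lifetime runs out (15 seconds in the former, 300 in the latter). A usage sketch against the classes above; the project name is hypothetical and obs stands for the BuildService instance that BuildServiceParticipant sets up:

    registry = StateRegistry()

    # First workitem for this project: queries OBS and caches the answers.
    state = registry.register(obs, "home:tester:trial:SR1234")
    state.ready(repository="standard", architecture="i586",
                exclude_repos=None, exclude_archs=None)

    # A later workitem inside the cache lifetime gets the same State object
    # back, so ready() is answered from the cache without new OBS round trips.
    assert registry.register(obs, "home:tester:trial:SR1234") is state

The supervisor configuration files in the second patch then register defineimage, do_build_trial and wait_for_repo_published as their own skynet_exo programs, each consuming the queue that matches the participant name.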