From e56f359a2c2be5d1b6cd08e390087f242768a1b6 Mon Sep 17 00:00:00 2001 From: =?utf8?q?Petr=20=C5=A0tetiar?= Date: Sun, 18 Jun 2023 07:19:32 +0200 Subject: [PATCH] phase1: reformat with black MIME-Version: 1.0 Content-Type: text/plain; charset=utf8 Content-Transfer-Encoding: 8bit Making everything consistent, more readable, CI guarded. Signed-off-by: Petr Štetiar --- .github/workflows/build-push.yml | 5 +- phase1/master.cfg | 2645 ++++++++++++++++++------ 2 files changed, 1566 insertions(+), 1084 deletions(-) diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index b6f1c3c..89d57e3 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -39,9 +39,8 @@ jobs: - name: Lint with ruff run: ruff phase*/master.cfg -# FIXME -# - name: Stylecheck with black -# run: black phase*/master.cfg + - name: Stylecheck with black + run: black phase1/master.cfg build-test-push: name: Build, test and push containers diff --git a/phase1/master.cfg b/phase1/master.cfg index 91755cd..799a40b 100644 --- a/phase1/master.cfg +++ b/phase1/master.cfg @@ -42,8 +42,8 @@ from buildbot.worker.local import LocalWorker if not os.path.exists("twistd.pid"): - with open("twistd.pid", "w") as pidfile: - pidfile.write("{}".format(os.getpid())) + with open("twistd.pid", "w") as pidfile: + pidfile.write("{}".format(os.getpid())) # This is a sample buildmaster config file. It must be installed as # 'master.cfg' in your buildmaster's base directory. 
@@ -52,51 +52,53 @@ ini = configparser.ConfigParser() ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini")) if "general" not in ini or "phase1" not in ini: - raise ValueError("Fix your configuration") + raise ValueError("Fix your configuration") -inip1 = ini['phase1'] +inip1 = ini["phase1"] # Globals -work_dir = os.path.abspath(ini['general'].get("workdir", ".")) +work_dir = os.path.abspath(ini["general"].get("workdir", ".")) scripts_dir = os.path.abspath("../scripts") -repo_url = ini['repo'].get("url") +repo_url = ini["repo"].get("url") rsync_defopts = ["-v", "--timeout=120"] -#if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0: -# rsync_bin_defopts += ["--contimeout=20"] +# if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0: +# rsync_bin_defopts += ["--contimeout=20"] branches = {} + def ini_parse_branch(section): - b = {} - name = section.get("name") + b = {} + name = section.get("name") + + if not name: + raise ValueError("missing 'name' in " + repr(section)) + if name in branches: + raise ValueError("duplicate branch name in " + repr(section)) - if not name: - raise ValueError("missing 'name' in " + repr(section)) - if name in branches: - raise ValueError("duplicate branch name in " + repr(section)) + b["name"] = name + b["bin_url"] = section.get("binary_url") + b["bin_key"] = section.get("binary_password") - b["name"] = name - b["bin_url"] = section.get("binary_url") - b["bin_key"] = section.get("binary_password") + b["src_url"] = section.get("source_url") + b["src_key"] = section.get("source_password") - b["src_url"] = section.get("source_url") - b["src_key"] = section.get("source_password") + b["gpg_key"] = section.get("gpg_key") - b["gpg_key"] = section.get("gpg_key") + b["usign_key"] = section.get("usign_key") + usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key" + b["usign_comment"] = section.get("usign_comment", usign_comment) - b["usign_key"] = section.get("usign_key") - 
usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key" - b["usign_comment"] = section.get("usign_comment", usign_comment) + b["config_seed"] = section.get("config_seed") - b["config_seed"] = section.get("config_seed") + b["kmod_archive"] = section.getboolean("kmod_archive", False) - b["kmod_archive"] = section.getboolean("kmod_archive", False) + branches[name] = b + log.msg("Configured branch: {}".format(name)) - branches[name] = b - log.msg("Configured branch: {}".format(name)) # PB port can be either a numeric port or a connection string pb_port = inip1.get("port") or 9989 @@ -111,8 +113,8 @@ c = BuildmasterConfig = {} # installation's html.WebStatus home page (linked to the # 'titleURL') and is embedded in the title of the waterfall HTML page. -c['title'] = ini['general'].get("title") -c['titleURL'] = ini['general'].get("title_url") +c["title"] = ini["general"].get("title") +c["titleURL"] = ini["general"].get("title_url") # the 'buildbotURL' string should point to the location where the buildbot's # internal web server (usually the html.WebStatus page) is visible. This @@ -120,7 +122,7 @@ c['titleURL'] = ini['general'].get("title_url") # with an externally-visible host name which the buildbot cannot figure out # without some help. -c['buildbotURL'] = inip1.get("buildbot_url") +c["buildbotURL"] = inip1.get("buildbot_url") ####### BUILDWORKERS @@ -128,362 +130,407 @@ c['buildbotURL'] = inip1.get("buildbot_url") # a Worker object, specifying a unique worker name and password. The same # worker name and password must be configured on the worker. 
-c['workers'] = [] +c["workers"] = [] NetLocks = dict() + def ini_parse_workers(section): - name = section.get("name") - password = section.get("password") - phase = section.getint("phase") - tagonly = section.getboolean("tag_only") - rsyncipv4 = section.getboolean("rsync_ipv4") - - if not name or not password or not phase == 1: - log.msg("invalid worker configuration ignored: {}".format(repr(section))) - return - - sl_props = { 'tag_only':tagonly } - if "dl_lock" in section: - lockname = section.get("dl_lock") - sl_props['dl_lock'] = lockname - if lockname not in NetLocks: - NetLocks[lockname] = locks.MasterLock(lockname) - if "ul_lock" in section: - lockname = section.get("ul_lock") - sl_props['ul_lock'] = lockname - if lockname not in NetLocks: - NetLocks[lockname] = locks.MasterLock(lockname) - if rsyncipv4: - sl_props['rsync_ipv4'] = True # only set prop if required, we use '+' Interpolate substitution - - log.msg("Configured worker: {}".format(name)) - # NB: phase1 build factory requires workers to be single-build only - c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props)) + name = section.get("name") + password = section.get("password") + phase = section.getint("phase") + tagonly = section.getboolean("tag_only") + rsyncipv4 = section.getboolean("rsync_ipv4") + + if not name or not password or not phase == 1: + log.msg("invalid worker configuration ignored: {}".format(repr(section))) + return + + sl_props = {"tag_only": tagonly} + if "dl_lock" in section: + lockname = section.get("dl_lock") + sl_props["dl_lock"] = lockname + if lockname not in NetLocks: + NetLocks[lockname] = locks.MasterLock(lockname) + if "ul_lock" in section: + lockname = section.get("ul_lock") + sl_props["ul_lock"] = lockname + if lockname not in NetLocks: + NetLocks[lockname] = locks.MasterLock(lockname) + if rsyncipv4: + sl_props[ + "rsync_ipv4" + ] = True # only set prop if required, we use '+' Interpolate substitution + + log.msg("Configured worker: 
{}".format(name)) + # NB: phase1 build factory requires workers to be single-build only + c["workers"].append(Worker(name, password, max_builds=1, properties=sl_props)) for section in ini.sections(): - if section.startswith("branch "): - ini_parse_branch(ini[section]) + if section.startswith("branch "): + ini_parse_branch(ini[section]) - if section.startswith("worker "): - ini_parse_workers(ini[section]) + if section.startswith("worker "): + ini_parse_workers(ini[section]) # list of branches in build-priority order branchNames = [branches[b]["name"] for b in branches] -c['protocols'] = {'pb': {'port': pb_port}} +c["protocols"] = {"pb": {"port": pb_port}} # coalesce builds -c['collapseRequests'] = True +c["collapseRequests"] = True # Reduce amount of backlog data -c['configurators'] = [util.JanitorConfigurator( - logHorizon=timedelta(days=3), - hour=6, -)] +c["configurators"] = [ + util.JanitorConfigurator( + logHorizon=timedelta(days=3), + hour=6, + ) +] + @defer.inlineCallbacks def getNewestCompleteTime(bldr): - """Returns the complete_at of the latest completed and not SKIPPED - build request for this builder, or None if there are no such build - requests. We need to filter out SKIPPED requests because we're - using collapseRequests=True which is unfortunately marking all - previous requests as complete when new buildset is created. 
- - @returns: datetime instance or None, via Deferred - """ - - bldrid = yield bldr.getBuilderId() - completed = yield bldr.master.data.get( - ('builders', bldrid, 'buildrequests'), - [ - resultspec.Filter('complete', 'eq', [True]), - resultspec.Filter('results', 'ne', [results.SKIPPED]), - ], - order=['-complete_at'], limit=1) - if not completed: - return - - complete_at = completed[0]['complete_at'] - - last_build = yield bldr.master.data.get( - ('builds', ), - [ - resultspec.Filter('builderid', 'eq', [bldrid]), - ], - order=['-started_at'], limit=1) - - if last_build and last_build[0]: - last_complete_at = last_build[0]['complete_at'] - if last_complete_at and (last_complete_at > complete_at): - return last_complete_at - - return complete_at + """Returns the complete_at of the latest completed and not SKIPPED + build request for this builder, or None if there are no such build + requests. We need to filter out SKIPPED requests because we're + using collapseRequests=True which is unfortunately marking all + previous requests as complete when new buildset is created. 
+ + @returns: datetime instance or None, via Deferred + """ + + bldrid = yield bldr.getBuilderId() + completed = yield bldr.master.data.get( + ("builders", bldrid, "buildrequests"), + [ + resultspec.Filter("complete", "eq", [True]), + resultspec.Filter("results", "ne", [results.SKIPPED]), + ], + order=["-complete_at"], + limit=1, + ) + if not completed: + return + + complete_at = completed[0]["complete_at"] + + last_build = yield bldr.master.data.get( + ("builds",), + [ + resultspec.Filter("builderid", "eq", [bldrid]), + ], + order=["-started_at"], + limit=1, + ) + + if last_build and last_build[0]: + last_complete_at = last_build[0]["complete_at"] + if last_complete_at and (last_complete_at > complete_at): + return last_complete_at + + return complete_at + @defer.inlineCallbacks def prioritizeBuilders(master, builders): - """Returns sorted list of builders by their last timestamp of completed and - not skipped build, ordered first by branch name. + """Returns sorted list of builders by their last timestamp of completed and + not skipped build, ordered first by branch name. 
+ + @returns: list of sorted builders + """ - @returns: list of sorted builders - """ + bldrNamePrio = {"__Janitor": 0, "00_force_build": 0} + i = 1 + for bname in branchNames: + bldrNamePrio[bname] = i + i += 1 - bldrNamePrio = { "__Janitor": 0, "00_force_build": 0 } - i = 1 - for bname in branchNames: - bldrNamePrio[bname] = i - i += 1 + def is_building(bldr): + return bool(bldr.building) or bool(bldr.old_building) - def is_building(bldr): - return bool(bldr.building) or bool(bldr.old_building) + def bldr_info(bldr): + d = defer.maybeDeferred(getNewestCompleteTime, bldr) + d.addCallback(lambda complete_at: (complete_at, bldr)) + return d - def bldr_info(bldr): - d = defer.maybeDeferred(getNewestCompleteTime, bldr) - d.addCallback(lambda complete_at: (complete_at, bldr)) - return d + def bldr_sort(item): + (complete_at, bldr) = item - def bldr_sort(item): - (complete_at, bldr) = item + pos = 99 + for name, prio in bldrNamePrio.items(): + if bldr.name.startswith(name): + pos = prio + break - pos = 99 - for (name, prio) in bldrNamePrio.items(): - if bldr.name.startswith(name): - pos = prio - break + if not complete_at: + date = datetime.min + complete_at = date.replace(tzinfo=tzutc()) - if not complete_at: - date = datetime.min - complete_at = date.replace(tzinfo=tzutc()) + if is_building(bldr): + date = datetime.max + complete_at = date.replace(tzinfo=tzutc()) - if is_building(bldr): - date = datetime.max - complete_at = date.replace(tzinfo=tzutc()) + return (pos, complete_at, bldr.name) - return (pos, complete_at, bldr.name) + results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders]) + results.sort(key=bldr_sort) - results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders]) - results.sort(key=bldr_sort) + # for r in results: + # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0])) - #for r in results: - # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0])) + return [r[1] for r in 
results] - return [r[1] for r in results] -c['prioritizeBuilders'] = prioritizeBuilders +c["prioritizeBuilders"] = prioritizeBuilders ####### CHANGESOURCES # find targets targets = set() + def populateTargets(): - """ fetch a shallow clone of each configured branch in turn: - execute dump-target-info.pl and collate the results to ensure - targets that only exist in specific branches get built. - This takes a while during master startup but is executed only once. - """ - log.msg("Populating targets, this will take time") - sourcegit = work_dir + '/source.git' - for branch in branchNames: - if os.path.isdir(sourcegit): - subprocess.call(["rm", "-rf", sourcegit]) - - subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit]) - - os.makedirs(sourcegit + '/tmp', exist_ok=True) - findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'], - stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit) - - while True: - line = findtargets.stdout.readline() - if not line: - break - ta = line.decode().strip().split(' ') - targets.add(ta[0]) - - subprocess.call(["rm", "-rf", sourcegit]) + """fetch a shallow clone of each configured branch in turn: + execute dump-target-info.pl and collate the results to ensure + targets that only exist in specific branches get built. + This takes a while during master startup but is executed only once. 
+ """ + log.msg("Populating targets, this will take time") + sourcegit = work_dir + "/source.git" + for branch in branchNames: + if os.path.isdir(sourcegit): + subprocess.call(["rm", "-rf", sourcegit]) + + subprocess.call( + [ + "git", + "clone", + "-q", + "--depth=1", + "--branch=" + branch, + repo_url, + sourcegit, + ] + ) + + os.makedirs(sourcegit + "/tmp", exist_ok=True) + findtargets = subprocess.Popen( + ["./scripts/dump-target-info.pl", "targets"], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + cwd=sourcegit, + ) + + while True: + line = findtargets.stdout.readline() + if not line: + break + ta = line.decode().strip().split(" ") + targets.add(ta[0]) + + subprocess.call(["rm", "-rf", sourcegit]) + populateTargets() # the 'change_source' setting tells the buildmaster how it should find out # about source code changes. -c['change_source'] = [] -c['change_source'].append(GitPoller( - repo_url, - workdir=work_dir+'/work.git', branches=branchNames, - pollAtLaunch=True, pollinterval=300)) +c["change_source"] = [] +c["change_source"].append( + GitPoller( + repo_url, + workdir=work_dir + "/work.git", + branches=branchNames, + pollAtLaunch=True, + pollinterval=300, + ) +) ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. 
+ # Selector for known valid tags class TagChoiceParameter(BaseParameter): - spec_attributes = ["strict", "choices"] - type = "list" - strict = True + spec_attributes = ["strict", "choices"] + type = "list" + strict = True + + def __init__(self, name, label=None, **kw): + super().__init__(name, label, **kw) + self._choice_list = [] - def __init__(self, name, label=None, **kw): - super().__init__(name, label, **kw) - self._choice_list = [] + def getRevTags(self, findtag=None): + taglist = [] + branchvers = [] - def getRevTags(self, findtag=None): - taglist = [] - branchvers = [] + # we will filter out tags that do no match the configured branches + for b in branchNames: + basever = re.search(r"-([0-9]+\.[0-9]+)$", b) + if basever: + branchvers.append(basever[1]) - # we will filter out tags that do no match the configured branches - for b in branchNames: - basever = re.search(r'-([0-9]+\.[0-9]+)$', b) - if basever: - branchvers.append(basever[1]) + # grab tags from remote repository + alltags = subprocess.Popen( + ["git", "ls-remote", "--tags", repo_url], stdout=subprocess.PIPE + ) - # grab tags from remote repository - alltags = subprocess.Popen( - ['git', 'ls-remote', '--tags', repo_url], - stdout = subprocess.PIPE) + while True: + line = alltags.stdout.readline() - while True: - line = alltags.stdout.readline() + if not line: + break - if not line: - break + (rev, tag) = line.split() - (rev, tag) = line.split() + # does it match known format? ('vNN.NN.NN(-rcN)') + tagver = re.search( + r"\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$", + tag.decode().strip(), + ) - # does it match known format? 
('vNN.NN.NN(-rcN)') - tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip()) + # only list valid tags matching configured branches + if tagver and any(tagver[1][1:].startswith(b) for b in branchvers): + # if we want a specific tag, ignore all that don't match + if findtag and findtag != tagver[1]: + continue + taglist.append({"rev": rev.decode().strip(), "tag": tagver[1]}) - # only list valid tags matching configured branches - if tagver and any(tagver[1][1:].startswith(b) for b in branchvers): - # if we want a specific tag, ignore all that don't match - if findtag and findtag != tagver[1]: - continue - taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]}) + return taglist - return taglist + @property + def choices(self): + taglist = [rt["tag"] for rt in self.getRevTags()] + taglist.sort( + reverse=True, + key=lambda tag: tag if re.search(r"-rc[0-9]+$", tag) else tag + "-z", + ) + taglist.insert(0, "") - @property - def choices(self): - taglist = [rt['tag'] for rt in self.getRevTags()] - taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z') - taglist.insert(0, '') + self._choice_list = taglist - self._choice_list = taglist + return self._choice_list - return self._choice_list + def updateFromKwargs(self, properties, kwargs, **unused): + tag = self.getFromKwargs(kwargs) + properties[self.name] = tag - def updateFromKwargs(self, properties, kwargs, **unused): - tag = self.getFromKwargs(kwargs) - properties[self.name] = tag + # find the commit matching the tag + findtag = self.getRevTags(tag) - # find the commit matching the tag - findtag = self.getRevTags(tag) + if not findtag: + raise ValidationError("Couldn't find tag") - if not findtag: - raise ValidationError("Couldn't find tag") + properties["force_revision"] = findtag[0]["rev"] - properties['force_revision'] = findtag[0]['rev'] + # find the branch matching the tag + branch = None + branchver = 
re.search(r"v([0-9]+\.[0-9]+)", tag) + for b in branchNames: + if b.endswith(branchver[1]): + branch = b - # find the branch matching the tag - branch = None - branchver = re.search(r'v([0-9]+\.[0-9]+)', tag) - for b in branchNames: - if b.endswith(branchver[1]): - branch = b + if not branch: + raise ValidationError("Couldn't find branch") - if not branch: - raise ValidationError("Couldn't find branch") + properties["force_branch"] = branch - properties['force_branch'] = branch + def parse_from_arg(self, s): + if self.strict and s not in self._choice_list: + raise ValidationError( + "'%s' does not belong to list of available choices '%s'" + % (s, self._choice_list) + ) + return s - def parse_from_arg(self, s): - if self.strict and s not in self._choice_list: - raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list)) - return s @util.renderer @defer.inlineCallbacks def builderNames(props): - """ since we have per branch and per target builders, - address the relevant builder for each new buildrequest - based on the request's desired branch and target. 
- """ - branch = props.getProperty("branch") - target = props.getProperty("target", "") - - if target == "all": - target = "" - - # if that didn't work, try sourcestamp to find a branch - if not branch: - # match builders with target branch - ss = props.sourcestamps[0] - if ss: - branch = ss['branch'] - else: - log.msg("couldn't find builder") - return [] # nothing works - - bname = branch + "_" + target - builders = [] - - for b in (yield props.master.data.get(('builders',))): - if not b['name'].startswith(bname): - continue - builders.append(b['name']) - - return builders - -c['schedulers'] = [] -c['schedulers'].append(AnyBranchScheduler( - name = "all", - change_filter = util.ChangeFilter(branch=branchNames), - treeStableTimer = 15*60, - builderNames = builderNames)) - -c['schedulers'].append(ForceScheduler( - name = "force", - buttonName = "Force builds", - label = "Force build details", - builderNames = [ "00_force_build" ], - - codebases = [ - util.CodebaseParameter( - "", - label = "Repository", - branch = util.FixedParameter(name = "branch", default = ""), - revision = util.FixedParameter(name = "revision", default = ""), - repository = util.FixedParameter(name = "repository", default = ""), - project = util.FixedParameter(name = "project", default = "") - ) - ], - - reason = util.StringParameter( - name = "reason", - label = "Reason", - default = "Trigger build", - required = True, - size = 80 - ), - - properties = [ - # NB: avoid nesting to simplify processing of properties - util.ChoiceStringParameter( - name = "target", - label = "Build target", - default = "all", - choices = [ "all" ] + list(targets) - ), - TagChoiceParameter( - name = "tag", - label = "Build tag", - default = "" - ) - ] -)) - -c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames)) + """since we have per branch and per target builders, + address the relevant builder for each new buildrequest + based on the request's desired branch and target. 
+ """ + branch = props.getProperty("branch") + target = props.getProperty("target", "") + + if target == "all": + target = "" + + # if that didn't work, try sourcestamp to find a branch + if not branch: + # match builders with target branch + ss = props.sourcestamps[0] + if ss: + branch = ss["branch"] + else: + log.msg("couldn't find builder") + return [] # nothing works + + bname = branch + "_" + target + builders = [] + + for b in (yield props.master.data.get(("builders",))): + if not b["name"].startswith(bname): + continue + builders.append(b["name"]) + + return builders + + +c["schedulers"] = [] +c["schedulers"].append( + AnyBranchScheduler( + name="all", + change_filter=util.ChangeFilter(branch=branchNames), + treeStableTimer=15 * 60, + builderNames=builderNames, + ) +) + +c["schedulers"].append( + ForceScheduler( + name="force", + buttonName="Force builds", + label="Force build details", + builderNames=["00_force_build"], + codebases=[ + util.CodebaseParameter( + "", + label="Repository", + branch=util.FixedParameter(name="branch", default=""), + revision=util.FixedParameter(name="revision", default=""), + repository=util.FixedParameter(name="repository", default=""), + project=util.FixedParameter(name="project", default=""), + ) + ], + reason=util.StringParameter( + name="reason", + label="Reason", + default="Trigger build", + required=True, + size=80, + ), + properties=[ + # NB: avoid nesting to simplify processing of properties + util.ChoiceStringParameter( + name="target", + label="Build target", + default="all", + choices=["all"] + list(targets), + ), + TagChoiceParameter(name="tag", label="Build tag", default=""), + ], + ) +) + +c["schedulers"].append( + schedulers.Triggerable(name="trigger", builderNames=builderNames) +) ####### BUILDERS @@ -491,791 +538,1225 @@ c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=build # what steps, and which workers can execute them. 
Note that any particular build will # only take place on one worker. + def IsNoMasterBuild(step): - return step.getProperty("branch") != "master" + return step.getProperty("branch") != "master" + def IsUsignEnabled(step): - branch = step.getProperty("branch") - return branch and branches[branch].get("usign_key") + branch = step.getProperty("branch") + return branch and branches[branch].get("usign_key") + def IsSignEnabled(step): - branch = step.getProperty("branch") - return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key") + branch = step.getProperty("branch") + return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key") + def IsKmodArchiveEnabled(step): - branch = step.getProperty("branch") - return branch and branches[branch].get("kmod_archive") + branch = step.getProperty("branch") + return branch and branches[branch].get("kmod_archive") + def IsKmodArchiveAndRsyncEnabled(step): - branch = step.getProperty("branch") - return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url")) + branch = step.getProperty("branch") + return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url")) + def GetBaseVersion(branch): - if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch): - return branch.split('-')[1] - else: - return "master" + if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch): + return branch.split("-")[1] + else: + return "master" + @properties.renderer def GetVersionPrefix(props): - branch = props.getProperty("branch") - basever = GetBaseVersion(branch) - if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]): - return "%s/" % props["tag"][1:] - elif basever != "master": - return "%s-SNAPSHOT/" % basever - else: - return "" + branch = props.getProperty("branch") + basever = GetBaseVersion(branch) + if props.hasProperty("tag") and re.match( + r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"] + ): + return "%s/" % props["tag"][1:] + elif basever != "master": + return 
"%s-SNAPSHOT/" % basever + else: + return "" + @util.renderer def GetConfigSeed(props): - branch = props.getProperty("branch") - return branch and branches[branch].get("config_seed") or "" + branch = props.getProperty("branch") + return branch and branches[branch].get("config_seed") or "" + @util.renderer def GetRsyncParams(props, srcorbin, urlorkey): - # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key' - branch = props.getProperty("branch") - opt = srcorbin + "_" + urlorkey - return branch and branches[branch].get(opt) + # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key' + branch = props.getProperty("branch") + opt = srcorbin + "_" + urlorkey + return branch and branches[branch].get(opt) + @util.renderer def GetUsignKey(props): - branch = props.getProperty("branch") - return branch and branches[branch].get("usign_key") + branch = props.getProperty("branch") + return branch and branches[branch].get("usign_key") + def GetNextBuild(builder, requests): - for r in requests: - if r.properties: - # order tagged build first - if r.properties.hasProperty("tag"): - return r + for r in requests: + if r.properties: + # order tagged build first + if r.properties.hasProperty("tag"): + return r + + r = requests[0] + # log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid)) + return r - r = requests[0] - #log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid)) - return r def MakeEnv(overrides=None, tryccache=False): - env = { - 'CCC': Interpolate("%(prop:cc_command:-gcc)s"), - 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"), - } - if tryccache: - env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh") - env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh") - env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s") - else: - env['CC'] = env['CCC'] - env['CXX'] = env['CCXX'] - env['CCACHE'] = '' - if overrides is not None: - env.update(overrides) - return env + env = { + "CCC": 
Interpolate("%(prop:cc_command:-gcc)s"), + "CCXX": Interpolate("%(prop:cxx_command:-g++)s"), + } + if tryccache: + env["CC"] = Interpolate("%(prop:builddir)s/ccache_cc.sh") + env["CXX"] = Interpolate("%(prop:builddir)s/ccache_cxx.sh") + env["CCACHE"] = Interpolate("%(prop:ccache_command:-)s") + else: + env["CC"] = env["CCC"] + env["CXX"] = env["CCXX"] + env["CCACHE"] = "" + if overrides is not None: + env.update(overrides) + return env + @properties.renderer def NetLockDl(props, extralock=None): - lock = None - if props.hasProperty("dl_lock"): - lock = NetLocks[props["dl_lock"]] - if lock is not None: - return [lock.access('exclusive')] - else: - return [] + lock = None + if props.hasProperty("dl_lock"): + lock = NetLocks[props["dl_lock"]] + if lock is not None: + return [lock.access("exclusive")] + else: + return [] + @properties.renderer def NetLockUl(props): - lock = None - if props.hasProperty("ul_lock"): - lock = NetLocks[props["ul_lock"]] - if lock is not None: - return [lock.access('exclusive')] - else: - return [] + lock = None + if props.hasProperty("ul_lock"): + lock = NetLocks[props["ul_lock"]] + if lock is not None: + return [lock.access("exclusive")] + else: + return [] + def IsTargetSelected(target): - def CheckTargetProperty(step): - selected_target = step.getProperty("target", "all") - if selected_target != "all" and selected_target != target: - return False - return True + def CheckTargetProperty(step): + selected_target = step.getProperty("target", "all") + if selected_target != "all" and selected_target != target: + return False + return True + + return CheckTargetProperty - return CheckTargetProperty @util.renderer def UsignSec2Pub(props): - branch = props.getProperty("branch") - try: - comment = branches[branch].get("usign_comment") or "untrusted comment: secret key" - seckey = branches[branch].get("usign_key") - seckey = base64.b64decode(seckey) - except Exception: - return None - - return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", 
comment), - base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:])) + branch = props.getProperty("branch") + try: + comment = ( + branches[branch].get("usign_comment") or "untrusted comment: secret key" + ) + seckey = branches[branch].get("usign_key") + seckey = base64.b64decode(seckey) + except Exception: + return None + + return "{}\n{}".format( + re.sub(r"\bsecret key$", "public key", comment), + base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]), + ) def canStartBuild(builder, wfb, request): - """ filter out non tag requests for tag_only workers. """ - wtagonly = wfb.worker.properties.getProperty('tag_only') - tag = request.properties.getProperty('tag') + """filter out non tag requests for tag_only workers.""" + wtagonly = wfb.worker.properties.getProperty("tag_only") + tag = request.properties.getProperty("tag") + + if wtagonly and not tag: + return False - if wtagonly and not tag: - return False + return True - return True -c['builders'] = [] +c["builders"] = [] -workerNames = [ ] +workerNames = [] -for worker in c['workers']: - workerNames.append(worker.workername) +for worker in c["workers"]: + workerNames.append(worker.workername) # add a single LocalWorker to handle the forcebuild builder -c['workers'].append(LocalWorker("__local_force_build", max_builds=1)) +c["workers"].append(LocalWorker("__local_force_build", max_builds=1)) force_factory = BuildFactory() -force_factory.addStep(steps.Trigger( - name = "trigger_build", - schedulerNames = [ "trigger" ], - sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }], - set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") }, -)) - -c['builders'].append(BuilderConfig( - name = "00_force_build", - workername = "__local_force_build", - factory = force_factory)) +force_factory.addStep( + steps.Trigger( + name="trigger_build", + schedulerNames=["trigger"], + 
sourceStamps=[ + { + "codebase": "", + "branch": Property("force_branch"), + "revision": Property("force_revision"), + "repository": repo_url, + "project": "", + } + ], + set_properties={ + "reason": Property("reason"), + "tag": Property("tag"), + "target": Property("target"), + }, + ) +) + +c["builders"].append( + BuilderConfig( + name="00_force_build", workername="__local_force_build", factory=force_factory + ) +) # NB the phase1 build factory assumes workers are single-build only for target in targets: - ts = target.split('/') - - factory = BuildFactory() - - # setup shared work directory if required - factory.addStep(ShellCommand( - name = "sharedwd", - descriptionDone = "Shared work directory set up", - command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")', - workdir = ".", - haltOnFailure = True, - )) - - # find number of cores - factory.addStep(SetPropertyFromCommand( - name = "nproc", - property = "nproc", - description = "Finding number of CPUs", - command = ["nproc"], - )) - - # find gcc and g++ compilers - factory.addStep(FileDownload( - name = "dlfindbinpl", - mastersrc = scripts_dir + '/findbin.pl', - workerdest = "../findbin.pl", - mode = 0o755, - )) - - factory.addStep(SetPropertyFromCommand( - name = "gcc", - property = "cc_command", - description = "Finding gcc command", - command = ["../findbin.pl", "gcc", "", ""], - haltOnFailure = True, - )) - - factory.addStep(SetPropertyFromCommand( - name = "g++", - property = "cxx_command", - description = "Finding g++ command", - command = ["../findbin.pl", "g++", "", ""], - haltOnFailure = True, - )) - - # see if ccache is available - factory.addStep(SetPropertyFromCommand( - name = "ccache", - property = "ccache_command", - description = "Testing for ccache command", - command = ["which", "ccache"], - haltOnFailure = False, - flunkOnFailure = False, - warnOnFailure = False, - hideStepIf = lambda r, s: r==results.FAILURE, - )) - - # check out the source 
- # Git() runs: - # if repo doesn't exist: 'git clone repourl' - # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full' - # git cat-file -e - # git checkout -f - # git checkout -B - # git rev-parse HEAD - factory.addStep(Git( - name = "git", - repourl = repo_url, - mode = 'full', - method = 'fresh', - locks = NetLockDl, - haltOnFailure = True, - )) - - # workaround for https://github.com/openwrt/buildbot/issues/5 - factory.addStep(Git( - name = "git me once more please", - repourl = repo_url, - mode = 'full', - method = 'fresh', - locks = NetLockDl, - haltOnFailure = True, - )) - - # update remote refs - factory.addStep(ShellCommand( - name = "fetchrefs", - description = "Fetching Git remote refs", - descriptionDone = "Git remote refs fetched", - command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")], - haltOnFailure = True, - )) - - # getver.sh requires local branches to track upstream otherwise version computation fails. 
- # Git() does not set tracking branches when cloning or switching, so work around this here - factory.addStep(ShellCommand( - name = "trackupstream", - description = "Setting upstream branch", - descriptionDone = "getver.sh is happy now", - command = ["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")], - haltOnFailure = True, - )) - - # Verify that Git HEAD points to a tag or branch - # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html - factory.addStep(ShellCommand( - name = "gitverify", - description = "Ensuring that Git HEAD is pointing to a branch or tag", - descriptionDone = "Git HEAD is sane", - command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."', - haltOnFailure = True, - )) - - factory.addStep(StringDownload( - name = "ccachecc", - s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n', - workerdest = "../ccache_cc.sh", - mode = 0o755, - )) - - factory.addStep(StringDownload( - name = "ccachecxx", - s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n', - workerdest = "../ccache_cxx.sh", - mode = 0o755, - )) - - # feed - factory.addStep(ShellCommand( - name = "updatefeeds", - description = "Updating feeds", - command=["./scripts/feeds", "update"], - env = MakeEnv(tryccache=True), - haltOnFailure = True, - locks = NetLockDl, - )) - - # feed - factory.addStep(ShellCommand( - name = "installfeeds", - description = "Installing feeds", - command=["./scripts/feeds", "install", "-a"], - env = MakeEnv(tryccache=True), - haltOnFailure = True, - )) - - # seed config - factory.addStep(StringDownload( - name = "dlconfigseed", - s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed), - workerdest = ".config", - mode = 0o644, - )) - - # configure - factory.addStep(ShellCommand( - name = "newconfig", - descriptionDone = ".config seeded", - command = 
Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey), - )) - - factory.addStep(ShellCommand( - name = "defconfig", - description = "Populating .config", - command = ["make", "defconfig"], - env = MakeEnv(), - )) - - # check arch - exit early if does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget - factory.addStep(ShellCommand( - name = "checkarch", - description = "Checking architecture", - descriptionDone = "Architecture validated", - command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]), - logEnviron = False, - want_stdout = False, - want_stderr = False, - haltOnFailure = True, - flunkOnFailure = False, # this is not a build FAILURE - TODO mark build as SKIPPED - )) - - # find libc suffix - factory.addStep(SetPropertyFromCommand( - name = "libc", - property = "libc", - description = "Finding libc suffix", - command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"], - )) - - # install build key - factory.addStep(StringDownload( - name = "dlkeybuildpub", - s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub), - workerdest = "key-build.pub", - mode = 0o600, - doStepIf = IsUsignEnabled, - )) - - factory.addStep(StringDownload( - name = "dlkeybuild", - s = "# fake private key", - workerdest = "key-build", - mode = 0o600, - doStepIf = IsUsignEnabled, - )) - - factory.addStep(StringDownload( - name = "dlkeybuilducert", - s = "# fake certificate", - workerdest = "key-build.ucert", - mode = 0o600, - doStepIf = IsUsignEnabled, - )) - - # prepare dl - factory.addStep(ShellCommand( - name = "dldir", - description = "Preparing dl/", - descriptionDone = "dl/ prepared", - command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"', - workdir = 
Property("builddir"), - logEnviron = False, - want_stdout = False, - )) - - # cleanup dl - factory.addStep(ShellCommand( - name = "dlprune", - description = "Pruning dl/", - descriptionDone = "dl/ pruned", - command = 'find dl/ -mindepth 1 -atime +15 -delete -print', - logEnviron = False, - )) - - # prepare tar - factory.addStep(ShellCommand( - name = "dltar", - description = "Building and installing GNU tar", - descriptionDone = "GNU tar built and installed", - command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"], - env = MakeEnv(tryccache=True), - haltOnFailure = True, - )) - - # populate dl - factory.addStep(ShellCommand( - name = "dlrun", - description = "Populating dl/", - descriptionDone = "dl/ populated", - command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"], - env = MakeEnv(), - logEnviron = False, - locks = NetLockDl, - )) - - factory.addStep(ShellCommand( - name = "cleanbase", - description = "Cleaning base-files", - command=["make", "package/base-files/clean", "V=s"], - )) - - # build - factory.addStep(ShellCommand( - name = "tools", - description = "Building and installing tools", - descriptionDone = "Tools built and installed", - command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"], - env = MakeEnv(tryccache=True), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "toolchain", - description = "Building and installing toolchain", - descriptionDone = "Toolchain built and installed", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"], - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "kmods", - description = "Building kmods", - descriptionDone = "Kmods built", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"], - env = MakeEnv(), - haltOnFailure = True, - )) - - # find kernel version - 
factory.addStep(SetPropertyFromCommand( - name = "kernelversion", - property = "kernelversion", - description = "Finding the effective Kernel version", - command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'", - env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }, - )) - - factory.addStep(ShellCommand( - name = "pkgclean", - description = "Cleaning up package build", - descriptionDone = "Package build cleaned up", - command=["make", "package/cleanup", "V=s"], - )) - - factory.addStep(ShellCommand( - name = "pkgbuild", - description = "Building packages", - descriptionDone = "Packages built", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"], - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "pkginstall", - description = "Installing packages", - descriptionDone = "Packages installed", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"], - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "pkgindex", - description = "Indexing packages", - descriptionDone = "Packages indexed", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="], - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "images", - description = "Building and installing images", - descriptionDone = "Images built and installed", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"], - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "buildinfo", - description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo", - command = "make -j1 buildinfo V=s || true", - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "json_overview_image_info", 
- description = "Generating profiles.json in target folder", - command = "make -j1 json_overview_image_info V=s || true", - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "checksums", - description = "Calculating checksums", - descriptionDone = "Checksums calculated", - command=["make", "-j1", "checksum", "V=s"], - env = MakeEnv(), - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "kmoddir", - descriptionDone = "Kmod directory created", - command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])], - haltOnFailure = True, - doStepIf = IsKmodArchiveEnabled, - )) - - factory.addStep(ShellCommand( - name = "kmodprepare", - description = "Preparing kmod archive", - descriptionDone = "Kmod archive prepared", - command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va", - Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]), - Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])], - haltOnFailure = True, - doStepIf = IsKmodArchiveEnabled, - )) - - factory.addStep(ShellCommand( - name = "kmodindex", - description = "Indexing kmod archive", - descriptionDone = "Kmod archive indexed", - command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=", - Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])], - env = MakeEnv(), - haltOnFailure = True, - doStepIf = IsKmodArchiveEnabled, - )) - - # sign - factory.addStep(MasterShellCommand( - name = "signprepare", - descriptionDone = "Temporary signing directory prepared", - command = ["mkdir", "-p", "%s/signing" %(work_dir)], - haltOnFailure = True, - doStepIf = IsSignEnabled, - - )) - - 
factory.addStep(ShellCommand( - name = "signpack", - description = "Packing files to sign", - descriptionDone = "Files to sign packed", - command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]), - haltOnFailure = True, - doStepIf = IsSignEnabled, - )) - - factory.addStep(FileUpload( - workersrc = "sign.tar.gz", - masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), - haltOnFailure = True, - doStepIf = IsSignEnabled, - )) - - factory.addStep(MasterShellCommand( - name = "signfiles", - description = "Signing files", - descriptionDone = "Files signed", - command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")], - env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") }, - haltOnFailure = True, - doStepIf = IsSignEnabled, - )) - - factory.addStep(FileDownload( - name = "dlsigntargz", - mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), - workerdest = "sign.tar.gz", - haltOnFailure = True, - doStepIf = IsSignEnabled, - )) - - factory.addStep(ShellCommand( - name = "signunpack", - description = "Unpacking signed files", - descriptionDone = "Signed files unpacked", - command = ["tar", "-xzf", "sign.tar.gz"], - haltOnFailure = True, - doStepIf = IsSignEnabled, - )) - - # upload - factory.addStep(ShellCommand( - name = "dirprepare", - descriptionDone = "Upload directory structure prepared", - command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)], - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "linkprepare", - descriptionDone = "Repository symlink prepared", - command = ["ln", "-s", "-f", 
Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)], - doStepIf = IsNoMasterBuild, - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "kmoddirprepare", - descriptionDone = "Kmod archive upload directory prepared", - command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)], - haltOnFailure = True, - doStepIf = IsKmodArchiveEnabled, - )) - - factory.addStep(ShellCommand( - name = "dirupload", - description = "Uploading directory structure", - descriptionDone = "Directory structure uploaded", - command = ["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))], - env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) }, - haltOnFailure = True, - logEnviron = False, - locks = NetLockUl, - doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), - )) - - # download remote sha256sums to 'target-sha256sums' - factory.addStep(ShellCommand( - name = "target-sha256sums", - description = "Fetching remote sha256sums for target", - descriptionDone = "Remote sha256sums for target fetched", - command = ["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"], - env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) }, - logEnviron = False, - haltOnFailure = False, - flunkOnFailure = False, - warnOnFailure = False, - doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), - )) - - # 
build list of files to upload - factory.addStep(FileDownload( - name = "dlsha2rsyncpl", - mastersrc = scripts_dir + '/sha2rsync.pl', - workerdest = "../sha2rsync.pl", - mode = 0o755, - )) - - factory.addStep(ShellCommand( - name = "buildlist", - description = "Building list of files to upload", - descriptionDone = "List of files to upload built", - command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"], - haltOnFailure = True, - )) - - factory.addStep(FileDownload( - name = "dlrsync.sh", - mastersrc = scripts_dir + '/rsync.sh', - workerdest = "../rsync.sh", - mode = 0o755, - )) - - # upload new files and update existing ones - factory.addStep(ShellCommand( - name = "targetupload", - description = "Uploading target files", - descriptionDone = "Target files uploaded", - command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts + - [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]), - Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)], - env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) }, - haltOnFailure = True, - logEnviron = False, - doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), - )) - - # delete files which don't exist locally - factory.addStep(ShellCommand( - name = "targetprune", - description = "Pruning target files", - descriptionDone = "Target files pruned", - command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts + - 
[Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]), - Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)], - env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) }, - haltOnFailure = True, - logEnviron = False, - locks = NetLockUl, - doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), - )) - - factory.addStep(ShellCommand( - name = "kmodupload", - description = "Uploading kmod archive", - descriptionDone = "Kmod archive uploaded", - command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts + - [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]), - Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)], - env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) }, - haltOnFailure = True, - logEnviron = False, - locks = NetLockUl, - doStepIf = IsKmodArchiveAndRsyncEnabled, - )) - - factory.addStep(ShellCommand( - name = "sourcelist", - description = "Finding source archives to upload", - descriptionDone = "Source archives to upload found", - command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist", - haltOnFailure = True, - )) - - factory.addStep(ShellCommand( - name = "sourceupload", - description = "Uploading source archives", - descriptionDone = "Source archives uploaded", - command=["../rsync.sh", "--files-from=sourcelist", 
"--size-only", "--delay-updates"] + rsync_defopts + - [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), Interpolate("-a%(prop:rsync_ipv4:+4)s"), "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))], - env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) }, - haltOnFailure = True, - logEnviron = False, - locks = NetLockUl, - doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")), - )) - - factory.addStep(ShellCommand( - name = "df", - description = "Reporting disk usage", - command=["df", "-h", "."], - env={'LC_ALL': 'C'}, - logEnviron = False, - haltOnFailure = False, - flunkOnFailure = False, - warnOnFailure = False, - alwaysRun = True, - )) - - factory.addStep(ShellCommand( - name = "du", - description = "Reporting estimated file space usage", - command=["du", "-sh", "."], - env={'LC_ALL': 'C'}, - logEnviron = False, - haltOnFailure = False, - flunkOnFailure = False, - warnOnFailure = False, - alwaysRun = True, - )) - - factory.addStep(ShellCommand( - name = "ccachestat", - description = "Reporting ccache stats", - command=["ccache", "-s"], - logEnviron = False, - want_stderr = False, - haltOnFailure = False, - flunkOnFailure = False, - warnOnFailure = False, - doStepIf = util.Transform(bool, Property("ccache_command")), - )) - - for brname in branchNames: - bldrname = brname + "_" + target - c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, tags=[brname,], nextBuild=GetNextBuild, canStartBuild=canStartBuild)) + ts = target.split("/") + + factory = BuildFactory() + + # setup shared work directory if required + factory.addStep( + ShellCommand( + name="sharedwd", + descriptionDone="Shared work directory set up", + command='test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")', + workdir=".", + haltOnFailure=True, + ) + ) + + # find 
number of cores + factory.addStep( + SetPropertyFromCommand( + name="nproc", + property="nproc", + description="Finding number of CPUs", + command=["nproc"], + ) + ) + + # find gcc and g++ compilers + factory.addStep( + FileDownload( + name="dlfindbinpl", + mastersrc=scripts_dir + "/findbin.pl", + workerdest="../findbin.pl", + mode=0o755, + ) + ) + + factory.addStep( + SetPropertyFromCommand( + name="gcc", + property="cc_command", + description="Finding gcc command", + command=["../findbin.pl", "gcc", "", ""], + haltOnFailure=True, + ) + ) + + factory.addStep( + SetPropertyFromCommand( + name="g++", + property="cxx_command", + description="Finding g++ command", + command=["../findbin.pl", "g++", "", ""], + haltOnFailure=True, + ) + ) + + # see if ccache is available + factory.addStep( + SetPropertyFromCommand( + name="ccache", + property="ccache_command", + description="Testing for ccache command", + command=["which", "ccache"], + haltOnFailure=False, + flunkOnFailure=False, + warnOnFailure=False, + hideStepIf=lambda r, s: r == results.FAILURE, + ) + ) + + # check out the source + # Git() runs: + # if repo doesn't exist: 'git clone repourl' + # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. 
Only works with mode='full' + # git cat-file -e + # git checkout -f + # git checkout -B + # git rev-parse HEAD + factory.addStep( + Git( + name="git", + repourl=repo_url, + mode="full", + method="fresh", + locks=NetLockDl, + haltOnFailure=True, + ) + ) + + # workaround for https://github.com/openwrt/buildbot/issues/5 + factory.addStep( + Git( + name="git me once more please", + repourl=repo_url, + mode="full", + method="fresh", + locks=NetLockDl, + haltOnFailure=True, + ) + ) + + # update remote refs + factory.addStep( + ShellCommand( + name="fetchrefs", + description="Fetching Git remote refs", + descriptionDone="Git remote refs fetched", + command=[ + "git", + "fetch", + "origin", + Interpolate( + "+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s" + ), + ], + haltOnFailure=True, + ) + ) + + # getver.sh requires local branches to track upstream otherwise version computation fails. + # Git() does not set tracking branches when cloning or switching, so work around this here + factory.addStep( + ShellCommand( + name="trackupstream", + description="Setting upstream branch", + descriptionDone="getver.sh is happy now", + command=["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")], + haltOnFailure=True, + ) + ) + + # Verify that Git HEAD points to a tag or branch + # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html + factory.addStep( + ShellCommand( + name="gitverify", + description="Ensuring that Git HEAD is pointing to a branch or tag", + descriptionDone="Git HEAD is sane", + command='git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."', + haltOnFailure=True, + ) + ) + + factory.addStep( + StringDownload( + name="ccachecc", + s='#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n', + workerdest="../ccache_cc.sh", + mode=0o755, + ) + ) + + 
factory.addStep( + StringDownload( + name="ccachecxx", + s='#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n', + workerdest="../ccache_cxx.sh", + mode=0o755, + ) + ) + + # feed + factory.addStep( + ShellCommand( + name="updatefeeds", + description="Updating feeds", + command=["./scripts/feeds", "update"], + env=MakeEnv(tryccache=True), + haltOnFailure=True, + locks=NetLockDl, + ) + ) + + # feed + factory.addStep( + ShellCommand( + name="installfeeds", + description="Installing feeds", + command=["./scripts/feeds", "install", "-a"], + env=MakeEnv(tryccache=True), + haltOnFailure=True, + ) + ) + + # seed config + factory.addStep( + StringDownload( + name="dlconfigseed", + s=Interpolate("%(kw:seed)s\n", seed=GetConfigSeed), + workerdest=".config", + mode=0o644, + ) + ) + + # configure + factory.addStep( + ShellCommand( + name="newconfig", + descriptionDone=".config seeded", + command=Interpolate( + "printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", + target=ts[0], + subtarget=ts[1], + usign=GetUsignKey, + ), + ) + ) + + factory.addStep( + ShellCommand( + name="defconfig", + description="Populating .config", + command=["make", "defconfig"], + env=MakeEnv(), + ) + ) + + # check arch - exit early if does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget + factory.addStep( + ShellCommand( + name="checkarch", + description="Checking architecture", + descriptionDone="Architecture validated", + command='grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' + % (ts[0], ts[1]), + logEnviron=False, + want_stdout=False, + want_stderr=False, + haltOnFailure=True, + flunkOnFailure=False, # this is not a build FAILURE - TODO mark build as SKIPPED + ) + ) + + # find libc suffix + factory.addStep( + SetPropertyFromCommand( + name="libc", + property="libc", + description="Finding libc suffix", + command=[ + "sed", + "-ne", + 
'/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', + ".config", + ], + ) + ) + + # install build key + factory.addStep( + StringDownload( + name="dlkeybuildpub", + s=Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub), + workerdest="key-build.pub", + mode=0o600, + doStepIf=IsUsignEnabled, + ) + ) + + factory.addStep( + StringDownload( + name="dlkeybuild", + s="# fake private key", + workerdest="key-build", + mode=0o600, + doStepIf=IsUsignEnabled, + ) + ) + + factory.addStep( + StringDownload( + name="dlkeybuilducert", + s="# fake certificate", + workerdest="key-build.ucert", + mode=0o600, + doStepIf=IsUsignEnabled, + ) + ) + + # prepare dl + factory.addStep( + ShellCommand( + name="dldir", + description="Preparing dl/", + descriptionDone="dl/ prepared", + command='mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"', + workdir=Property("builddir"), + logEnviron=False, + want_stdout=False, + ) + ) + + # cleanup dl + factory.addStep( + ShellCommand( + name="dlprune", + description="Pruning dl/", + descriptionDone="dl/ pruned", + command="find dl/ -mindepth 1 -atime +15 -delete -print", + logEnviron=False, + ) + ) + + # prepare tar + factory.addStep( + ShellCommand( + name="dltar", + description="Building and installing GNU tar", + descriptionDone="GNU tar built and installed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "tools/tar/compile", + "V=s", + ], + env=MakeEnv(tryccache=True), + haltOnFailure=True, + ) + ) + + # populate dl + factory.addStep( + ShellCommand( + name="dlrun", + description="Populating dl/", + descriptionDone="dl/ populated", + command=["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"], + env=MakeEnv(), + logEnviron=False, + locks=NetLockDl, + ) + ) + + factory.addStep( + ShellCommand( + name="cleanbase", + description="Cleaning base-files", + command=["make", "package/base-files/clean", "V=s"], + ) + ) + + # build + factory.addStep( + ShellCommand( + name="tools", + 
description="Building and installing tools", + descriptionDone="Tools built and installed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "tools/install", + "V=s", + ], + env=MakeEnv(tryccache=True), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="toolchain", + description="Building and installing toolchain", + descriptionDone="Toolchain built and installed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "toolchain/install", + "V=s", + ], + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="kmods", + description="Building kmods", + descriptionDone="Kmods built", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "target/compile", + "V=s", + "IGNORE_ERRORS=n m", + "BUILD_LOG=1", + ], + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + # find kernel version + factory.addStep( + SetPropertyFromCommand( + name="kernelversion", + property="kernelversion", + description="Finding the effective Kernel version", + command="make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'", + env={"TOPDIR": Interpolate("%(prop:builddir)s/build")}, + ) + ) + + factory.addStep( + ShellCommand( + name="pkgclean", + description="Cleaning up package build", + descriptionDone="Package build cleaned up", + command=["make", "package/cleanup", "V=s"], + ) + ) + + factory.addStep( + ShellCommand( + name="pkgbuild", + description="Building packages", + descriptionDone="Packages built", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "package/compile", + "V=s", + "IGNORE_ERRORS=n m", + "BUILD_LOG=1", + ], + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="pkginstall", + description="Installing packages", + descriptionDone="Packages installed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "package/install", + "V=s", + ], + env=MakeEnv(), + 
haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="pkgindex", + description="Indexing packages", + descriptionDone="Packages indexed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "package/index", + "V=s", + "CONFIG_SIGNED_PACKAGES=", + ], + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="images", + description="Building and installing images", + descriptionDone="Images built and installed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "target/install", + "V=s", + ], + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="buildinfo", + description="Generating config.buildinfo, version.buildinfo and feeds.buildinfo", + command="make -j1 buildinfo V=s || true", + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="json_overview_image_info", + description="Generating profiles.json in target folder", + command="make -j1 json_overview_image_info V=s || true", + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="checksums", + description="Calculating checksums", + descriptionDone="Checksums calculated", + command=["make", "-j1", "checksum", "V=s"], + env=MakeEnv(), + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="kmoddir", + descriptionDone="Kmod directory created", + command=[ + "mkdir", + "-p", + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", + target=ts[0], + subtarget=ts[1], + ), + ], + haltOnFailure=True, + doStepIf=IsKmodArchiveEnabled, + ) + ) + + factory.addStep( + ShellCommand( + name="kmodprepare", + description="Preparing kmod archive", + descriptionDone="Kmod archive prepared", + command=[ + "rsync", + "--include=/kmod-*.ipk", + "--exclude=*", + "-va", + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", + target=ts[0], + subtarget=ts[1], 
+ ), + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", + target=ts[0], + subtarget=ts[1], + ), + ], + haltOnFailure=True, + doStepIf=IsKmodArchiveEnabled, + ) + ) + + factory.addStep( + ShellCommand( + name="kmodindex", + description="Indexing kmod archive", + descriptionDone="Kmod archive indexed", + command=[ + "make", + Interpolate("-j%(prop:nproc:-1)s"), + "package/index", + "V=s", + "CONFIG_SIGNED_PACKAGES=", + Interpolate( + "PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", + target=ts[0], + subtarget=ts[1], + ), + ], + env=MakeEnv(), + haltOnFailure=True, + doStepIf=IsKmodArchiveEnabled, + ) + ) + + # sign + factory.addStep( + MasterShellCommand( + name="signprepare", + descriptionDone="Temporary signing directory prepared", + command=["mkdir", "-p", "%s/signing" % (work_dir)], + haltOnFailure=True, + doStepIf=IsSignEnabled, + ) + ) + + factory.addStep( + ShellCommand( + name="signpack", + description="Packing files to sign", + descriptionDone="Files to sign packed", + command=Interpolate( + "find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", + target=ts[0], + subtarget=ts[1], + ), + haltOnFailure=True, + doStepIf=IsSignEnabled, + ) + ) + + factory.addStep( + FileUpload( + workersrc="sign.tar.gz", + masterdest="%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]), + haltOnFailure=True, + doStepIf=IsSignEnabled, + ) + ) + + factory.addStep( + MasterShellCommand( + name="signfiles", + description="Signing files", + descriptionDone="Files signed", + command=[ + "%s/signall.sh" % (scripts_dir), + "%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]), + Interpolate("%(prop:branch)s"), + ], + env={"CONFIG_INI": os.getenv("BUILDMASTER_CONFIG", "./config.ini")}, + 
haltOnFailure=True, + doStepIf=IsSignEnabled, + ) + ) + + factory.addStep( + FileDownload( + name="dlsigntargz", + mastersrc="%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]), + workerdest="sign.tar.gz", + haltOnFailure=True, + doStepIf=IsSignEnabled, + ) + ) + + factory.addStep( + ShellCommand( + name="signunpack", + description="Unpacking signed files", + descriptionDone="Signed files unpacked", + command=["tar", "-xzf", "sign.tar.gz"], + haltOnFailure=True, + doStepIf=IsSignEnabled, + ) + ) + + # upload + factory.addStep( + ShellCommand( + name="dirprepare", + descriptionDone="Upload directory structure prepared", + command=[ + "mkdir", + "-p", + Interpolate( + "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", + target=ts[0], + subtarget=ts[1], + prefix=GetVersionPrefix, + ), + ], + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="linkprepare", + descriptionDone="Repository symlink prepared", + command=[ + "ln", + "-s", + "-f", + Interpolate( + "../packages-%(kw:basever)s", + basever=util.Transform(GetBaseVersion, Property("branch")), + ), + Interpolate( + "tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix + ), + ], + doStepIf=IsNoMasterBuild, + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="kmoddirprepare", + descriptionDone="Kmod archive upload directory prepared", + command=[ + "mkdir", + "-p", + Interpolate( + "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", + target=ts[0], + subtarget=ts[1], + prefix=GetVersionPrefix, + ), + ], + haltOnFailure=True, + doStepIf=IsKmodArchiveEnabled, + ) + ) + + factory.addStep( + ShellCommand( + name="dirupload", + description="Uploading directory structure", + descriptionDone="Directory structure uploaded", + command=["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")] + + rsync_defopts + + [ + "tmp/upload/", + Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url")), + ], + env={ + 
"RSYNC_PASSWORD": Interpolate( + "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key") + ) + }, + haltOnFailure=True, + logEnviron=False, + locks=NetLockUl, + doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), + ) + ) + + # download remote sha256sums to 'target-sha256sums' + factory.addStep( + ShellCommand( + name="target-sha256sums", + description="Fetching remote sha256sums for target", + descriptionDone="Remote sha256sums for target fetched", + command=["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")] + + rsync_defopts + + [ + Interpolate( + "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", + url=GetRsyncParams.withArgs("bin", "url"), + target=ts[0], + subtarget=ts[1], + prefix=GetVersionPrefix, + ), + "target-sha256sums", + ], + env={ + "RSYNC_PASSWORD": Interpolate( + "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key") + ) + }, + logEnviron=False, + haltOnFailure=False, + flunkOnFailure=False, + warnOnFailure=False, + doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), + ) + ) + + # build list of files to upload + factory.addStep( + FileDownload( + name="dlsha2rsyncpl", + mastersrc=scripts_dir + "/sha2rsync.pl", + workerdest="../sha2rsync.pl", + mode=0o755, + ) + ) + + factory.addStep( + ShellCommand( + name="buildlist", + description="Building list of files to upload", + descriptionDone="List of files to upload built", + command=[ + "../sha2rsync.pl", + "target-sha256sums", + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", + target=ts[0], + subtarget=ts[1], + ), + "rsynclist", + ], + haltOnFailure=True, + ) + ) + + factory.addStep( + FileDownload( + name="dlrsync.sh", + mastersrc=scripts_dir + "/rsync.sh", + workerdest="../rsync.sh", + mode=0o755, + ) + ) + + # upload new files and update existing ones + factory.addStep( + ShellCommand( + name="targetupload", + description="Uploading target files", + descriptionDone="Target files uploaded", + command=[ 
+ "../rsync.sh", + "--exclude=/kmods/", + "--files-from=rsynclist", + "--delay-updates", + "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]), + ] + + rsync_defopts + + [ + Interpolate("-a%(prop:rsync_ipv4:+4)s"), + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", + target=ts[0], + subtarget=ts[1], + ), + Interpolate( + "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", + url=GetRsyncParams.withArgs("bin", "url"), + target=ts[0], + subtarget=ts[1], + prefix=GetVersionPrefix, + ), + ], + env={ + "RSYNC_PASSWORD": Interpolate( + "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key") + ) + }, + haltOnFailure=True, + logEnviron=False, + doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), + ) + ) + + # delete files which don't exist locally + factory.addStep( + ShellCommand( + name="targetprune", + description="Pruning target files", + descriptionDone="Target files pruned", + command=[ + "../rsync.sh", + "--exclude=/kmods/", + "--delete", + "--existing", + "--ignore-existing", + "--delay-updates", + "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]), + ] + + rsync_defopts + + [ + Interpolate("-a%(prop:rsync_ipv4:+4)s"), + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", + target=ts[0], + subtarget=ts[1], + ), + Interpolate( + "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", + url=GetRsyncParams.withArgs("bin", "url"), + target=ts[0], + subtarget=ts[1], + prefix=GetVersionPrefix, + ), + ], + env={ + "RSYNC_PASSWORD": Interpolate( + "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key") + ) + }, + haltOnFailure=True, + logEnviron=False, + locks=NetLockUl, + doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")), + ) + ) + + factory.addStep( + ShellCommand( + name="kmodupload", + description="Uploading kmod archive", + descriptionDone="Kmod archive uploaded", + command=[ + "../rsync.sh", + "--delete", + "--delay-updates", + "--partial-dir=.~tmp~%s~%s" % (ts[0], 
ts[1]), + ] + + rsync_defopts + + [ + Interpolate("-a%(prop:rsync_ipv4:+4)s"), + Interpolate( + "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", + target=ts[0], + subtarget=ts[1], + ), + Interpolate( + "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", + url=GetRsyncParams.withArgs("bin", "url"), + target=ts[0], + subtarget=ts[1], + prefix=GetVersionPrefix, + ), + ], + env={ + "RSYNC_PASSWORD": Interpolate( + "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key") + ) + }, + haltOnFailure=True, + logEnviron=False, + locks=NetLockUl, + doStepIf=IsKmodArchiveAndRsyncEnabled, + ) + ) + + factory.addStep( + ShellCommand( + name="sourcelist", + description="Finding source archives to upload", + descriptionDone="Source archives to upload found", + command="find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist", + haltOnFailure=True, + ) + ) + + factory.addStep( + ShellCommand( + name="sourceupload", + description="Uploading source archives", + descriptionDone="Source archives uploaded", + command=[ + "../rsync.sh", + "--files-from=sourcelist", + "--size-only", + "--delay-updates", + ] + + rsync_defopts + + [ + Interpolate( + "--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", + target=ts[0], + subtarget=ts[1], + ), + Interpolate("-a%(prop:rsync_ipv4:+4)s"), + "dl/", + Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url")), + ], + env={ + "RSYNC_PASSWORD": Interpolate( + "%(kw:key)s", key=GetRsyncParams.withArgs("src", "key") + ) + }, + haltOnFailure=True, + logEnviron=False, + locks=NetLockUl, + doStepIf=util.Transform(bool, GetRsyncParams.withArgs("src", "url")), + ) + ) + + factory.addStep( + ShellCommand( + name="df", + description="Reporting disk usage", + command=["df", "-h", "."], + env={"LC_ALL": "C"}, + logEnviron=False, + haltOnFailure=False, + 
flunkOnFailure=False, + warnOnFailure=False, + alwaysRun=True, + ) + ) + + factory.addStep( + ShellCommand( + name="du", + description="Reporting estimated file space usage", + command=["du", "-sh", "."], + env={"LC_ALL": "C"}, + logEnviron=False, + haltOnFailure=False, + flunkOnFailure=False, + warnOnFailure=False, + alwaysRun=True, + ) + ) + + factory.addStep( + ShellCommand( + name="ccachestat", + description="Reporting ccache stats", + command=["ccache", "-s"], + logEnviron=False, + want_stderr=False, + haltOnFailure=False, + flunkOnFailure=False, + warnOnFailure=False, + doStepIf=util.Transform(bool, Property("ccache_command")), + ) + ) + + for brname in branchNames: + bldrname = brname + "_" + target + c["builders"].append( + BuilderConfig( + name=bldrname, + workernames=workerNames, + factory=factory, + tags=[ + brname, + ], + nextBuild=GetNextBuild, + canStartBuild=canStartBuild, + ) + ) ####### STATUS TARGETS @@ -1285,54 +1766,56 @@ for target in targets: # including web pages, email senders, and IRC bots. 
if "status_bind" in inip1: - c['www'] = { - 'port': inip1.get("status_bind"), - 'plugins': { - 'waterfall_view': True, - 'console_view': True, - 'grid_view': True - } - } - - if "status_user" in inip1 and "status_password" in inip1: - c['www']['auth'] = util.UserPasswordAuth([ - (inip1.get("status_user"), inip1.get("status_password")) - ]) - c['www']['authz'] = util.Authz( - allowRules=[ util.AnyControlEndpointMatcher(role="admins") ], - roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ] - ) - -c['services'] = [] + c["www"] = { + "port": inip1.get("status_bind"), + "plugins": {"waterfall_view": True, "console_view": True, "grid_view": True}, + } + + if "status_user" in inip1 and "status_password" in inip1: + c["www"]["auth"] = util.UserPasswordAuth( + [(inip1.get("status_user"), inip1.get("status_password"))] + ) + c["www"]["authz"] = util.Authz( + allowRules=[util.AnyControlEndpointMatcher(role="admins")], + roleMatchers=[ + util.RolesFromUsername( + roles=["admins"], usernames=[inip1.get("status_user")] + ) + ], + ) + +c["services"] = [] if ini.has_section("irc"): - iniirc = ini['irc'] - irc_host = iniirc.get("host", None) - irc_port = iniirc.getint("port", 6667) - irc_chan = iniirc.get("channel", None) - irc_nick = iniirc.get("nickname", None) - irc_pass = iniirc.get("password", None) - - if irc_host and irc_nick and irc_chan: - irc = reporters.IRC(irc_host, irc_nick, - port = irc_port, - password = irc_pass, - channels = [ irc_chan ], - notify_events = [ 'exception', 'problem', 'recovery' ] - ) - - c['services'].append(irc) - -c['revlink'] = util.RevlinkMatch([ - r'https://git.openwrt.org/openwrt/(.*).git' - ], - r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s') + iniirc = ini["irc"] + irc_host = iniirc.get("host", None) + irc_port = iniirc.getint("port", 6667) + irc_chan = iniirc.get("channel", None) + irc_nick = iniirc.get("nickname", None) + irc_pass = iniirc.get("password", None) + + if irc_host and 
irc_nick and irc_chan: + irc = reporters.IRC( + irc_host, + irc_nick, + port=irc_port, + password=irc_pass, + channels=[irc_chan], + notify_events=["exception", "problem", "recovery"], + ) + + c["services"].append(irc) + +c["revlink"] = util.RevlinkMatch( + [r"https://git.openwrt.org/openwrt/(.*).git"], + r"https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s", +) ####### DB URL -c['db'] = { - # This specifies what database buildbot uses to store its state. You can leave - # this at its default for all but the largest installations. - 'db_url' : "sqlite:///state.sqlite", +c["db"] = { + # This specifies what database buildbot uses to store its state. You can leave + # this at its default for all but the largest installations. + "db_url": "sqlite:///state.sqlite", } -c['buildbotNetUsageData'] = None +c["buildbotNetUsageData"] = None -- 2.30.2