1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41
42
43 if not os.path.exists("twistd.pid"):
44 with open("twistd.pid", "w") as pidfile:
45 pidfile.write("{}".format(os.getpid()))
46
47 # This is the phase1 buildmaster config file. It must be installed as
48 # 'master.cfg' in the buildmaster's base directory.
49
50 ini = configparser.ConfigParser()
51 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
52
53 if "general" not in ini or "phase1" not in ini:
54 raise ValueError("Fix your configuration")
55
56 inip1 = ini['phase1']
57
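# For reference, a config.ini consumed by this file might look roughly like the
# sketch below. Only the section prefixes and option names are taken from the
# parsing code in this file; every value shown is an illustrative assumption.
#
#   [general]
#   title = Example buildbot
#   title_url = https://example.org/
#   workdir = .
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/example/source.git
#
#   [branch master]
#   name = master
#   binary_url = rsync://upload@example.org/bin
#   binary_password = secret
#   source_url = rsync://upload@example.org/src
#   source_password = secret
#   usign_key = <base64-encoded usign secret key>
#   config_seed = CONFIG_BUILDBOT=y
#   kmod_archive = True
#
#   [worker 1]
#   name = worker-01
#   password = secret
#   phase = 1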
58 # Globals
59 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
60 scripts_dir = os.path.abspath("../scripts")
61
62 repo_url = ini['repo'].get("url")
63
64 rsync_defopts = ["-v", "-4", "--timeout=120"]
65
66 # Example (disabled): when a binary rsync URL is a daemon URL ("host::module" or
67 # "rsync://..."), a connect timeout can be added: rsync_defopts += ["--contimeout=20"]
68
69 branches = {}
70
71 def ini_parse_branch(section):
72 b = {}
73 name = section.get("name")
74
75 if not name:
76 raise ValueError("missing 'name' in " + repr(section))
77 if name in branches:
78 raise ValueError("duplicate branch name in " + repr(section))
79
80 b["name"] = name
81 b["bin_url"] = section.get("binary_url")
82 b["bin_key"] = section.get("binary_password")
83
84 b["src_url"] = section.get("source_url")
85 b["src_key"] = section.get("source_password")
86
87 b["gpg_key"] = section.get("gpg_key")
88
89 b["usign_key"] = section.get("usign_key")
90 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
91 b["usign_comment"] = section.get("usign_comment", usign_comment)
92
93 b["config_seed"] = section.get("config_seed")
94
95 b["kmod_archive"] = section.getboolean("kmod_archive", False)
96
97 branches[name] = b
98 log.msg("Configured branch: {}".format(name))
99
100 # PB port can be either a numeric port or a connection string
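# (e.g. 9989, or a Twisted strports string such as "tcp:9989" - values here are illustrative)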
101 pb_port = inip1.get("port") or 9989
102
103 # This is the dictionary that the buildmaster pays attention to. We also use
104 # a shorter alias to save typing.
105 c = BuildmasterConfig = {}
106
107 ####### PROJECT IDENTITY
108
109 # the 'title' string will appear at the top of this buildbot
110 # installation's web UI home page (linked to the
111 # 'titleURL') and is embedded in the page titles.
112
113 c['title'] = ini['general'].get("title")
114 c['titleURL'] = ini['general'].get("title_url")
115
116 # the 'buildbotURL' string should point to the location where the buildbot's
117 # internal web server is visible. This typically uses the port number set
118 # in the 'status_bind' option below, but with an externally-visible
119 # host name which the buildbot cannot figure out
120 # without some help.
121
122 c['buildbotURL'] = inip1.get("buildbot_url")
123
124 ####### BUILDWORKERS
125
126 # The 'workers' list defines the set of recognized workers. Each element is
127 # a Worker object, specifying a unique worker name and password. The same
128 # worker name and password must be configured on the worker.
129
130 c['workers'] = []
131 NetLocks = dict()
132
133 for section in ini.sections():
134 if section.startswith("branch "):
135 ini_parse_branch(ini[section])
136
137 if section.startswith("worker "):
138 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
139 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
140 sl_props = { 'dl_lock':None, 'ul_lock':None }
141 name = ini.get(section, "name")
142 password = ini.get(section, "password")
143 if ini.has_option(section, "dl_lock"):
144 lockname = ini.get(section, "dl_lock")
145 sl_props['dl_lock'] = lockname
146 if lockname not in NetLocks:
147 NetLocks[lockname] = locks.MasterLock(lockname)
148 if ini.has_option(section, "ul_lock"):
149 lockname = ini.get(section, "ul_lock")
150 sl_props['ul_lock'] = lockname
151 if lockname not in NetLocks:
152 NetLocks[lockname] = locks.MasterLock(lockname)
153 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
154
155 c['protocols'] = {'pb': {'port': pb_port}}
156
157 # coalesce builds
158 c['collapseRequests'] = True
159
160 # Reduce amount of backlog data
161 c['configurators'] = [util.JanitorConfigurator(
162 logHorizon=timedelta(days=3),
163 hour=6,
164 )]
165
166 @defer.inlineCallbacks
167 def getNewestCompleteTime(bldr):
168 """Returns the complete_at of the latest completed and not SKIPPED
169 build request for this builder, or None if there are no such build
170 requests. We need to filter out SKIPPED requests because we're
171 using collapseRequests=True, which unfortunately marks all
172 previous requests as complete when a new buildset is created.
173
174 @returns: datetime instance or None, via Deferred
175 """
176
177 bldrid = yield bldr.getBuilderId()
178 completed = yield bldr.master.data.get(
179 ('builders', bldrid, 'buildrequests'),
180 [
181 resultspec.Filter('complete', 'eq', [True]),
182 resultspec.Filter('results', 'ne', [results.SKIPPED]),
183 ],
184 order=['-complete_at'], limit=1)
185 if not completed:
186 return
187
188 complete_at = completed[0]['complete_at']
189
190 last_build = yield bldr.master.data.get(
191 ('builds', ),
192 [
193 resultspec.Filter('builderid', 'eq', [bldrid]),
194 ],
195 order=['-started_at'], limit=1)
196
197 if last_build and last_build[0]:
198 last_complete_at = last_build[0]['complete_at']
199 if last_complete_at and (last_complete_at > complete_at):
200 return last_complete_at
201
202 return complete_at
203
204 @defer.inlineCallbacks
205 def prioritizeBuilders(master, builders):
206 """Returns sorted list of builders by their last timestamp of completed and
207 not skipped build.
208
209 @returns: list of sorted builders
210 """
211
212 def is_building(bldr):
213 return bool(bldr.building) or bool(bldr.old_building)
214
215 def bldr_info(bldr):
216 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
217 d.addCallback(lambda complete_at: (complete_at, bldr))
218 return d
219
220 def bldr_sort(item):
221 (complete_at, bldr) = item
222
223 if not complete_at:
224 date = datetime.min
225 complete_at = date.replace(tzinfo=tzutc())
226
227 if is_building(bldr):
228 date = datetime.max
229 complete_at = date.replace(tzinfo=tzutc())
230
231 return (complete_at, bldr.name)
232
233 infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
234 infos.sort(key=bldr_sort)
235
236 for r in infos:
237 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
238
239 return [r[1] for r in infos]
240
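# Resulting order (as implied by bldr_sort above): builders that have never
# completed a build sort first (datetime.min), then builders by oldest
# completion time, and builders that are currently building sort last
# (datetime.max), so idle and long-neglected builders are picked up first.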
241 c['prioritizeBuilders'] = prioritizeBuilders
242
243 ####### CHANGESOURCES
244
245 branchNames = [branches[b]["name"] for b in branches]
246
247 # find targets
248 targets = set()
249
250 def populateTargets():
251 log.msg("Populating targets, this will take time")
252 sourcegit = work_dir + '/source.git'
253 for branch in branchNames:
254 if os.path.isdir(sourcegit):
255 subprocess.call(["rm", "-rf", sourcegit])
256
257 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
258
259 os.makedirs(sourcegit + '/tmp', exist_ok=True)
260 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
261 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
262
263 while True:
264 line = findtargets.stdout.readline()
265 if not line:
266 break
267 ta = line.decode().strip().split(' ')
268 targets.add(ta[0])
269
270 subprocess.call(["rm", "-rf", sourcegit])
271
272 populateTargets()
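# After this, 'targets' holds "<target>/<subtarget>" pairs as printed by
# scripts/dump-target-info.pl, e.g. {"ath79/generic", "x86/64", ...}
# (examples are illustrative; the actual set depends on the checked-out branches).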
273
274 # the 'change_source' setting tells the buildmaster how it should find out
275 # about source code changes. Here we poll the repository configured in config.ini for all watched branches.
276
277 c['change_source'] = []
278 c['change_source'].append(GitPoller(
279 repo_url,
280 workdir=work_dir+'/work.git', branches=branchNames,
281 pollAtLaunch=True, pollinterval=300))
282
283 ####### SCHEDULERS
284
285 # Configure the Schedulers, which decide how to react to incoming changes. In this
286 # case, kick off builds on all target builders once a watched branch is stable.
287
288 class TagChoiceParameter(BaseParameter):
289 spec_attributes = ["strict", "choices"]
290 type = "list"
291 strict = True
292
293 def __init__(self, name, label=None, **kw):
294 super().__init__(name, label, **kw)
295 self._choice_list = []
296
297 @property
298 def choices(self):
299 taglist = []
300 basever = re.search(r'-([0-9]+\.[0-9]+)$', "master") # XXX FIXME
301
302 if basever:
303 findtags = subprocess.Popen(
304 ['git', 'ls-remote', '--tags', repo_url],
305 stdout = subprocess.PIPE)
306
307 while True:
308 line = findtags.stdout.readline()
309
310 if not line:
311 break
312
313 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
314
315 if tagver and tagver[1].find(basever[1]) == 0:
316 taglist.append(tagver[1])
317
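# Sort so that newer tags come first and final releases rank above their
# release candidates: appending '-z' to non-rc tags makes e.g. "21.02.0"
# sort before "21.02.0-rc4" in the reverse lexicographic sort below
# (version numbers here are illustrative).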
318 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
319 taglist.insert(0, '')
320
321 self._choice_list = taglist
322
323 return self._choice_list
324
325 def parse_from_arg(self, s):
326 if self.strict and s not in self._choice_list:
327 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
328 return s
329
330 c['schedulers'] = []
331 c['schedulers'].append(AnyBranchScheduler(
332 name = "all",
333 change_filter = util.ChangeFilter(branch=branchNames),
334 treeStableTimer = 15*60,
335 builderNames = list(targets)))
336
337 c['schedulers'].append(ForceScheduler(
338 name = "force",
339 buttonName = "Force builds",
340 label = "Force build details",
341 builderNames = [ "00_force_build" ],
342
343 codebases = [
344 util.CodebaseParameter(
345 "",
346 label = "Repository",
347 branch = util.FixedParameter(name = "branch", default = ""),
348 revision = util.FixedParameter(name = "revision", default = ""),
349 repository = util.FixedParameter(name = "repository", default = ""),
350 project = util.FixedParameter(name = "project", default = "")
351 )
352 ],
353
354 reason = util.StringParameter(
355 name = "reason",
356 label = "Reason",
357 default = "Trigger build",
358 required = True,
359 size = 80
360 ),
361
362 properties = [
363 util.NestedParameter(
364 name="options",
365 label="Build Options",
366 layout="vertical",
367 fields=[
368 util.ChoiceStringParameter(
369 name = "target",
370 label = "Build target",
371 default = "all",
372 choices = set(["all"]) | targets
373 ),
374 TagChoiceParameter(
375 name = "tag",
376 label = "Build tag",
377 default = ""
378 )
379 ]
380 )
381 ]
382 ))
383
384 ####### BUILDERS
385
386 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
387 # what steps, and which workers can execute them. Note that any particular build will
388 # only take place on one worker.
389
390 def IsTaggingRequested(step):
391 tag = step.getProperty("tag")
392 return tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag)
393
394 def IsNoMasterBuild(step):
395 return step.getProperty("branch") != "master"
396
397 def IsUsignEnabled(step):
398 branch = step.getProperty("branch")
399 return branch and branches[branch].get("usign_key")
400
401 def IsSignEnabled(step):
402 branch = step.getProperty("branch")
403 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
404
405 def IsKmodArchiveEnabled(step):
406 branch = step.getProperty("branch")
407 return branch and branches[branch].get("kmod_archive")
408
409 def GetBaseVersion(branch):
410 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
411 return branch.split('-')[1]
412 else:
413 return "master"
414
415 @properties.renderer
416 def GetVersionPrefix(props):
417 branch = props.getProperty("branch")
418 basever = GetBaseVersion(branch)
419 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
420 return "%s/" % props["tag"]
421 elif basever != "master":
422 return "%s-SNAPSHOT/" % basever
423 else:
424 return ""
425
426 @util.renderer
427 def GetConfigSeed(props):
428 branch = props.getProperty("branch")
429 return branch and branches[branch].get("config_seed") or ""
430
431 @util.renderer
432 def GetRsyncParams(props, srcorbin, urlorkey):
433 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
434 branch = props.getProperty("branch")
435 opt = srcorbin + "_" + urlorkey
436 return branch and branches[branch].get(opt)
437
438 @util.renderer
439 def GetUsignKey(props):
440 branch = props.getProperty("branch")
441 return branch and branches[branch].get("usign_key")
442
443 def GetNextBuild(builder, requests):
444 for r in requests:
445 if r.properties:
446 # order tagged build first
447 if r.properties.hasProperty("tag"):
448 return r
449 # then order by branch order
450 pbranch = r.properties.getProperty("branch")
451 for name in branchNames:
452 if pbranch == name:
453 return r
454
455 r = requests[0]
456 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
457 return r
458
459 def MakeEnv(overrides=None, tryccache=False):
460 env = {
461 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
462 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
463 }
464 if tryccache:
465 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
466 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
467 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
468 else:
469 env['CC'] = env['CCC']
470 env['CXX'] = env['CCXX']
471 env['CCACHE'] = ''
472 if overrides is not None:
473 env.update(overrides)
474 return env
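# Illustrative rendered values, assuming ccache is present on the worker:
#   MakeEnv(tryccache=True) ~ { 'CCC': 'gcc', 'CCXX': 'g++',
#       'CC': '<builddir>/ccache_cc.sh', 'CXX': '<builddir>/ccache_cxx.sh',
#       'CCACHE': '/usr/bin/ccache' }
# i.e. CC/CXX point at the ccache wrapper scripts installed further below,
# while CCC/CCXX keep the plain compiler commands the wrappers expand to.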
475
476 @properties.renderer
477 def NetLockDl(props, extralock=None):
478 lock = None
479 lock_list = []
480 if props.hasProperty("dl_lock"):
481 lock = NetLocks[props["dl_lock"]]
482 if lock is not None:
483 lock_list.append(lock.access('exclusive'))
484 if extralock is not None:
485 lock_list.append(extralock)
486 return lock_list
487
488 @properties.renderer
489 def NetLockUl(props):
490 lock = None
491 if props.hasProperty("ul_lock"):
492 lock = NetLocks[props["ul_lock"]]
493 if lock is not None:
494 return [lock.access('exclusive')]
495 else:
496 return []
497
498 @util.renderer
499 def TagPropertyValue(props):
500 if props.hasProperty("options"):
501 options = props.getProperty("options")
502 if type(options) is dict:
503 return options.get("tag")
504 return None
505
506 def IsTargetSelected(target):
507 def CheckTargetProperty(step):
508 try:
509 options = step.getProperty("options")
510 if type(options) is dict:
511 selected_target = options.get("target", "all")
512 if selected_target != "all" and selected_target != target:
513 return False
514 except KeyError:
515 pass
516
517 return True
518
519 return CheckTargetProperty
520
521 @util.renderer
522 def UsignSec2Pub(props):
523 branch = props.getProperty("branch")
524 try:
525 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
526 seckey = branches[branch].get("usign_key")
527 seckey = base64.b64decode(seckey)
528 except Exception:
529 return None
530
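# The slicing below assumes the usign/signify secret key layout: bytes 0..1
# hold the algorithm ("Ed"), bytes 32..39 the key number, and the public half
# of the Ed25519 key starts at byte 72; together they form the public key blob.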
531 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
532 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
533
534
535 c['builders'] = []
536
537 dlLock = locks.WorkerLock("worker_dl")
538
539 workerNames = [ ]
540
541 for worker in c['workers']:
542 workerNames.append(worker.workername)
543
544 force_factory = BuildFactory()
545
546 c['builders'].append(BuilderConfig(
547 name = "00_force_build",
548 workernames = workerNames,
549 factory = force_factory))
550
551 for target in targets:
552 ts = target.split('/')
553
554 factory = BuildFactory()
555
556 # setup shared work directory if required
557 factory.addStep(ShellCommand(
558 name = "sharedwd",
559 descriptionDone = "Shared work directory set up",
560 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
561 workdir = ".",
562 haltOnFailure = True,
563 ))
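# (The symlink above makes all target builders on a worker share a single
# build tree, so the checkout and build state can presumably be reused
# across targets instead of being duplicated per builder.)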
564
565 # find number of cores
566 factory.addStep(SetPropertyFromCommand(
567 name = "nproc",
568 property = "nproc",
569 description = "Finding number of CPUs",
570 command = ["nproc"],
571 ))
572
573 # find gcc and g++ compilers
574 factory.addStep(FileDownload(
575 name = "dlfindbinpl",
576 mastersrc = scripts_dir + '/findbin.pl',
577 workerdest = "../findbin.pl",
578 mode = 0o755,
579 ))
580
581 factory.addStep(SetPropertyFromCommand(
582 name = "gcc",
583 property = "cc_command",
584 description = "Finding gcc command",
585 command = ["../findbin.pl", "gcc", "", ""],
586 haltOnFailure = True,
587 ))
588
589 factory.addStep(SetPropertyFromCommand(
590 name = "g++",
591 property = "cxx_command",
592 description = "Finding g++ command",
593 command = ["../findbin.pl", "g++", "", ""],
594 haltOnFailure = True,
595 ))
596
597 # see if ccache is available
598 factory.addStep(SetPropertyFromCommand(
599 name = "ccache",
600 property = "ccache_command",
601 description = "Testing for ccache command",
602 command = ["which", "ccache"],
603 haltOnFailure = False,
604 flunkOnFailure = False,
605 warnOnFailure = False,
606 hideStepIf = lambda r, s: r==results.FAILURE,
607 ))
608
609 # check out the source
610 # Git() runs:
611 # if repo doesn't exist: 'git clone repourl'
612 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
613 # git cat-file -e <commit>
614 # git checkout -f <commit>
615 # git checkout -B <branch>
616 # git rev-parse HEAD
617 factory.addStep(Git(
618 name = "git",
619 repourl = repo_url,
620 mode = 'full',
621 method = 'fresh',
622 locks = NetLockDl,
623 haltOnFailure = True,
624 ))
625
626 # update remote refs
627 factory.addStep(ShellCommand(
628 name = "fetchrefs",
629 description = "Fetching Git remote refs",
630 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
631 haltOnFailure = True,
632 ))
633
634 # switch to tag
635 factory.addStep(ShellCommand(
636 name = "switchtag",
637 description = "Checking out Git tag",
638 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
639 haltOnFailure = True,
640 doStepIf = IsTaggingRequested
641 ))
642
643 # Verify that Git HEAD points to a tag or branch
644 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
645 factory.addStep(ShellCommand(
646 name = "gitverify",
647 description = "Ensure that Git HEAD is pointing to a branch or tag",
648 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
649 haltOnFailure = True,
650 ))
651
652 factory.addStep(ShellCommand(
653 name = "rmtmp",
654 description = "Remove tmp folder",
655 command=["rm", "-rf", "tmp/"],
656 ))
657
658 # feed
659 factory.addStep(ShellCommand(
660 name = "rmfeedlinks",
661 description = "Remove feed symlinks",
662 command=["rm", "-rf", "package/feeds/"],
663 ))
664
665 factory.addStep(StringDownload(
666 name = "ccachecc",
667 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
668 workerdest = "../ccache_cc.sh",
669 mode = 0o755,
670 ))
671
672 factory.addStep(StringDownload(
673 name = "ccachecxx",
674 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
675 workerdest = "../ccache_cxx.sh",
676 mode = 0o755,
677 ))
678
679 # feed
680 factory.addStep(ShellCommand(
681 name = "updatefeeds",
682 description = "Updating feeds",
683 command=["./scripts/feeds", "update"],
684 env = MakeEnv(tryccache=True),
685 haltOnFailure = True,
686 locks = NetLockDl,
687 ))
688
689 # feed
690 factory.addStep(ShellCommand(
691 name = "installfeeds",
692 description = "Installing feeds",
693 command=["./scripts/feeds", "install", "-a"],
694 env = MakeEnv(tryccache=True),
695 haltOnFailure = True,
696 ))
697
698 # seed config
699 factory.addStep(StringDownload(
700 name = "dlconfigseed",
701 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
702 workerdest = ".config",
703 mode = 0o644,
704 ))
705
706 # configure
707 factory.addStep(ShellCommand(
708 name = "newconfig",
709 descriptionDone = ".config seeded",
710 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
711 ))
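# For a hypothetical ath79/generic build with a usign key configured, the
# step above appends roughly:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y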
712
713 factory.addStep(ShellCommand(
714 name = "delbin",
715 description = "Removing output directory",
716 command = ["rm", "-rf", "bin/"],
717 ))
718
719 factory.addStep(ShellCommand(
720 name = "defconfig",
721 description = "Populating .config",
722 command = ["make", "defconfig"],
723 env = MakeEnv(),
724 ))
725
726 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
727 factory.addStep(ShellCommand(
728 name = "checkarch",
729 description = "Checking architecture",
730 descriptionDone = "Architecture validated",
731 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
732 logEnviron = False,
733 want_stdout = False,
734 want_stderr = False,
735 haltOnFailure = True,
736 flunkOnFailure = False, # this is not a build FAILURE
737 ))
738
739 # find libc suffix
740 factory.addStep(SetPropertyFromCommand(
741 name = "libc",
742 property = "libc",
743 description = "Finding libc suffix",
744 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
745 ))
746
747 # install build key
748 factory.addStep(StringDownload(
749 name = "dlkeybuildpub",
750 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
751 workerdest = "key-build.pub",
752 mode = 0o600,
753 doStepIf = IsUsignEnabled,
754 ))
755
756 factory.addStep(StringDownload(
757 name = "dlkeybuild",
758 s = "# fake private key",
759 workerdest = "key-build",
760 mode = 0o600,
761 doStepIf = IsUsignEnabled,
762 ))
763
764 factory.addStep(StringDownload(
765 name = "dlkeybuilducert",
766 s = "# fake certificate",
767 workerdest = "key-build.ucert",
768 mode = 0o600,
769 doStepIf = IsUsignEnabled,
770 ))
771
772 # prepare dl
773 factory.addStep(ShellCommand(
774 name = "dldir",
775 description = "Preparing dl/",
776 descriptionDone = "dl/ prepared",
777 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
778 workdir = Property("builddir"),
779 logEnviron = False,
780 want_stdout = False,
781 ))
782
783 # prepare tar
784 factory.addStep(ShellCommand(
785 name = "dltar",
786 description = "Building and installing GNU tar",
787 descriptionDone = "GNU tar built and installed",
788 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
789 env = MakeEnv(tryccache=True),
790 haltOnFailure = True,
791 ))
792
793 # populate dl
794 factory.addStep(ShellCommand(
795 name = "dlrun",
796 description = "Populating dl/",
797 descriptionDone = "dl/ populated",
798 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
799 env = MakeEnv(),
800 logEnviron = False,
801 locks = NetLockDl.withArgs(dlLock.access('exclusive')),
802 ))
803
804 factory.addStep(ShellCommand(
805 name = "cleanbase",
806 description = "Cleaning base-files",
807 command=["make", "package/base-files/clean", "V=s"],
808 ))
809
810 # build
811 factory.addStep(ShellCommand(
812 name = "tools",
813 description = "Building and installing tools",
814 descriptionDone = "Tools built and installed",
815 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
816 env = MakeEnv(tryccache=True),
817 haltOnFailure = True,
818 ))
819
820 factory.addStep(ShellCommand(
821 name = "toolchain",
822 description = "Building and installing toolchain",
823 descriptionDone = "Toolchain built and installed",
824 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
825 env = MakeEnv(),
826 haltOnFailure = True,
827 ))
828
829 factory.addStep(ShellCommand(
830 name = "kmods",
831 description = "Building kmods",
832 descriptionDone = "Kmods built",
833 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
834 env = MakeEnv(),
835 haltOnFailure = True,
836 ))
837
838 # find kernel version
839 factory.addStep(SetPropertyFromCommand(
840 name = "kernelversion",
841 property = "kernelversion",
842 description = "Finding the effective Kernel version",
843 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
844 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
845 ))
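# The resulting 'kernelversion' property has the form
# "<LINUX_VERSION>-<LINUX_RELEASE>-<LINUX_VERMAGIC>", e.g. something like
# "5.10.176-1-<vermagic hash>" (illustrative).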
846
847 factory.addStep(ShellCommand(
848 name = "pkgclean",
849 description = "Cleaning up package build",
850 descriptionDone = "Package build cleaned up",
851 command=["make", "package/cleanup", "V=s"],
852 ))
853
854 factory.addStep(ShellCommand(
855 name = "pkgbuild",
856 description = "Building packages",
857 descriptionDone = "Packages built",
858 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
859 env = MakeEnv(),
860 haltOnFailure = True,
861 ))
862
863 factory.addStep(ShellCommand(
864 name = "pkginstall",
865 description = "Installing packages",
866 descriptionDone = "Packages installed",
867 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
868 env = MakeEnv(),
869 haltOnFailure = True,
870 ))
871
872 factory.addStep(ShellCommand(
873 name = "pkgindex",
874 description = "Indexing packages",
875 descriptionDone = "Packages indexed",
876 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
877 env = MakeEnv(),
878 haltOnFailure = True,
879 ))
880
881 factory.addStep(ShellCommand(
882 name = "images",
883 description = "Building and installing images",
884 descriptionDone = "Images built and installed",
885 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
886 env = MakeEnv(),
887 haltOnFailure = True,
888 ))
889
890 factory.addStep(ShellCommand(
891 name = "buildinfo",
892 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
893 command = "make -j1 buildinfo V=s || true",
894 env = MakeEnv(),
895 haltOnFailure = True,
896 ))
897
898 factory.addStep(ShellCommand(
899 name = "json_overview_image_info",
900 description = "Generating profiles.json in target folder",
901 command = "make -j1 json_overview_image_info V=s || true",
902 env = MakeEnv(),
903 haltOnFailure = True,
904 ))
905
906 factory.addStep(ShellCommand(
907 name = "checksums",
908 description = "Calculating checksums",
909 descriptionDone = "Checksums calculated",
910 command=["make", "-j1", "checksum", "V=s"],
911 env = MakeEnv(),
912 haltOnFailure = True,
913 ))
914
915 factory.addStep(ShellCommand(
916 name = "kmoddir",
917 descriptionDone = "Kmod directory created",
918 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
919 haltOnFailure = True,
920 doStepIf = IsKmodArchiveEnabled,
921 ))
922
923 factory.addStep(ShellCommand(
924 name = "kmodprepare",
925 description = "Preparing kmod archive",
926 descriptionDone = "Kmod archive prepared",
927 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
928 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
929 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
930 haltOnFailure = True,
931 doStepIf = IsKmodArchiveEnabled,
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "kmodindex",
936 description = "Indexing kmod archive",
937 descriptionDone = "Kmod archive indexed",
938 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
939 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
940 env = MakeEnv(),
941 haltOnFailure = True,
942 doStepIf = IsKmodArchiveEnabled,
943 ))
944
945 # sign
946 factory.addStep(MasterShellCommand(
947 name = "signprepare",
948 descriptionDone = "Temporary signing directory prepared",
949 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
950 haltOnFailure = True,
951 doStepIf = IsSignEnabled,
953 ))
954
955 factory.addStep(ShellCommand(
956 name = "signpack",
957 description = "Packing files to sign",
958 descriptionDone = "Files to sign packed",
959 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
960 haltOnFailure = True,
961 doStepIf = IsSignEnabled,
962 ))
963
964 factory.addStep(FileUpload(
965 workersrc = "sign.tar.gz",
966 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
967 haltOnFailure = True,
968 doStepIf = IsSignEnabled,
969 ))
970
971 factory.addStep(MasterShellCommand(
972 name = "signfiles",
973 description = "Signing files",
974 descriptionDone = "Files signed",
975 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
976 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
977 haltOnFailure = True,
978 doStepIf = IsSignEnabled,
979 ))
980
981 factory.addStep(FileDownload(
982 name = "dlsigntargz",
983 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
984 workerdest = "sign.tar.gz",
985 haltOnFailure = True,
986 doStepIf = IsSignEnabled,
987 ))
988
989 factory.addStep(ShellCommand(
990 name = "signunpack",
991 description = "Unpacking signed files",
992 descriptionDone = "Signed files unpacked",
993 command = ["tar", "-xzf", "sign.tar.gz"],
994 haltOnFailure = True,
995 doStepIf = IsSignEnabled,
996 ))
997
998 # upload
999 factory.addStep(ShellCommand(
1000 name = "dirprepare",
1001 descriptionDone = "Upload directory structure prepared",
1002 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1003 haltOnFailure = True,
1004 ))
1005
1006 factory.addStep(ShellCommand(
1007 name = "linkprepare",
1008 descriptionDone = "Repository symlink prepared",
1009 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1010 doStepIf = IsNoMasterBuild,
1011 haltOnFailure = True,
1012 ))
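# For a hypothetical branch "openwrt-21.02" this creates the symlink
# tmp/upload/21.02-SNAPSHOT/packages -> ../packages-21.02, pointing the
# release tree at the shared per-branch packages directory (names illustrative).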
1013
1014 factory.addStep(ShellCommand(
1015 name = "kmoddirprepare",
1016 descriptionDone = "Kmod archive upload directory prepared",
1017 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1018 haltOnFailure = True,
1019 doStepIf = IsKmodArchiveEnabled,
1020 ))
1021
1022 factory.addStep(ShellCommand(
1023 name = "dirupload",
1024 description = "Uploading directory structure",
1025 descriptionDone = "Directory structure uploaded",
1026 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1027 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1028 haltOnFailure = True,
1029 logEnviron = False,
1030 locks = NetLockUl,
1031 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1032 ))
1033
1034 # download remote sha256sums to 'target-sha256sums'
1035 factory.addStep(ShellCommand(
1036 name = "target-sha256sums",
1037 description = "Fetching remote sha256sums for target",
1038 descriptionDone = "Remote sha256sums for target fetched",
1039 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1040 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1041 logEnviron = False,
1042 haltOnFailure = False,
1043 flunkOnFailure = False,
1044 warnOnFailure = False,
1045 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1046 ))
1047
1048 # build list of files to upload
1049 factory.addStep(FileDownload(
1050 name = "dlsha2rsyncpl",
1051 mastersrc = scripts_dir + '/sha2rsync.pl',
1052 workerdest = "../sha2rsync.pl",
1053 mode = 0o755,
1054 ))
1055
1056 factory.addStep(ShellCommand(
1057 name = "buildlist",
1058 description = "Building list of files to upload",
1059 descriptionDone = "List of files to upload built",
1060 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1061 haltOnFailure = True,
1062 ))
1063
1064 factory.addStep(FileDownload(
1065 name = "dlrsync.sh",
1066 mastersrc = scripts_dir + '/rsync.sh',
1067 workerdest = "../rsync.sh",
1068 mode = 0o755,
1069 ))
1070
1071 # upload new files and update existing ones
1072 factory.addStep(ShellCommand(
1073 name = "targetupload",
1074 description = "Uploading target files",
1075 descriptionDone = "Target files uploaded",
1076 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1077 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1078 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1079 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1080 haltOnFailure = True,
1081 logEnviron = False,
1082 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1083 ))
1084
1085 # delete files which don't exist locally
1086 factory.addStep(ShellCommand(
1087 name = "targetprune",
1088 description = "Pruning target files",
1089 descriptionDone = "Target files pruned",
1090 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1091 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1092 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1093 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1094 haltOnFailure = True,
1095 logEnviron = False,
1096 locks = NetLockUl,
1097 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1098 ))
1099
1100 factory.addStep(ShellCommand(
1101 name = "kmodupload",
1102 description = "Uploading kmod archive",
1103 descriptionDone = "Kmod archive uploaded",
1104 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1105 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1106 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1107 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1108 haltOnFailure = True,
1109 logEnviron = False,
1110 locks = NetLockUl,
1111 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
1112 ))
1113
1114 factory.addStep(ShellCommand(
1115 name = "sourcelist",
1116 description = "Finding source archives to upload",
1117 descriptionDone = "Source archives to upload found",
1118 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1119 haltOnFailure = True,
1120 ))
1121
1122 factory.addStep(ShellCommand(
1123 name = "sourceupload",
1124 description = "Uploading source archives",
1125 descriptionDone = "Source archives uploaded",
1126 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1127 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1128 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1129 haltOnFailure = True,
1130 logEnviron = False,
1131 locks = NetLockUl,
1132 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1133 ))
1134
1135 factory.addStep(ShellCommand(
1136 name = "df",
1137 description = "Reporting disk usage",
1138 command=["df", "-h", "."],
1139 env={'LC_ALL': 'C'},
1140 logEnviron = False,
1141 haltOnFailure = False,
1142 flunkOnFailure = False,
1143 warnOnFailure = False,
1144 alwaysRun = True,
1145 ))
1146
1147 factory.addStep(ShellCommand(
1148 name = "du",
1149 description = "Reporting estimated file space usage",
1150 command=["du", "-sh", "."],
1151 env={'LC_ALL': 'C'},
1152 logEnviron = False,
1153 haltOnFailure = False,
1154 flunkOnFailure = False,
1155 warnOnFailure = False,
1156 alwaysRun = True,
1157 ))
1158
1159 factory.addStep(ShellCommand(
1160 name = "ccachestat",
1161 description = "Reporting ccache stats",
1162 command=["ccache", "-s"],
1163 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1164 logEnviron = False,
1165 want_stderr = False,
1166 haltOnFailure = False,
1167 flunkOnFailure = False,
1168 warnOnFailure = False,
1169 hideStepIf = lambda r, s: r==results.FAILURE,
1170 ))
1171
1172 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1173
1174 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1175 force_factory.addStep(steps.Trigger(
1176 name = "trigger_%s" % target,
1177 description = "Triggering %s build" % target,
1178 schedulerNames = [ "trigger_%s" % target ],
1179 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1180 doStepIf = IsTargetSelected(target),
1181 ))
1182
1183
1184 ####### STATUS TARGETS
1185
1186 # Build results are exposed through the 'www' web UI and the reporters in
1187 # 'services' configured below, e.g. the web interface views and the IRC
1188 # bot, all driven by values from config.ini.
1189
1190 if "status_bind" in inip1:
1191 c['www'] = {
1192 'port': inip1.get("status_bind"),
1193 'plugins': {
1194 'waterfall_view': True,
1195 'console_view': True,
1196 'grid_view': True
1197 }
1198 }
1199
1200 if "status_user" in inip1 and "status_password" in inip1:
1201 c['www']['auth'] = util.UserPasswordAuth([
1202 (inip1.get("status_user"), inip1.get("status_password"))
1203 ])
1204 c['www']['authz'] = util.Authz(
1205 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1206 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1207 )
1208
1209 c['services'] = []
1210 if ini.has_section("irc"):
1211 iniirc = ini['irc']
1212 irc_host = iniirc.get("host", None)
1213 irc_port = iniirc.getint("port", 6667)
1214 irc_chan = iniirc.get("channel", None)
1215 irc_nick = iniirc.get("nickname", None)
1216 irc_pass = iniirc.get("password", None)
1217
1218 if irc_host and irc_nick and irc_chan:
1219 irc = reporters.IRC(irc_host, irc_nick,
1220 port = irc_port,
1221 password = irc_pass,
1222 channels = [ irc_chan ],
1223 notify_events = [ 'exception', 'problem', 'recovery' ]
1224 )
1225
1226 c['services'].append(irc)
1227
1228 c['revlink'] = util.RevlinkMatch([
1229 r'https://git.openwrt.org/openwrt/(.*).git'
1230 ],
1231 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
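# e.g. a revision abcdef1 from https://git.openwrt.org/openwrt/openwrt.git is
# linked as https://git.openwrt.org/?p=openwrt/openwrt.git;a=commit;h=abcdef1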
1232
1233 ####### DB URL
1234
1235 c['db'] = {
1236 # This specifies what database buildbot uses to store its state. You can leave
1237 # this at its default for all but the largest installations.
1238 'db_url' : "sqlite:///state.sqlite",
1239 }
1240
1241 c['buildbotNetUsageData'] = None