phase1: make GetNextBuild() branch aware
phase1/master.cfg (from buildbot.git)
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("Fix your configuration")
56
57 inip1 = ini['phase1']
58
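# Illustrative config.ini layout (option names are taken from the lookups in
# this file; the values shown here are placeholders, not real settings):
#
#   [general]
#   title = ...
#   title_url = ...
#   workdir = ...
#
#   [phase1]
#   buildbot_url = ...
#   status_bind = ...
#   status_user = ...
#   status_password = ...
#   port = 9989
#
#   [repo]
#   url = ...
#   branch = master
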
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
64 repo_branch = ini['repo'].get("branch", "master")
65
66 rsync_defopts = ["-v", "-4", "--timeout=120"]
67
68 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
69 # rsync_bin_defopts += ["--contimeout=20"]
70
71 branches = {}
72
73 def ini_parse_branch(section):
74 b = {}
75 name = section.get("name")
76
77 if not name:
78 raise ValueError("missing 'name' in " + repr(section))
79 if name in branches:
80 raise ValueError("duplicate branch name in " + repr(section))
81
82 b["name"] = name
83 b["bin_url"] = section.get("binary_url")
84 b["bin_key"] = section.get("binary_password")
85
86 b["src_url"] = section.get("source_url")
87 b["src_key"] = section.get("source_password")
88
89 b["gpg_key"] = section.get("gpg_key")
90
91 b["usign_key"] = section.get("usign_key")
92 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
93 b["usign_comment"] = section.get("usign_comment", usign_comment)
94
95 b["config_seed"] = section.get("config_seed")
96
97 b["kmod_archive"] = section.getboolean("kmod_archive", False)
98
99 branches[name] = b
100 log.msg("Configured branch: {}".format(name))
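
# Illustrative "branch" section (any section whose name starts with "branch ";
# option names mirror the lookups above, values are placeholders):
#
#   [branch master]
#   name = master
#   binary_url = ...
#   binary_password = ...
#   source_url = ...
#   source_password = ...
#   gpg_key = ...
#   usign_key = ...
#   usign_comment = untrusted comment: ...
#   config_seed = ...
#   kmod_archive = True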
101
102 # PB port can be either a numeric port or a connection string
103 pb_port = inip1.get("port") or 9989
104
105 # This is the dictionary that the buildmaster pays attention to. We also use
106 # a shorter alias to save typing.
107 c = BuildmasterConfig = {}
108
109 ####### PROJECT IDENTITY
110
111 # the 'title' string will appear at the top of this buildbot
112 # installation's web UI home page (linked to the
113 # 'titleURL') and is embedded in the page titles.
114
115 c['title'] = ini['general'].get("title")
116 c['titleURL'] = ini['general'].get("title_url")
117
118 # the 'buildbotURL' string should point to the location where the buildbot's
119 # internal web server (configured via c['www'] below) is visible. This
120 # typically uses the port number set in the 'status_bind' option, but
121 # with an externally-visible host name which the buildbot cannot figure out
122 # without some help.
123
124 c['buildbotURL'] = inip1.get("buildbot_url")
125
126 ####### BUILDWORKERS
127
128 # The 'workers' list defines the set of recognized buildworkers. Each element is
129 # a Worker object, specifying a unique worker name and password. The same
130 # worker name and password must be configured on the worker.
131
132 c['workers'] = []
133 NetLocks = dict()
134
135 for section in ini.sections():
136 if section.startswith("branch "):
137 ini_parse_branch(ini[section])
138
139 if section.startswith("worker "):
140 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
141 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
142 sl_props = { 'dl_lock':None, 'ul_lock':None }
143 name = ini.get(section, "name")
144 password = ini.get(section, "password")
145 if ini.has_option(section, "dl_lock"):
146 lockname = ini.get(section, "dl_lock")
147 sl_props['dl_lock'] = lockname
148 if lockname not in NetLocks:
149 NetLocks[lockname] = locks.MasterLock(lockname)
150 if ini.has_option(section, "ul_lock"):
151 lockname = ini.get(section, "ul_lock")
152 sl_props['ul_lock'] = lockname
153 if lockname not in NetLocks:
154 NetLocks[lockname] = locks.MasterLock(lockname)
155 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
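
# Illustrative "worker" section (any section whose name starts with "worker ";
# values are placeholders, and a worker without a "phase" option is treated as
# a phase 1 worker):
#
#   [worker example]
#   name = ...
#   password = ...
#   phase = 1
#   dl_lock = ...
#   ul_lock = ...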
156
157 c['protocols'] = {'pb': {'port': pb_port}}
158
159 # coalesce builds
160 c['collapseRequests'] = True
161
162 # Reduce amount of backlog data
163 c['configurators'] = [util.JanitorConfigurator(
164 logHorizon=timedelta(days=3),
165 hour=6,
166 )]
167
168 @defer.inlineCallbacks
169 def getNewestCompleteTime(bldr):
170 """Returns the complete_at of the latest completed and not SKIPPED
171 build request for this builder, or None if there are no such build
172 requests. We need to filter out SKIPPED requests because we're
173 using collapseRequests=True, which unfortunately marks all
174 previous requests as complete when a new buildset is created.
175
176 @returns: datetime instance or None, via Deferred
177 """
178
179 bldrid = yield bldr.getBuilderId()
180 completed = yield bldr.master.data.get(
181 ('builders', bldrid, 'buildrequests'),
182 [
183 resultspec.Filter('complete', 'eq', [True]),
184 resultspec.Filter('results', 'ne', [results.SKIPPED]),
185 ],
186 order=['-complete_at'], limit=1)
187 if not completed:
188 return
189
190 complete_at = completed[0]['complete_at']
191
192 last_build = yield bldr.master.data.get(
193 ('builds', ),
194 [
195 resultspec.Filter('builderid', 'eq', [bldrid]),
196 ],
197 order=['-started_at'], limit=1)
198
199 if last_build and last_build[0]:
200 last_complete_at = last_build[0]['complete_at']
201 if last_complete_at and (last_complete_at > complete_at):
202 return last_complete_at
203
204 return complete_at
205
206 @defer.inlineCallbacks
207 def prioritizeBuilders(master, builders):
208 """Returns sorted list of builders by their last timestamp of completed and
209 not skipped build.
210
211 @returns: list of sorted builders
212 """
213
214 def is_building(bldr):
215 return bool(bldr.building) or bool(bldr.old_building)
216
217 def bldr_info(bldr):
218 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
219 d.addCallback(lambda complete_at: (complete_at, bldr))
220 return d
221
222 def bldr_sort(item):
223 (complete_at, bldr) = item
224
225 if not complete_at:
226 date = datetime.min
227 complete_at = date.replace(tzinfo=tzutc())
228
229 if is_building(bldr):
230 date = datetime.max
231 complete_at = date.replace(tzinfo=tzutc())
232
233 return (complete_at, bldr.name)
234
235 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
236 results.sort(key=bldr_sort)
237
238 for r in results:
239 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
240
241 return [r[1] for r in results]
242
243 c['prioritizeBuilders'] = prioritizeBuilders
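
# Resulting order (follows from bldr_sort above): builders that have never
# completed a build sort first (datetime.min), builders that are currently
# building sort last (datetime.max), and everything else is ordered by the
# completion time of its newest finished build, oldest first.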
244
245 ####### CHANGESOURCES
246
247 branchNames = [branches[b]["name"] for b in branches]
248
249 # find targets
250 targets = [ ]
251
252 def populateTargets():
253 sourcegit = work_dir + '/source.git'
254 if os.path.isdir(sourcegit):
255 subprocess.call(["rm", "-rf", sourcegit])
256
257 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, sourcegit])
258
259 os.makedirs(sourcegit + '/tmp', exist_ok=True)
260 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
261 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
262
263 while True:
264 line = findtargets.stdout.readline()
265 if not line:
266 break
267 ta = line.decode().strip().split(' ')
268 targets.append(ta[0])
269
270 subprocess.call(["rm", "-rf", sourcegit])
271
272 populateTargets()
273
274 # the 'change_source' setting tells the buildmaster how it should find out
275 # about source code changes. Here we poll the repository configured in config.ini.
276
277 c['change_source'] = []
278 c['change_source'].append(GitPoller(
279 repo_url,
280 workdir=work_dir+'/work.git', branch=repo_branch,
281 pollinterval=300))
282
283 ####### SCHEDULERS
284
285 # Configure the Schedulers, which decide how to react to incoming changes. In this
286 # case, kick off a build on each of the per-target builders defined below.
287
288 class TagChoiceParameter(BaseParameter):
289 spec_attributes = ["strict", "choices"]
290 type = "list"
291 strict = True
292
293 def __init__(self, name, label=None, **kw):
294 super().__init__(name, label, **kw)
295 self._choice_list = []
296
297 @property
298 def choices(self):
299 taglist = []
300 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
301
302 if basever:
303 findtags = subprocess.Popen(
304 ['git', 'ls-remote', '--tags', repo_url],
305 stdout = subprocess.PIPE)
306
307 while True:
308 line = findtags.stdout.readline()
309
310 if not line:
311 break
312
313 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
314
315 if tagver and tagver[1].find(basever[1]) == 0:
316 taglist.append(tagver[1])
317
318 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
319 taglist.insert(0, '')
320
321 self._choice_list = taglist
322
323 return self._choice_list
324
325 def parse_from_arg(self, s):
326 if self.strict and s not in self._choice_list:
327 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
328 return s
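
# Illustrative example (assuming a hypothetical repo_branch of "openwrt-21.02"):
# basever captures "21.02", so remote tags such as refs/tags/v21.02.3 and
# refs/tags/v21.02.0-rc1 become the choices "21.02.3" and "21.02.0-rc1".
# The sort key appends "-z" to final releases so they list before their -rc
# candidates, and the empty string is prepended as the default choice.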
329
330 c['schedulers'] = []
331 c['schedulers'].append(SingleBranchScheduler(
332 name = "all",
333 change_filter = filter.ChangeFilter(branch=repo_branch),
334 treeStableTimer = 60,
335 builderNames = targets))
336
337 c['schedulers'].append(ForceScheduler(
338 name = "force",
339 buttonName = "Force builds",
340 label = "Force build details",
341 builderNames = [ "00_force_build" ],
342
343 codebases = [
344 util.CodebaseParameter(
345 "",
346 label = "Repository",
347 branch = util.FixedParameter(name = "branch", default = ""),
348 revision = util.FixedParameter(name = "revision", default = ""),
349 repository = util.FixedParameter(name = "repository", default = ""),
350 project = util.FixedParameter(name = "project", default = "")
351 )
352 ],
353
354 reason = util.StringParameter(
355 name = "reason",
356 label = "Reason",
357 default = "Trigger build",
358 required = True,
359 size = 80
360 ),
361
362 properties = [
363 util.NestedParameter(
364 name="options",
365 label="Build Options",
366 layout="vertical",
367 fields=[
368 util.ChoiceStringParameter(
369 name = "target",
370 label = "Build target",
371 default = "all",
372 choices = [ "all" ] + targets
373 ),
374 TagChoiceParameter(
375 name = "tag",
376 label = "Build tag",
377 default = ""
378 )
379 ]
380 )
381 ]
382 ))
383
384 ####### BUILDERS
385
386 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
387 # what steps, and which workers can execute them. Note that any particular build will
388 # only take place on one worker.
389
390 def IsTaggingRequested(step):
391 tag = step.getProperty("tag")
392 return tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag)
393
394 def IsNoMasterBuild(step):
395 return step.getProperty("branch") != "master"
396
397 def IsUsignEnabled(step):
398 branch = step.getProperty("branch")
399 return branch and branches[branch].get("usign_key")
400
401 def IsSignEnabled(step):
402 branch = step.getProperty("branch")
403 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
404
405 def IsKmodArchiveEnabled(step):
406 branch = step.getProperty("branch")
407 return branch and branches[branch].get("kmod_archive")
408
409 def GetBaseVersion(branch):
410 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
411 return branch.split('-')[1]
412 else:
413 return "master"
414
415 @properties.renderer
416 def GetVersionPrefix(props):
417 branch = props.getProperty("branch")
418 basever = GetBaseVersion(branch)
419 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
420 return "%s/" % props["tag"]
421 elif basever != "master":
422 return "%s-SNAPSHOT/" % basever
423 else:
424 return ""
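
# Worked example (branch names are illustrative): GetBaseVersion("openwrt-21.02")
# returns "21.02", so GetVersionPrefix yields "21.02.3/" when the "tag" property
# is "21.02.3" and "21.02-SNAPSHOT/" otherwise; on the master branch it returns "".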
425
426 @util.renderer
427 def GetConfigSeed(props):
428 branch = props.getProperty("branch")
429 return branch and branches[branch].get("config_seed") or ""
430
431 @util.renderer
432 def GetRsyncParams(props, srcorbin, urlorkey):
433 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
434 branch = props.getProperty("branch")
435 opt = srcorbin + "_" + urlorkey
436 return branch and branches[branch].get(opt)
437
438 @util.renderer
439 def GetUsignKey(props):
440 branch = props.getProperty("branch")
441 return branch and branches[branch].get("usign_key")
442
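# Request selection order: the oldest pending request that carries a "tag"
# property is picked first; otherwise the oldest request whose "branch"
# property matches one of the configured branches; failing both, fall back
# to the first pending request.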
443 def GetNextBuild(builder, requests):
444 for r in requests:
445 if r.properties:
446 # order tagged build first
447 if r.properties.hasProperty("tag"):
448 return r
449 # then order by branch order
450 pbranch = r.properties.getProperty("branch")
451 for name in branchNames:
452 if pbranch == name:
453 return r
454
455 r = requests[0]
456 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
457 return r
458
459 def MakeEnv(overrides=None, tryccache=False):
460 env = {
461 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
462 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
463 }
464 if tryccache:
465 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
466 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
467 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
468 else:
469 env['CC'] = env['CCC']
470 env['CXX'] = env['CCXX']
471 env['CCACHE'] = ''
472 if overrides is not None:
473 env.update(overrides)
474 return env
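
# Example of the resulting environment (compiler paths depend on properties set
# by earlier steps): with tryccache=True, CC/CXX point at the per-builddir
# ccache_cc.sh / ccache_cxx.sh wrappers (which exec "${CCACHE} ${CCC}" and
# "${CCACHE} ${CCXX}"), CCACHE holds the detected ccache command (empty when
# ccache is unavailable) and CCC/CCXX hold the gcc/g++ commands found by
# findbin.pl; with tryccache=False the compilers are invoked directly.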
475
476 @properties.renderer
477 def NetLockDl(props):
478 lock = None
479 if props.hasProperty("dl_lock"):
480 lock = NetLocks[props["dl_lock"]]
481 if lock is not None:
482 return [lock.access('exclusive')]
483 else:
484 return []
485
486 @properties.renderer
487 def NetLockUl(props):
488 lock = None
489 if props.hasProperty("ul_lock"):
490 lock = NetLocks[props["ul_lock"]]
491 if lock is not None:
492 return [lock.access('exclusive')]
493 else:
494 return []
495
496 @util.renderer
497 def TagPropertyValue(props):
498 if props.hasProperty("options"):
499 options = props.getProperty("options")
500 if type(options) is dict:
501 return options.get("tag")
502 return None
503
504 def IsTargetSelected(target):
505 def CheckTargetProperty(step):
506 try:
507 options = step.getProperty("options")
508 if type(options) is dict:
509 selected_target = options.get("target", "all")
510 if selected_target != "all" and selected_target != target:
511 return False
512 except KeyError:
513 pass
514
515 return True
516
517 return CheckTargetProperty
518
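# Note on the key derivation below (an assumption based on the signify/usign
# secret key layout): for an unencrypted key the decoded blob holds a 2-byte
# algorithm tag at offset 0, the 8-byte key id at offset 32 and the 64-byte
# Ed25519 secret key at offset 40, whose last 32 bytes are the public key;
# concatenating tag + key id + public key therefore reproduces key-build.pub.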
519 @util.renderer
520 def UsignSec2Pub(props):
521 branch = props.getProperty("branch")
522 try:
523 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
524 seckey = branches[branch].get("usign_key")
525 seckey = base64.b64decode(seckey)
526 except:
527 return None
528
529 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
530 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
531
532
533 c['builders'] = []
534
535 dlLock = locks.WorkerLock("worker_dl")
536
537 workerNames = [ ]
538
539 for worker in c['workers']:
540 workerNames.append(worker.workername)
541
542 force_factory = BuildFactory()
543
544 c['builders'].append(BuilderConfig(
545 name = "00_force_build",
546 workernames = workerNames,
547 factory = force_factory))
548
549 for target in targets:
550 ts = target.split('/')
551
552 factory = BuildFactory()
553
554 # setup shared work directory if required
555 factory.addStep(ShellCommand(
556 name = "sharedwd",
557 description = "Setting up shared work directory",
558 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
559 workdir = ".",
560 haltOnFailure = True))
561
562 # find number of cores
563 factory.addStep(SetPropertyFromCommand(
564 name = "nproc",
565 property = "nproc",
566 description = "Finding number of CPUs",
567 command = ["nproc"]))
568
569 # find gcc and g++ compilers
570 factory.addStep(FileDownload(
571 name = "dlfindbinpl",
572 mastersrc = scripts_dir + '/findbin.pl',
573 workerdest = "../findbin.pl",
574 mode = 0o755))
575
576 factory.addStep(SetPropertyFromCommand(
577 name = "gcc",
578 property = "cc_command",
579 description = "Finding gcc command",
580 command = [
581 "../findbin.pl", "gcc", "", "",
582 ],
583 haltOnFailure = True))
584
585 factory.addStep(SetPropertyFromCommand(
586 name = "g++",
587 property = "cxx_command",
588 description = "Finding g++ command",
589 command = [
590 "../findbin.pl", "g++", "", "",
591 ],
592 haltOnFailure = True))
593
594 # see if ccache is available
595 factory.addStep(SetPropertyFromCommand(
596 property = "ccache_command",
597 command = ["which", "ccache"],
598 description = "Testing for ccache command",
599 haltOnFailure = False,
600 flunkOnFailure = False,
601 warnOnFailure = False,
602 ))
603
604 # Workaround bug when switching from a checked out tag back to a branch
605 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
606 factory.addStep(ShellCommand(
607 name = "gitcheckout",
608 description = "Ensure that Git HEAD is sane",
609 command = Interpolate("if [ -d .git ]; then git checkout -f %(prop:branch)s && git branch --set-upstream-to origin/%(prop:branch)s || rm -fr .git; else exit 0; fi"),
610 haltOnFailure = True))
611
612 # check out the source
613 # Git() runs:
614 # if repo doesn't exist: 'git clone repourl'
615 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
616 # 'git fetch -t repourl branch; git reset --hard revision'
617 factory.addStep(Git(
618 name = "git",
619 repourl = repo_url,
620 mode = 'full',
621 method = 'fresh',
622 locks = NetLockDl,
623 haltOnFailure = True,
624 ))
625
626 # update remote refs
627 factory.addStep(ShellCommand(
628 name = "fetchrefs",
629 description = "Fetching Git remote refs",
630 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
631 haltOnFailure = True
632 ))
633
634 # switch to tag
635 factory.addStep(ShellCommand(
636 name = "switchtag",
637 description = "Checking out Git tag",
638 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
639 haltOnFailure = True,
640 doStepIf = IsTaggingRequested
641 ))
642
643 # Verify that Git HEAD points to a tag or branch
644 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
645 factory.addStep(ShellCommand(
646 name = "gitverify",
647 description = "Ensure that Git HEAD is pointing to a branch or tag",
648 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
649 haltOnFailure = True))
650
651 factory.addStep(ShellCommand(
652 name = "rmtmp",
653 description = "Remove tmp folder",
654 command=["rm", "-rf", "tmp/"]))
655
656 # feed
657 factory.addStep(ShellCommand(
658 name = "rmfeedlinks",
659 description = "Remove feed symlinks",
660 command=["rm", "-rf", "package/feeds/"]))
661
662 factory.addStep(StringDownload(
663 name = "ccachecc",
664 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
665 workerdest = "../ccache_cc.sh",
666 mode = 0o755,
667 ))
668
669 factory.addStep(StringDownload(
670 name = "ccachecxx",
671 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
672 workerdest = "../ccache_cxx.sh",
673 mode = 0o755,
674 ))
675
676 # feed
677 factory.addStep(ShellCommand(
678 name = "updatefeeds",
679 description = "Updating feeds",
680 command=["./scripts/feeds", "update"],
681 env = MakeEnv(tryccache=True),
682 haltOnFailure = True,
683 locks = NetLockDl,
684 ))
685
686 # feed
687 factory.addStep(ShellCommand(
688 name = "installfeeds",
689 description = "Installing feeds",
690 command=["./scripts/feeds", "install", "-a"],
691 env = MakeEnv(tryccache=True),
692 haltOnFailure = True
693 ))
694
695 # seed config
696 factory.addStep(StringDownload(
697 name = "dlconfigseed",
698 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
699 workerdest = ".config",
700 mode = 0o644
701 ))
702
703 # configure
704 factory.addStep(ShellCommand(
705 name = "newconfig",
706 description = "Seeding .config",
707 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey)
708 ))
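
# For illustration, with a hypothetical "ath79/generic" target and a usign key
# configured for the branch, the step above appends to .config:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y
# (CONFIG_SIGNED_PACKAGES=n when the branch has no usign key.)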
709
710 factory.addStep(ShellCommand(
711 name = "delbin",
712 description = "Removing output directory",
713 command = ["rm", "-rf", "bin/"]
714 ))
715
716 factory.addStep(ShellCommand(
717 name = "defconfig",
718 description = "Populating .config",
719 command = ["make", "defconfig"],
720 env = MakeEnv()
721 ))
722
723 # check arch
724 factory.addStep(ShellCommand(
725 name = "checkarch",
726 description = "Checking architecture",
727 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
728 logEnviron = False,
729 want_stdout = False,
730 want_stderr = False,
731 haltOnFailure = True
732 ))
733
734 # find libc suffix
735 factory.addStep(SetPropertyFromCommand(
736 name = "libc",
737 property = "libc",
738 description = "Finding libc suffix",
739 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
740
741 # install build key
742 factory.addStep(StringDownload(
743 name = "dlkeybuildpub",
744 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
745 workerdest = "key-build.pub",
746 mode = 0o600,
747 doStepIf = IsUsignEnabled,
748 ))
749
750 factory.addStep(StringDownload(
751 name = "dlkeybuild",
752 s = "# fake private key",
753 workerdest = "key-build",
754 mode = 0o600,
755 doStepIf = IsUsignEnabled,
756 ))
757
758 factory.addStep(StringDownload(
759 name = "dlkeybuilducert",
760 s = "# fake certificate",
761 workerdest = "key-build.ucert",
762 mode = 0o600,
763 doStepIf = IsUsignEnabled,
764 ))
765
766 # prepare dl
767 factory.addStep(ShellCommand(
768 name = "dldir",
769 description = "Preparing dl/",
770 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
771 logEnviron = False,
772 want_stdout = False
773 ))
774
775 # prepare tar
776 factory.addStep(ShellCommand(
777 name = "dltar",
778 description = "Building and installing GNU tar",
779 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
780 env = MakeEnv(tryccache=True),
781 haltOnFailure = True
782 ))
783
784 # populate dl
785 factory.addStep(ShellCommand(
786 name = "dlrun",
787 description = "Populating dl/",
788 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
789 env = MakeEnv(),
790 logEnviron = False,
791 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
792 ))
793
794 factory.addStep(ShellCommand(
795 name = "cleanbase",
796 description = "Cleaning base-files",
797 command=["make", "package/base-files/clean", "V=s"]
798 ))
799
800 # build
801 factory.addStep(ShellCommand(
802 name = "tools",
803 description = "Building and installing tools",
804 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
805 env = MakeEnv(tryccache=True),
806 haltOnFailure = True
807 ))
808
809 factory.addStep(ShellCommand(
810 name = "toolchain",
811 description = "Building and installing toolchain",
812 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
813 env = MakeEnv(),
814 haltOnFailure = True
815 ))
816
817 factory.addStep(ShellCommand(
818 name = "kmods",
819 description = "Building kmods",
820 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
821 env = MakeEnv(),
822 haltOnFailure = True
823 ))
824
825 # find kernel version
826 factory.addStep(SetPropertyFromCommand(
827 name = "kernelversion",
828 property = "kernelversion",
829 description = "Finding the effective Kernel version",
830 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
831 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
832 ))
833
834 factory.addStep(ShellCommand(
835 name = "pkgclean",
836 description = "Cleaning up package build",
837 command=["make", "package/cleanup", "V=s"]
838 ))
839
840 factory.addStep(ShellCommand(
841 name = "pkgbuild",
842 description = "Building packages",
843 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
844 env = MakeEnv(),
845 haltOnFailure = True
846 ))
847
848 factory.addStep(ShellCommand(
849 name = "pkginstall",
850 description = "Installing packages",
851 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
852 env = MakeEnv(),
853 haltOnFailure = True
854 ))
855
856 factory.addStep(ShellCommand(
857 name = "pkgindex",
858 description = "Indexing packages",
859 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
860 env = MakeEnv(),
861 haltOnFailure = True
862 ))
863
864 factory.addStep(ShellCommand(
865 name = "images",
866 description = "Building and installing images",
867 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
868 env = MakeEnv(),
869 haltOnFailure = True
870 ))
871
872 factory.addStep(ShellCommand(
873 name = "buildinfo",
874 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
875 command = "make -j1 buildinfo V=s || true",
876 env = MakeEnv(),
877 haltOnFailure = True
878 ))
879
880 factory.addStep(ShellCommand(
881 name = "json_overview_image_info",
882 description = "Generate profiles.json in target folder",
883 command = "make -j1 json_overview_image_info V=s || true",
884 env = MakeEnv(),
885 haltOnFailure = True
886 ))
887
888 factory.addStep(ShellCommand(
889 name = "checksums",
890 description = "Calculating checksums",
891 command=["make", "-j1", "checksum", "V=s"],
892 env = MakeEnv(),
893 haltOnFailure = True
894 ))
895
896 factory.addStep(ShellCommand(
897 name = "kmoddir",
898 description = "Creating kmod directory",
899 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
900 haltOnFailure = True,
901 doStepIf = IsKmodArchiveEnabled,
902 ))
903
904 factory.addStep(ShellCommand(
905 name = "kmodprepare",
906 description = "Preparing kmod archive",
907 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
908 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
909 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
910 haltOnFailure = True,
911 doStepIf = IsKmodArchiveEnabled,
912 ))
913
914 factory.addStep(ShellCommand(
915 name = "kmodindex",
916 description = "Indexing kmod archive",
917 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
918 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
919 env = MakeEnv(),
920 haltOnFailure = True,
921 doStepIf = IsKmodArchiveEnabled,
922 ))
923
924 # sign
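# Signing round trip: pack the sha256sums and Packages files into sign.tar.gz,
# upload the archive to the master's signing directory, let the master run
# signall.sh on it (pointed at the buildmaster's config via the CONFIG_INI
# environment variable), then download the signed archive back to the worker
# and unpack it in place.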
925 factory.addStep(MasterShellCommand(
926 name = "signprepare",
927 description = "Preparing temporary signing directory",
928 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
929 haltOnFailure = True,
930 doStepIf = IsSignEnabled,
931
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "signpack",
936 description = "Packing files to sign",
937 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
938 haltOnFailure = True,
939 doStepIf = IsSignEnabled,
940 ))
941
942 factory.addStep(FileUpload(
943 workersrc = "sign.tar.gz",
944 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
945 haltOnFailure = True,
946 doStepIf = IsSignEnabled,
947 ))
948
949 factory.addStep(MasterShellCommand(
950 name = "signfiles",
951 description = "Signing files",
952 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
953 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
954 haltOnFailure = True,
955 doStepIf = IsSignEnabled,
956 ))
957
958 factory.addStep(FileDownload(
959 name = "dlsigntargz",
960 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
961 workerdest = "sign.tar.gz",
962 haltOnFailure = True,
963 doStepIf = IsSignEnabled,
964 ))
965
966 factory.addStep(ShellCommand(
967 name = "signunpack",
968 description = "Unpacking signed files",
969 command = ["tar", "-xzf", "sign.tar.gz"],
970 haltOnFailure = True,
971 doStepIf = IsSignEnabled,
972 ))
973
974 # upload
975 factory.addStep(ShellCommand(
976 name = "dirprepare",
977 description = "Preparing upload directory structure",
978 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
979 haltOnFailure = True
980 ))
981
982 factory.addStep(ShellCommand(
983 name = "linkprepare",
984 description = "Preparing repository symlink",
985 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
986 doStepIf = IsNoMasterBuild,
987 haltOnFailure = True
988 ))
989
990 factory.addStep(ShellCommand(
991 name = "kmoddirprepare",
992 description = "Preparing kmod archive upload directory",
993 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
994 haltOnFailure = True,
995 doStepIf = IsKmodArchiveEnabled,
996 ))
997
998 factory.addStep(ShellCommand(
999 name = "dirupload",
1000 description = "Uploading directory structure",
1001 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1002 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1003 haltOnFailure = True,
1004 logEnviron = False,
1005 locks = NetLockUl,
1006 ))
1007
1008 # download remote sha256sums to 'target-sha256sums'
1009 factory.addStep(ShellCommand(
1010 name = "target-sha256sums",
1011 description = "Fetching remote sha256sums for target",
1012 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1013 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1014 logEnviron = False,
1015 haltOnFailure = False,
1016 flunkOnFailure = False,
1017 warnOnFailure = False,
1018 ))
1019
1020 # build list of files to upload
1021 factory.addStep(FileDownload(
1022 name = "dlsha2rsyncpl",
1023 mastersrc = scripts_dir + '/sha2rsync.pl',
1024 workerdest = "../sha2rsync.pl",
1025 mode = 0o755,
1026 ))
1027
1028 factory.addStep(ShellCommand(
1029 name = "buildlist",
1030 description = "Building list of files to upload",
1031 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1032 haltOnFailure = True,
1033 ))
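
# sha2rsync.pl (assumed behaviour of the helper script) compares the remote
# sha256sums fetched above with the locally generated sha256sums and writes the
# names of new or changed files to "rsynclist", so the upload steps below only
# transfer files that actually differ (--files-from=rsynclist).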
1034
1035 factory.addStep(FileDownload(
1036 name = "dlrsync.sh",
1037 mastersrc = scripts_dir + '/rsync.sh',
1038 workerdest = "../rsync.sh",
1039 mode = 0o755
1040 ))
1041
1042 # upload new files and update existing ones
1043 factory.addStep(ShellCommand(
1044 name = "targetupload",
1045 description = "Uploading target files",
1046 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1047 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1048 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1049 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1050 haltOnFailure = True,
1051 logEnviron = False,
1052 ))
1053
1054 # delete files which don't exist locally
1055 factory.addStep(ShellCommand(
1056 name = "targetprune",
1057 description = "Pruning target files",
1058 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1059 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1060 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1061 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1062 haltOnFailure = True,
1063 logEnviron = False,
1064 locks = NetLockUl,
1065 ))
1066
1067 factory.addStep(ShellCommand(
1068 name = "kmodupload",
1069 description = "Uploading kmod archive",
1070 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1071 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1072 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1073 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1074 haltOnFailure = True,
1075 logEnviron = False,
1076 locks = NetLockUl,
1077 doStepIf = IsKmodArchiveEnabled,
1078 ))
1079
1080 factory.addStep(ShellCommand(
1081 name = "sourcelist",
1082 description = "Finding source archives to upload",
1083 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1084 haltOnFailure = True
1085 ))
1086
1087 factory.addStep(ShellCommand(
1088 name = "sourceupload",
1089 description = "Uploading source archives",
1090 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1091 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1092 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1093 haltOnFailure = True,
1094 logEnviron = False,
1095 locks = NetLockUl,
1096 ))
1097
1098 factory.addStep(ShellCommand(
1099 name = "df",
1100 description = "Reporting disk usage",
1101 command=["df", "-h", "."],
1102 env={'LC_ALL': 'C'},
1103 haltOnFailure = False,
1104 flunkOnFailure = False,
1105 warnOnFailure = False,
1106 alwaysRun = True
1107 ))
1108
1109 factory.addStep(ShellCommand(
1110 name = "du",
1111 description = "Reporting estimated file space usage",
1112 command=["du", "-sh", "."],
1113 env={'LC_ALL': 'C'},
1114 haltOnFailure = False,
1115 flunkOnFailure = False,
1116 warnOnFailure = False,
1117 alwaysRun = True
1118 ))
1119
1120 factory.addStep(ShellCommand(
1121 name = "ccachestat",
1122 description = "Reporting ccache stats",
1123 command=["ccache", "-s"],
1124 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1125 want_stderr = False,
1126 haltOnFailure = False,
1127 flunkOnFailure = False,
1128 warnOnFailure = False,
1129 alwaysRun = True,
1130 ))
1131
1132 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1133
1134 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1135 force_factory.addStep(steps.Trigger(
1136 name = "trigger_%s" % target,
1137 description = "Triggering %s build" % target,
1138 schedulerNames = [ "trigger_%s" % target ],
1139 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1140 doStepIf = IsTargetSelected(target)
1141 ))
1142
1143
1144 ####### STATUS TARGETS
1145
1146 # Status reporting: the build results are surfaced through the web UI
1147 # configured via c['www'] below and through the reporters appended to
1148 # c['services'] (an IRC bot when an [irc] section is configured).
1149
1150 if "status_bind" in inip1:
1151 c['www'] = {
1152 'port': inip1.get("status_bind"),
1153 'plugins': {
1154 'waterfall_view': True,
1155 'console_view': True,
1156 'grid_view': True
1157 }
1158 }
1159
1160 if "status_user" in inip1 and "status_password" in inip1:
1161 c['www']['auth'] = util.UserPasswordAuth([
1162 (inip1.get("status_user"), inip1.get("status_password"))
1163 ])
1164 c['www']['authz'] = util.Authz(
1165 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1166 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1167 )
1168
1169 c['services'] = []
1170 if ini.has_section("irc"):
1171 iniirc = ini['irc']
1172 irc_host = iniirc.get("host", None)
1173 irc_port = iniirc.getint("port", 6667)
1174 irc_chan = iniirc.get("channel", None)
1175 irc_nick = iniirc.get("nickname", None)
1176 irc_pass = iniirc.get("password", None)
1177
1178 if irc_host and irc_nick and irc_chan:
1179 irc = reporters.IRC(irc_host, irc_nick,
1180 port = irc_port,
1181 password = irc_pass,
1182 channels = [ irc_chan ],
1183 notify_events = [ 'exception', 'problem', 'recovery' ]
1184 )
1185
1186 c['services'].append(irc)
1187
1188 c['revlink'] = util.RevlinkMatch([
1189 r'https://git.openwrt.org/openwrt/(.*).git'
1190 ],
1191 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1192
1193 ####### DB URL
1194
1195 c['db'] = {
1196 # This specifies what database buildbot uses to store its state. You can leave
1197 # this at its default for all but the largest installations.
1198 'db_url' : "sqlite:///state.sqlite",
1199 }
1200
1201 c['buildbotNetUsageData'] = None