phase1: gitcheckout workaround is no longer needed
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41
42
43 if not os.path.exists("twistd.pid"):
44 with open("twistd.pid", "w") as pidfile:
45 pidfile.write("{}".format(os.getpid()))
46
47 # This is the phase1 buildmaster config file. It must be installed as
48 # 'master.cfg' in the buildmaster's base directory.
49
50 ini = configparser.ConfigParser()
51 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
52
53 if "general" not in ini or "phase1" not in ini:
54 raise ValueError("Fix your configuration")
55
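# For reference, a minimal config.ini could look like the sketch below; the
# section and option names match the lookups in this file, while all values
# are purely illustrative.
#
#   [general]
#   workdir = /data/buildbot
#   title = Example Buildbot
#   title_url = https://example.org/
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010:interface=127.0.0.1
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/project.git
#
#   [branch master]
#   name = master
#   binary_url = rsync://user@upload.example.org/bin
#   binary_password = secret
#
#   [worker 1]
#   name = example-worker-01
#   password = secret
#   phase = 1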
56 inip1 = ini['phase1']
57
58 # Globals
59 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
60 scripts_dir = os.path.abspath("../scripts")
61
62 repo_url = ini['repo'].get("url")
63
64 rsync_defopts = ["-v", "-4", "--timeout=120"]
65
66 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
67 # rsync_bin_defopts += ["--contimeout=20"]
68
69 branches = {}
70
71 def ini_parse_branch(section):
72 b = {}
73 name = section.get("name")
74
75 if not name:
76 raise ValueError("missing 'name' in " + repr(section))
77 if name in branches:
78 raise ValueError("duplicate branch name in " + repr(section))
79
80 b["name"] = name
81 b["bin_url"] = section.get("binary_url")
82 b["bin_key"] = section.get("binary_password")
83
84 b["src_url"] = section.get("source_url")
85 b["src_key"] = section.get("source_password")
86
87 b["gpg_key"] = section.get("gpg_key")
88
89 b["usign_key"] = section.get("usign_key")
90 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
91 b["usign_comment"] = section.get("usign_comment", usign_comment)
92
93 b["config_seed"] = section.get("config_seed")
94
95 b["kmod_archive"] = section.getboolean("kmod_archive", False)
96
97 branches[name] = b
98 log.msg("Configured branch: {}".format(name))
99
100 # PB port can be either a numeric port or a connection string
101 pb_port = inip1.get("port") or 9989
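# e.g. "port = 9989", or a Twisted endpoint string such as
# "port = tcp:9989:interface=127.0.0.1" (values are illustrative)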
102
103 # This is the dictionary that the buildmaster pays attention to. We also use
104 # a shorter alias to save typing.
105 c = BuildmasterConfig = {}
106
107 ####### PROJECT IDENTITY
108
109 # the 'title' string will appear at the top of this buildbot
110 # installation's web UI home page (linked to the
111 # 'titleURL') and is embedded in the page titles.
112
113 c['title'] = ini['general'].get("title")
114 c['titleURL'] = ini['general'].get("title_url")
115
116 # the 'buildbotURL' string should point to the location where the buildbot's
117 # internal web server (the 'www' plugin configured below) is visible. This
118 # typically uses the port number set in the 'status_bind' option below, but
119 # with an externally-visible host name which the buildbot cannot figure out
120 # without some help.
121
122 c['buildbotURL'] = inip1.get("buildbot_url")
123
124 ####### BUILDWORKERS
125
126 # The 'workers' list defines the set of recognized buildworkers. Each element is
127 # a Worker object, specifying a unique worker name and password. The same
128 # worker name and password must be configured on the worker.
129
130 c['workers'] = []
131 NetLocks = dict()
132
133 for section in ini.sections():
134 if section.startswith("branch "):
135 ini_parse_branch(ini[section])
136
137 if section.startswith("worker "):
138 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
139 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
140 sl_props = { 'dl_lock':None, 'ul_lock':None }
141 name = ini.get(section, "name")
142 password = ini.get(section, "password")
143 if ini.has_option(section, "dl_lock"):
144 lockname = ini.get(section, "dl_lock")
145 sl_props['dl_lock'] = lockname
146 if lockname not in NetLocks:
147 NetLocks[lockname] = locks.MasterLock(lockname)
148 if ini.has_option(section, "ul_lock"):
149 lockname = ini.get(section, "ul_lock")
150 sl_props['ul_lock'] = lockname
151 if lockname not in NetLocks:
152 NetLocks[lockname] = locks.MasterLock(lockname)
153 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
154
155 c['protocols'] = {'pb': {'port': pb_port}}
156
157 # coalesce builds
158 c['collapseRequests'] = True
159
160 # Reduce amount of backlog data
161 c['configurators'] = [util.JanitorConfigurator(
162 logHorizon=timedelta(days=3),
163 hour=6,
164 )]
165
166 @defer.inlineCallbacks
167 def getNewestCompleteTime(bldr):
168 """Returns the complete_at of the latest completed and not SKIPPED
169 build request for this builder, or None if there are no such build
170 requests. We need to filter out SKIPPED requests because we're
171 using collapseRequests=True, which unfortunately marks all
172 previous requests as complete when a new buildset is created.
173
174 @returns: datetime instance or None, via Deferred
175 """
176
177 bldrid = yield bldr.getBuilderId()
178 completed = yield bldr.master.data.get(
179 ('builders', bldrid, 'buildrequests'),
180 [
181 resultspec.Filter('complete', 'eq', [True]),
182 resultspec.Filter('results', 'ne', [results.SKIPPED]),
183 ],
184 order=['-complete_at'], limit=1)
185 if not completed:
186 return
187
188 complete_at = completed[0]['complete_at']
189
190 last_build = yield bldr.master.data.get(
191 ('builds', ),
192 [
193 resultspec.Filter('builderid', 'eq', [bldrid]),
194 ],
195 order=['-started_at'], limit=1)
196
197 if last_build and last_build[0]:
198 last_complete_at = last_build[0]['complete_at']
199 if last_complete_at and (last_complete_at > complete_at):
200 return last_complete_at
201
202 return complete_at
203
204 @defer.inlineCallbacks
205 def prioritizeBuilders(master, builders):
206 """Returns sorted list of builders by their last timestamp of completed and
207 not skipped build.
208
209 @returns: list of sorted builders
210 """
211
212 def is_building(bldr):
213 return bool(bldr.building) or bool(bldr.old_building)
214
215 def bldr_info(bldr):
216 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
217 d.addCallback(lambda complete_at: (complete_at, bldr))
218 return d
219
220 def bldr_sort(item):
221 (complete_at, bldr) = item
222
223 if not complete_at:
224 date = datetime.min
225 complete_at = date.replace(tzinfo=tzutc())
226
227 if is_building(bldr):
228 date = datetime.max
229 complete_at = date.replace(tzinfo=tzutc())
230
231 return (complete_at, bldr.name)
232
233 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
234 results.sort(key=bldr_sort)
235
236 for r in results:
237 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
238
239 return [r[1] for r in results]
240
241 c['prioritizeBuilders'] = prioritizeBuilders
242
243 ####### CHANGESOURCES
244
245 branchNames = [branches[b]["name"] for b in branches]
246
247 # find targets
248 targets = set()
249
250 def populateTargets():
251 log.msg("Populating targets, this will take time")
252 sourcegit = work_dir + '/source.git'
253 for branch in branchNames:
254 if os.path.isdir(sourcegit):
255 subprocess.call(["rm", "-rf", sourcegit])
256
257 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
258
259 os.makedirs(sourcegit + '/tmp', exist_ok=True)
260 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
261 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
262
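# every line emitted by dump-target-info.pl is expected to begin with a
# "<target>/<subtarget>" pair; only that first field is collected here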
263 while True:
264 line = findtargets.stdout.readline()
265 if not line:
266 break
267 ta = line.decode().strip().split(' ')
268 targets.add(ta[0])
269
270 subprocess.call(["rm", "-rf", sourcegit])
271
272 populateTargets()
273
274 # the 'change_source' setting tells the buildmaster how it should find out
275 # about source code changes. Here we poll the configured repository for commits on the monitored branches.
276
277 c['change_source'] = []
278 c['change_source'].append(GitPoller(
279 repo_url,
280 workdir=work_dir+'/work.git', branches=branchNames,
281 pollAtLaunch=True, pollinterval=300))
282
283 ####### SCHEDULERS
284
285 # Configure the Schedulers, which decide how to react to incoming changes. In this
286 # case, changes on any monitored branch kick off builds on the per-target builders.
287
288 class TagChoiceParameter(BaseParameter):
289 spec_attributes = ["strict", "choices"]
290 type = "list"
291 strict = True
292
293 def __init__(self, name, label=None, **kw):
294 super().__init__(name, label, **kw)
295 self._choice_list = []
296
297 @property
298 def choices(self):
299 taglist = []
300 basever = re.search(r'-([0-9]+\.[0-9]+)$', "master") # XXX FIXME
301
302 if basever:
303 findtags = subprocess.Popen(
304 ['git', 'ls-remote', '--tags', repo_url],
305 stdout = subprocess.PIPE)
306
307 while True:
308 line = findtags.stdout.readline()
309
310 if not line:
311 break
312
313 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
314
315 if tagver and tagver[1].find(basever[1]) == 0:
316 taglist.append(tagver[1])
317
318 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
319 taglist.insert(0, '')
320
321 self._choice_list = taglist
322
323 return self._choice_list
324
325 def parse_from_arg(self, s):
326 if self.strict and s not in self._choice_list:
327 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
328 return s
329
330 c['schedulers'] = []
331 c['schedulers'].append(AnyBranchScheduler(
332 name = "all",
333 change_filter = util.ChangeFilter(branch=branchNames),
334 treeStableTimer = 15*60,
335 builderNames = list(targets)))
336
337 c['schedulers'].append(ForceScheduler(
338 name = "force",
339 buttonName = "Force builds",
340 label = "Force build details",
341 builderNames = [ "00_force_build" ],
342
343 codebases = [
344 util.CodebaseParameter(
345 "",
346 label = "Repository",
347 branch = util.FixedParameter(name = "branch", default = ""),
348 revision = util.FixedParameter(name = "revision", default = ""),
349 repository = util.FixedParameter(name = "repository", default = ""),
350 project = util.FixedParameter(name = "project", default = "")
351 )
352 ],
353
354 reason = util.StringParameter(
355 name = "reason",
356 label = "Reason",
357 default = "Trigger build",
358 required = True,
359 size = 80
360 ),
361
362 properties = [
363 util.NestedParameter(
364 name="options",
365 label="Build Options",
366 layout="vertical",
367 fields=[
368 util.ChoiceStringParameter(
369 name = "target",
370 label = "Build target",
371 default = "all",
372 choices = set([ "all" ]) | targets
373 ),
374 TagChoiceParameter(
375 name = "tag",
376 label = "Build tag",
377 default = ""
378 )
379 ]
380 )
381 ]
382 ))
383
384 ####### BUILDERS
385
386 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
387 # what steps, and which workers can execute them. Note that any particular build will
388 # only take place on one worker.
389
390 def IsTaggingRequested(step):
391 tag = step.getProperty("tag")
392 return tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag)
393
394 def IsNoMasterBuild(step):
395 return step.getProperty("branch") != "master"
396
397 def IsUsignEnabled(step):
398 branch = step.getProperty("branch")
399 return branch and branches[branch].get("usign_key")
400
401 def IsSignEnabled(step):
402 branch = step.getProperty("branch")
403 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
404
405 def IsKmodArchiveEnabled(step):
406 branch = step.getProperty("branch")
407 return branch and branches[branch].get("kmod_archive")
408
409 def GetBaseVersion(branch):
410 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
411 return branch.split('-')[1]
412 else:
413 return "master"
414
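# Illustrative examples (branch and tag names assumed): a release branch such
# as "openwrt-21.02" yields base version "21.02" and the upload prefix
# "21.02-SNAPSHOT/", a tagged build such as "21.02.3" uploads under "21.02.3/",
# and master builds use no prefix at all.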
415 @properties.renderer
416 def GetVersionPrefix(props):
417 branch = props.getProperty("branch")
418 basever = GetBaseVersion(branch)
419 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
420 return "%s/" % props["tag"]
421 elif basever != "master":
422 return "%s-SNAPSHOT/" % basever
423 else:
424 return ""
425
426 @util.renderer
427 def GetConfigSeed(props):
428 branch = props.getProperty("branch")
429 return branch and branches[branch].get("config_seed") or ""
430
431 @util.renderer
432 def GetRsyncParams(props, srcorbin, urlorkey):
433 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
434 branch = props.getProperty("branch")
435 opt = srcorbin + "_" + urlorkey
436 return branch and branches[branch].get(opt)
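# e.g. GetRsyncParams.withArgs("bin", "url") renders the branch's "bin_url"
# (the "binary_url" option of the branch section), or None if it is unset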
437
438 @util.renderer
439 def GetUsignKey(props):
440 branch = props.getProperty("branch")
441 return branch and branches[branch].get("usign_key")
442
443 def GetNextBuild(builder, requests):
444 for r in requests:
445 if r.properties:
446 # order tagged builds first
447 if r.properties.hasProperty("tag"):
448 return r
449 # then order by branch order
450 pbranch = r.properties.getProperty("branch")
451 for name in branchNames:
452 if pbranch == name:
453 return r
454
455 r = requests[0]
456 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
457 return r
458
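# MakeEnv() assembles the build environment: CCC/CCXX always point at the
# discovered host compilers, while tryccache=True routes CC/CXX through the
# ccache wrapper scripts installed by the "ccachecc"/"ccachecxx" steps below.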
459 def MakeEnv(overrides=None, tryccache=False):
460 env = {
461 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
462 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
463 }
464 if tryccache:
465 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
466 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
467 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
468 else:
469 env['CC'] = env['CCC']
470 env['CXX'] = env['CCXX']
471 env['CCACHE'] = ''
472 if overrides is not None:
473 env.update(overrides)
474 return env
475
476 @properties.renderer
477 def NetLockDl(props, extralock=None):
478 lock = None
479 locks = []
480 if props.hasProperty("dl_lock"):
481 lock = NetLocks[props["dl_lock"]]
482 if lock is not None:
483 locks.append(lock.access('exclusive'))
484 if extralock is not None:
485 locks.append(extralock)
486 return locks
487
488 @properties.renderer
489 def NetLockUl(props):
490 lock = None
491 if props.hasProperty("ul_lock"):
492 lock = NetLocks[props["ul_lock"]]
493 if lock is not None:
494 return [lock.access('exclusive')]
495 else:
496 return []
497
498 @util.renderer
499 def TagPropertyValue(props):
500 if props.hasProperty("options"):
501 options = props.getProperty("options")
502 if type(options) is dict:
503 return options.get("tag")
504 return None
505
506 def IsTargetSelected(target):
507 def CheckTargetProperty(step):
508 try:
509 options = step.getProperty("options")
510 if type(options) is dict:
511 selected_target = options.get("target", "all")
512 if selected_target != "all" and selected_target != target:
513 return False
514 except KeyError:
515 pass
516
517 return True
518
519 return CheckTargetProperty
520
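# Derives the usign public key from the configured secret key. The slicing
# below assumes the usign/signify key layout: bytes 0-1 carry the algorithm,
# bytes 32-39 the key number, and the trailing 32 bytes the Ed25519 public part.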
521 @util.renderer
522 def UsignSec2Pub(props):
523 branch = props.getProperty("branch")
524 try:
525 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
526 seckey = branches[branch].get("usign_key")
527 seckey = base64.b64decode(seckey)
528 except Exception:
529 return None
530 
531 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
532 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
533
534
535 c['builders'] = []
536
537 dlLock = locks.WorkerLock("worker_dl")
538
539 workerNames = [ ]
540
541 for worker in c['workers']:
542 workerNames.append(worker.workername)
543
544 force_factory = BuildFactory()
545
546 c['builders'].append(BuilderConfig(
547 name = "00_force_build",
548 workernames = workerNames,
549 factory = force_factory))
550
551 for target in targets:
552 ts = target.split('/')
553
554 factory = BuildFactory()
555
556 # setup shared work directory if required
557 factory.addStep(ShellCommand(
558 name = "sharedwd",
559 descriptionDone = "Shared work directory set up",
560 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
561 workdir = ".",
562 haltOnFailure = True))
563
564 # find number of cores
565 factory.addStep(SetPropertyFromCommand(
566 name = "nproc",
567 property = "nproc",
568 description = "Finding number of CPUs",
569 command = ["nproc"]))
570
571 # find gcc and g++ compilers
572 factory.addStep(FileDownload(
573 name = "dlfindbinpl",
574 mastersrc = scripts_dir + '/findbin.pl',
575 workerdest = "../findbin.pl",
576 mode = 0o755))
577
578 factory.addStep(SetPropertyFromCommand(
579 name = "gcc",
580 property = "cc_command",
581 description = "Finding gcc command",
582 command = [
583 "../findbin.pl", "gcc", "", "",
584 ],
585 haltOnFailure = True))
586
587 factory.addStep(SetPropertyFromCommand(
588 name = "g++",
589 property = "cxx_command",
590 description = "Finding g++ command",
591 command = [
592 "../findbin.pl", "g++", "", "",
593 ],
594 haltOnFailure = True))
595
596 # see if ccache is available
597 factory.addStep(SetPropertyFromCommand(
598 name = "ccache",
599 property = "ccache_command",
600 command = ["which", "ccache"],
601 description = "Testing for ccache command",
602 haltOnFailure = False,
603 flunkOnFailure = False,
604 warnOnFailure = False,
605 hideStepIf = lambda r, s: r==results.FAILURE,
606 ))
607
608 # check out the source
609 # Git() runs:
610 # if repo doesn't exist: 'git clone repourl'
611 # method 'clean' runs 'git clean -d -f'; method 'fresh' runs 'git clean -f -f -d -x'; both only work with mode='full'
612 # git cat-file -e <commit>
613 # git checkout -f <commit>
614 # git checkout -B <branch>
615 # git rev-parse HEAD
616 factory.addStep(Git(
617 name = "git",
618 repourl = repo_url,
619 mode = 'full',
620 method = 'fresh',
621 locks = NetLockDl,
622 haltOnFailure = True,
623 ))
624
625 # update remote refs
626 factory.addStep(ShellCommand(
627 name = "fetchrefs",
628 description = "Fetching Git remote refs",
629 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
630 haltOnFailure = True
631 ))
632
633 # switch to tag
634 factory.addStep(ShellCommand(
635 name = "switchtag",
636 description = "Checking out Git tag",
637 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
638 haltOnFailure = True,
639 doStepIf = IsTaggingRequested
640 ))
641
642 # Verify that Git HEAD points to a tag or branch
643 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
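# The pipeline below accepts HEAD if "git rev-parse --abbrev-ref HEAD" reports
# a branch name (anything but the literal "HEAD"), or, for a detached HEAD,
# if the current commit is referenced by a release tag matching "v[0-9][0-9].".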
644 factory.addStep(ShellCommand(
645 name = "gitverify",
646 description = "Ensure that Git HEAD is pointing to a branch or tag",
647 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
648 haltOnFailure = True))
649
650 factory.addStep(ShellCommand(
651 name = "rmtmp",
652 description = "Remove tmp folder",
653 command=["rm", "-rf", "tmp/"]))
654
655 # feed
656 factory.addStep(ShellCommand(
657 name = "rmfeedlinks",
658 description = "Remove feed symlinks",
659 command=["rm", "-rf", "package/feeds/"]))
660
661 factory.addStep(StringDownload(
662 name = "ccachecc",
663 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
664 workerdest = "../ccache_cc.sh",
665 mode = 0o755,
666 ))
667
668 factory.addStep(StringDownload(
669 name = "ccachecxx",
670 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
671 workerdest = "../ccache_cxx.sh",
672 mode = 0o755,
673 ))
674
675 # feed
676 factory.addStep(ShellCommand(
677 name = "updatefeeds",
678 description = "Updating feeds",
679 command=["./scripts/feeds", "update"],
680 env = MakeEnv(tryccache=True),
681 haltOnFailure = True,
682 locks = NetLockDl,
683 ))
684
685 # feed
686 factory.addStep(ShellCommand(
687 name = "installfeeds",
688 description = "Installing feeds",
689 command=["./scripts/feeds", "install", "-a"],
690 env = MakeEnv(tryccache=True),
691 haltOnFailure = True
692 ))
693
694 # seed config
695 factory.addStep(StringDownload(
696 name = "dlconfigseed",
697 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
698 workerdest = ".config",
699 mode = 0o644
700 ))
701
702 # configure
703 factory.addStep(ShellCommand(
704 name = "newconfig",
705 descriptionDone = ".config seeded",
706 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey)
707 ))
708
709 factory.addStep(ShellCommand(
710 name = "delbin",
711 description = "Removing output directory",
712 command = ["rm", "-rf", "bin/"]
713 ))
714
715 factory.addStep(ShellCommand(
716 name = "defconfig",
717 description = "Populating .config",
718 command = ["make", "defconfig"],
719 env = MakeEnv()
720 ))
721
722 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
723 factory.addStep(ShellCommand(
724 name = "checkarch",
725 description = "Checking architecture",
726 descriptionDone = "Architecture validated",
727 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
728 logEnviron = False,
729 want_stdout = False,
730 want_stderr = False,
731 haltOnFailure = True,
732 flunkOnFailure = False, # this is not a build FAILURE
733 ))
734
735 # find libc suffix
736 factory.addStep(SetPropertyFromCommand(
737 name = "libc",
738 property = "libc",
739 description = "Finding libc suffix",
740 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
741
742 # install build key
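# Only the public key is real; "key-build" and "key-build.ucert" are
# placeholders, so the secret key never reaches the workers; the actual
# signing happens on the master in the "signfiles" step further down.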
743 factory.addStep(StringDownload(
744 name = "dlkeybuildpub",
745 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
746 workerdest = "key-build.pub",
747 mode = 0o600,
748 doStepIf = IsUsignEnabled,
749 ))
750
751 factory.addStep(StringDownload(
752 name = "dlkeybuild",
753 s = "# fake private key",
754 workerdest = "key-build",
755 mode = 0o600,
756 doStepIf = IsUsignEnabled,
757 ))
758
759 factory.addStep(StringDownload(
760 name = "dlkeybuilducert",
761 s = "# fake certificate",
762 workerdest = "key-build.ucert",
763 mode = 0o600,
764 doStepIf = IsUsignEnabled,
765 ))
766
767 # prepare dl
768 factory.addStep(ShellCommand(
769 name = "dldir",
770 description = "Preparing dl/",
771 descriptionDone = "dl/ prepared",
772 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
773 workdir = Property("builddir"),
774 logEnviron = False,
775 want_stdout = False
776 ))
777
778 # prepare tar
779 factory.addStep(ShellCommand(
780 name = "dltar",
781 description = "Building and installing GNU tar",
782 descriptionDone = "GNU tar built and installed",
783 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
784 env = MakeEnv(tryccache=True),
785 haltOnFailure = True
786 ))
787
788 # populate dl
789 factory.addStep(ShellCommand(
790 name = "dlrun",
791 description = "Populating dl/",
792 descriptionDone = "dl/ populated",
793 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
794 env = MakeEnv(),
795 logEnviron = False,
796 locks = NetLockDl.withArgs(dlLock.access('exclusive')),
797 ))
798
799 factory.addStep(ShellCommand(
800 name = "cleanbase",
801 description = "Cleaning base-files",
802 command=["make", "package/base-files/clean", "V=s"]
803 ))
804
805 # build
806 factory.addStep(ShellCommand(
807 name = "tools",
808 description = "Building and installing tools",
809 descriptionDone = "Tools built and installed",
810 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
811 env = MakeEnv(tryccache=True),
812 haltOnFailure = True
813 ))
814
815 factory.addStep(ShellCommand(
816 name = "toolchain",
817 description = "Building and installing toolchain",
818 descriptionDone = "Toolchain built and installed",
819 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
820 env = MakeEnv(),
821 haltOnFailure = True
822 ))
823
824 factory.addStep(ShellCommand(
825 name = "kmods",
826 description = "Building kmods",
827 descriptionDone = "Kmods built",
828 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
829 env = MakeEnv(),
830 haltOnFailure = True
831 ))
832
833 # find kernel version
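# the three values are joined as "<version>-<release>-<vermagic>" (for example
# something like "5.10.176-1-<vermagic>"; illustrative) and reused below as
# the kmods/<kernelversion> directory name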
834 factory.addStep(SetPropertyFromCommand(
835 name = "kernelversion",
836 property = "kernelversion",
837 description = "Finding the effective Kernel version",
838 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
839 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
840 ))
841
842 factory.addStep(ShellCommand(
843 name = "pkgclean",
844 description = "Cleaning up package build",
845 descriptionDone = "Package build cleaned up",
846 command=["make", "package/cleanup", "V=s"]
847 ))
848
849 factory.addStep(ShellCommand(
850 name = "pkgbuild",
851 description = "Building packages",
852 descriptionDone = "Packages built",
853 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
854 env = MakeEnv(),
855 haltOnFailure = True
856 ))
857
858 factory.addStep(ShellCommand(
859 name = "pkginstall",
860 description = "Installing packages",
861 descriptionDone = "Packages installed",
862 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
863 env = MakeEnv(),
864 haltOnFailure = True
865 ))
866
867 factory.addStep(ShellCommand(
868 name = "pkgindex",
869 description = "Indexing packages",
870 descriptionDone = "Packages indexed",
871 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
872 env = MakeEnv(),
873 haltOnFailure = True
874 ))
875
876 factory.addStep(ShellCommand(
877 name = "images",
878 description = "Building and installing images",
879 descriptionDone = "Images built and installed",
880 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
881 env = MakeEnv(),
882 haltOnFailure = True
883 ))
884
885 factory.addStep(ShellCommand(
886 name = "buildinfo",
887 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
888 command = "make -j1 buildinfo V=s || true",
889 env = MakeEnv(),
890 haltOnFailure = True
891 ))
892
893 factory.addStep(ShellCommand(
894 name = "json_overview_image_info",
895 description = "Generating profiles.json in target folder",
896 command = "make -j1 json_overview_image_info V=s || true",
897 env = MakeEnv(),
898 haltOnFailure = True
899 ))
900
901 factory.addStep(ShellCommand(
902 name = "checksums",
903 description = "Calculating checksums",
904 descriptionDone = "Checksums calculated",
905 command=["make", "-j1", "checksum", "V=s"],
906 env = MakeEnv(),
907 haltOnFailure = True
908 ))
909
910 factory.addStep(ShellCommand(
911 name = "kmoddir",
912 descriptionDone = "Kmod directory created",
913 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
914 haltOnFailure = True,
915 doStepIf = IsKmodArchiveEnabled,
916 ))
917
918 factory.addStep(ShellCommand(
919 name = "kmodprepare",
920 description = "Preparing kmod archive",
921 descriptionDone = "Kmod archive prepared",
922 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
923 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
924 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
925 haltOnFailure = True,
926 doStepIf = IsKmodArchiveEnabled,
927 ))
928
929 factory.addStep(ShellCommand(
930 name = "kmodindex",
931 description = "Indexing kmod archive",
932 descriptionDone = "Kmod archive indexed",
933 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
934 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
935 env = MakeEnv(),
936 haltOnFailure = True,
937 doStepIf = IsKmodArchiveEnabled,
938 ))
939
940 # sign
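# Signing round trip: pack the sha256sums/Packages files on the worker, upload
# the tarball to the master, sign it there with signall.sh (which has access
# to the configured usign/GPG keys), then download and unpack the signed
# files on the worker again.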
941 factory.addStep(MasterShellCommand(
942 name = "signprepare",
943 descriptionDone = "Temporary signing directory prepared",
944 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
945 haltOnFailure = True,
946 doStepIf = IsSignEnabled,
947
948 ))
949
950 factory.addStep(ShellCommand(
951 name = "signpack",
952 description = "Packing files to sign",
953 descriptionDone = "Files to sign packed",
954 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
955 haltOnFailure = True,
956 doStepIf = IsSignEnabled,
957 ))
958
959 factory.addStep(FileUpload(
960 workersrc = "sign.tar.gz",
961 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
962 haltOnFailure = True,
963 doStepIf = IsSignEnabled,
964 ))
965
966 factory.addStep(MasterShellCommand(
967 name = "signfiles",
968 description = "Signing files",
969 descriptionDone = "Files signed",
970 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
971 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
972 haltOnFailure = True,
973 doStepIf = IsSignEnabled,
974 ))
975
976 factory.addStep(FileDownload(
977 name = "dlsigntargz",
978 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
979 workerdest = "sign.tar.gz",
980 haltOnFailure = True,
981 doStepIf = IsSignEnabled,
982 ))
983
984 factory.addStep(ShellCommand(
985 name = "signunpack",
986 description = "Unpacking signed files",
987 descriptionDone = "Signed files unpacked",
988 command = ["tar", "-xzf", "sign.tar.gz"],
989 haltOnFailure = True,
990 doStepIf = IsSignEnabled,
991 ))
992
993 # upload
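# Upload sequence: recreate the remote directory layout under tmp/upload and
# rsync it first, then transfer only new or changed target files (determined
# via the sha256sums comparison below), prune files that no longer exist
# locally, and finally upload the kmod archive and any new source archives.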
994 factory.addStep(ShellCommand(
995 name = "dirprepare",
996 descriptionDone = "Upload directory structure prepared",
997 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
998 haltOnFailure = True
999 ))
1000
1001 factory.addStep(ShellCommand(
1002 name = "linkprepare",
1003 descriptionDone = "Repository symlink prepared",
1004 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1005 doStepIf = IsNoMasterBuild,
1006 haltOnFailure = True
1007 ))
1008
1009 factory.addStep(ShellCommand(
1010 name = "kmoddirprepare",
1011 descriptionDone = "Kmod archive upload directory prepared",
1012 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1013 haltOnFailure = True,
1014 doStepIf = IsKmodArchiveEnabled,
1015 ))
1016
1017 factory.addStep(ShellCommand(
1018 name = "dirupload",
1019 description = "Uploading directory structure",
1020 descriptionDone = "Directory structure uploaded",
1021 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1022 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1023 haltOnFailure = True,
1024 logEnviron = False,
1025 locks = NetLockUl,
1026 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1027 ))
1028
1029 # download remote sha256sums to 'target-sha256sums'
1030 factory.addStep(ShellCommand(
1031 name = "target-sha256sums",
1032 description = "Fetching remote sha256sums for target",
1033 descriptionDone = "Remote sha256sums for target fetched",
1034 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1035 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1036 logEnviron = False,
1037 haltOnFailure = False,
1038 flunkOnFailure = False,
1039 warnOnFailure = False,
1040 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1041 ))
1042
1043 # build list of files to upload
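# sha2rsync.pl compares the remote "target-sha256sums" with the locally built
# sha256sums file and writes the names of new or changed files to "rsynclist";
# "targetupload" below then only transfers those files (--files-from=rsynclist).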
1044 factory.addStep(FileDownload(
1045 name = "dlsha2rsyncpl",
1046 mastersrc = scripts_dir + '/sha2rsync.pl',
1047 workerdest = "../sha2rsync.pl",
1048 mode = 0o755,
1049 ))
1050
1051 factory.addStep(ShellCommand(
1052 name = "buildlist",
1053 description = "Building list of files to upload",
1054 descriptionDone = "List of files to upload built",
1055 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1056 haltOnFailure = True,
1057 ))
1058
1059 factory.addStep(FileDownload(
1060 name = "dlrsync.sh",
1061 mastersrc = scripts_dir + '/rsync.sh',
1062 workerdest = "../rsync.sh",
1063 mode = 0o755
1064 ))
1065
1066 # upload new files and update existing ones
1067 factory.addStep(ShellCommand(
1068 name = "targetupload",
1069 description = "Uploading target files",
1070 descriptionDone = "Target files uploaded",
1071 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1072 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1073 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1074 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1075 haltOnFailure = True,
1076 logEnviron = False,
1077 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1078 ))
1079
1080 # delete files which don't exist locally
1081 factory.addStep(ShellCommand(
1082 name = "targetprune",
1083 description = "Pruning target files",
1084 descriptionDone = "Target files pruned",
1085 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1086 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1087 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1088 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1089 haltOnFailure = True,
1090 logEnviron = False,
1091 locks = NetLockUl,
1092 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1093 ))
1094
1095 factory.addStep(ShellCommand(
1096 name = "kmodupload",
1097 description = "Uploading kmod archive",
1098 descriptionDone = "Kmod archive uploaded",
1099 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1100 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1101 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1102 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1103 haltOnFailure = True,
1104 logEnviron = False,
1105 locks = NetLockUl,
1106 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
1107 ))
1108
1109 factory.addStep(ShellCommand(
1110 name = "sourcelist",
1111 description = "Finding source archives to upload",
1112 descriptionDone = "Source archives to upload found",
1113 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1114 haltOnFailure = True
1115 ))
1116
1117 factory.addStep(ShellCommand(
1118 name = "sourceupload",
1119 description = "Uploading source archives",
1120 descriptionDone = "Source archives uploaded",
1121 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1122 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1123 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1124 haltOnFailure = True,
1125 logEnviron = False,
1126 locks = NetLockUl,
1127 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1128 ))
1129
1130 factory.addStep(ShellCommand(
1131 name = "df",
1132 description = "Reporting disk usage",
1133 command=["df", "-h", "."],
1134 env={'LC_ALL': 'C'},
1135 haltOnFailure = False,
1136 flunkOnFailure = False,
1137 warnOnFailure = False,
1138 alwaysRun = True
1139 ))
1140
1141 factory.addStep(ShellCommand(
1142 name = "du",
1143 description = "Reporting estimated file space usage",
1144 command=["du", "-sh", "."],
1145 env={'LC_ALL': 'C'},
1146 haltOnFailure = False,
1147 flunkOnFailure = False,
1148 warnOnFailure = False,
1149 alwaysRun = True
1150 ))
1151
1152 factory.addStep(ShellCommand(
1153 name = "ccachestat",
1154 description = "Reporting ccache stats",
1155 command=["ccache", "-s"],
1156 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1157 want_stderr = False,
1158 haltOnFailure = False,
1159 flunkOnFailure = False,
1160 warnOnFailure = False,
1161 hideStepIf = lambda r, s: r==results.FAILURE,
1162 ))
1163
1164 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1165
1166 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1167 force_factory.addStep(steps.Trigger(
1168 name = "trigger_%s" % target,
1169 description = "Triggering %s build" % target,
1170 schedulerNames = [ "trigger_%s" % target ],
1171 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1172 doStepIf = IsTargetSelected(target)
1173 ))
1174
1175
1176 ####### STATUS TARGETS
1177
1178 # The results of each build are published by the targets configured below:
1179 # the web UI ('www', with waterfall, console and grid views) and, optionally,
1180 # an IRC reporter.
1181
1182 if "status_bind" in inip1:
1183 c['www'] = {
1184 'port': inip1.get("status_bind"),
1185 'plugins': {
1186 'waterfall_view': True,
1187 'console_view': True,
1188 'grid_view': True
1189 }
1190 }
1191
1192 if "status_user" in inip1 and "status_password" in inip1:
1193 c['www']['auth'] = util.UserPasswordAuth([
1194 (inip1.get("status_user"), inip1.get("status_password"))
1195 ])
1196 c['www']['authz'] = util.Authz(
1197 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1198 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1199 )
1200
1201 c['services'] = []
1202 if ini.has_section("irc"):
1203 iniirc = ini['irc']
1204 irc_host = iniirc.get("host", None)
1205 irc_port = iniirc.getint("port", 6667)
1206 irc_chan = iniirc.get("channel", None)
1207 irc_nick = iniirc.get("nickname", None)
1208 irc_pass = iniirc.get("password", None)
1209
1210 if irc_host and irc_nick and irc_chan:
1211 irc = reporters.IRC(irc_host, irc_nick,
1212 port = irc_port,
1213 password = irc_pass,
1214 channels = [ irc_chan ],
1215 notify_events = [ 'exception', 'problem', 'recovery' ]
1216 )
1217
1218 c['services'].append(irc)
1219
1220 c['revlink'] = util.RevlinkMatch([
1221 r'https://git.openwrt.org/openwrt/(.*).git'
1222 ],
1223 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1224
1225 ####### DB URL
1226
1227 c['db'] = {
1228 # This specifies what database buildbot uses to store its state. You can leave
1229 # this at its default for all but the largest installations.
1230 'db_url' : "sqlite:///state.sqlite",
1231 }
1232
1233 c['buildbotNetUsageData'] = None