phase1: fix option parsing typo for 'ul_lock'
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Write a pid file ourselves when twistd has not created one yet, so the
# process manager can always find and stop this master by pid.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load the INI configuration; the path can be overridden through the
# BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# All three sections are mandatory for a phase-1 master.
if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
    raise ValueError("Fix your configuration")

# Shorthand for the phase-1 specific settings.
inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
62
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = inip1.get("buildbot_url")
79
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

c['workers'] = []
NetLocks = dict()

for section in ini.sections():
    # Only "[worker ...]" sections with credentials belong here.
    if not section.startswith("worker "):
        continue
    if not (ini.has_option(section, "name") and ini.has_option(section, "password")):
        continue
    # A worker without a "phase" option implicitly belongs to phase 1.
    if ini.has_option(section, "phase") and ini.getint(section, "phase") != 1:
        continue

    name = ini.get(section, "name")
    password = ini.get(section, "password")

    # Optional shared network locks throttling this worker's download
    # and upload traffic; locks are shared by name across workers.
    sl_props = { 'dl_lock': None, 'ul_lock': None }
    for lock_opt in ('dl_lock', 'ul_lock'):
        if ini.has_option(section, lock_opt):
            lockname = ini.get(section, lock_opt)
            sl_props[lock_opt] = lockname
            if lockname not in NetLocks:
                NetLocks[lockname] = locks.MasterLock(lockname)

    c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
107
# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds: merge queued requests for the same builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data: drop build logs older than three days,
# running the janitor daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
120
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also consult the most recently *started* build; if it finished later
    # than the request found above, prefer its completion time.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
158
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Builders with active (or stale) builds should be serviced last.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders get the oldest possible timestamp so they
        # are scheduled first ...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ... while currently-building ones get the newest possible
        # timestamp so they sort last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Fix: renamed from 'results' to 'infos' so the local no longer shadows
    # the buildbot.process.results module imported at the top of this file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]

c['prioritizeBuilders'] = prioritizeBuilders
197
####### CHANGESOURCES

# Local working state lives under work_dir; helper scripts are expected to
# be checked out next to the master's base directory.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# --contimeout is only valid when talking to an rsync daemon
# (host::module or rsync:// URLs), not when rsync runs over a remote shell.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

usign_key = None
# Default usign comment derived from the branch name, e.g.
# "untrusted comment: Openwrt 21.02 key".
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)


# find targets: keep a shallow clone of the source tree around and ask its
# dump-target-info.pl script for the list of target/subtarget pairs.
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    # Each output line starts with the "<target>/<subtarget>" field.
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])


# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
260
261 ####### SCHEDULERS
262
263 # Configure the Schedulers, which decide how to react to incoming changes. In this
264 # case, just kick off a 'basebuild' build
265
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter listing the Git tags that may be built.

    The choice list is computed lazily from `git ls-remote` against the
    configured repository, restricted to release tags that belong to the
    current branch's base version.
    """

    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Cached tag list; refreshed whenever 'choices' is read.
        self._choice_list = []

    @property
    def choices(self):
        """Return the selectable tag names, newest release first.

        Only meaningful on release branches named like '...-NN.MM'; on
        other branches (e.g. master) the list stays empty.
        """
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                # Accept tags of the form vNN.MM.PP or vNN.MM.PP-rcN ...
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                # ... but only those matching this branch's base version.
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Sort key appends '-z' to final releases so they sort after
            # their own -rcN prereleases; with reverse=True the final
            # release then comes before its release candidates.
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            # The empty entry means "no tag requested" (ordinary build).
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # strict mode: only accept values that were actually offered.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
307
c['schedulers'] = []
# Automatic builds: kick off every target builder when new commits land on
# the tracked branch, after a 60 s settle time.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual builds: a force scheduler attached to the dummy 00_force_build
# builder; the chosen target/tag are handed down via the nested 'options'
# property consumed by the real builders.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
361
362 ####### BUILDERS
363
364 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
365 # what steps, and which workers can execute them. Note that any particular build will
366 # only take place on one worker.
367
def IsTaggingRequested(step):
    """doStepIf predicate: True when the build carries a valid release tag.

    A valid 'tag' property looks like NN.MM.PP or NN.MM.PP-rcN.
    """
    tag = step.getProperty("tag")
    return bool(tag) and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag) is not None
374
def IsNoMasterBuild(step):
    """doStepIf predicate: True on release branches, False on master."""
    return not (repo_branch == "master")
377
def GetBaseVersion():
    """Return 'NN.MM' from a release branch named like 'name-NN.MM',
    or 'master' for any other branch."""
    m = re.match(r"^[^-]+-([0-9]+\.[0-9]+)$", repo_branch)
    return m[1] if m else "master"
383
@properties.renderer
def GetVersionPrefix(props):
    """Upload path prefix: 'TAG/' for tagged release builds,
    'NN.MM-SNAPSHOT/' on release branches, '' on master."""
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    basever = GetBaseVersion()
    return "%s-SNAPSHOT/" % basever if basever != "master" else ""
393
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer forced (tagged) release requests over the
    plain FIFO order."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
402
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict handed to build steps.

    CCC/CCXX always carry the detected host compilers; CC/CXX either point
    at the ccache wrapper scripts (tryccache=True) or directly at the
    compilers. Entries from 'overrides' win over everything else.
    """
    env = {
        'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
        'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    }
    if tryccache:
        compilers = {
            'CC': Interpolate("%(prop:builddir)s/ccache_cc.sh"),
            'CXX': Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
            'CCACHE': Interpolate("%(prop:ccache_command:-)s"),
        }
    else:
        compilers = {'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''}
    env.update(compilers)
    if overrides is not None:
        env.update(overrides)
    return env
419
@properties.renderer
def NetLockDl(props):
    """Render the exclusive download network lock configured for this
    worker, or no locks at all."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
429
@properties.renderer
def NetLockUl(props):
    """Render the exclusive upload network lock configured for this
    worker, or no locks at all."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
439
@util.renderer
def TagPropertyValue(props):
    """Pull the requested tag out of the force-build 'options' property."""
    if not props.hasProperty("options"):
        return None
    options = props.getProperty("options")
    return options.get("tag") if type(options) is dict else None
447
def IsTargetSelected(target):
    """Return a doStepIf predicate that is True when the forced build
    selected this builder's target (or "all")."""
    def _check(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            chosen = options.get("target", "all")
            return chosen in ("all", target)
        return True
    return _check
462
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key file contents from a base64 secret key.

    seckey  - base64-encoded usign secret key material
    comment - the secret key's comment line; a trailing 'secret key' is
              rewritten to 'public key'

    Returns the two-line public key file as a str, or None when the key
    material is missing or not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except (ValueError, TypeError):
        # Bad or missing key material - callers treat None as
        # "no key configured".
        return None

    # usign secret key layout: [0:2] algorithm id, [32:40] key id,
    # [72:] the public key - exactly the fields forming the public blob.
    pubkey = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode()

    # Fix: b64encode() returns bytes on Python 3; without .decode() the
    # generated key-build.pub contained a literal "b'...'" representation.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubkey)
471
472
c['builders'] = []

# Serializes dl/ population on each worker.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [worker.workername for worker in c['workers']]

# Dummy builder backing the force scheduler; the real work happens in the
# per-target builders it triggers via properties.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
488
489 for target in targets:
490 ts = target.split('/')
491
492 factory = BuildFactory()
493
494 # setup shared work directory if required
495 factory.addStep(ShellCommand(
496 name = "sharedwd",
497 description = "Setting up shared work directory",
498 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
499 workdir = ".",
500 haltOnFailure = True))
501
502 # find number of cores
503 factory.addStep(SetPropertyFromCommand(
504 name = "nproc",
505 property = "nproc",
506 description = "Finding number of CPUs",
507 command = ["nproc"]))
508
509 # find gcc and g++ compilers
510 factory.addStep(FileDownload(
511 name = "dlfindbinpl",
512 mastersrc = scripts_dir + '/findbin.pl',
513 workerdest = "../findbin.pl",
514 mode = 0o755))
515
516 factory.addStep(SetPropertyFromCommand(
517 name = "gcc",
518 property = "cc_command",
519 description = "Finding gcc command",
520 command = [
521 "../findbin.pl", "gcc", "", "",
522 ],
523 haltOnFailure = True))
524
525 factory.addStep(SetPropertyFromCommand(
526 name = "g++",
527 property = "cxx_command",
528 description = "Finding g++ command",
529 command = [
530 "../findbin.pl", "g++", "", "",
531 ],
532 haltOnFailure = True))
533
534 # see if ccache is available
535 factory.addStep(SetPropertyFromCommand(
536 property = "ccache_command",
537 command = ["which", "ccache"],
538 description = "Testing for ccache command",
539 haltOnFailure = False,
540 flunkOnFailure = False,
541 warnOnFailure = False,
542 ))
543
544 # Workaround bug when switching from a checked out tag back to a branch
545 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
546 factory.addStep(ShellCommand(
547 name = "gitcheckout",
548 description = "Ensure that Git HEAD is sane",
549 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
550 haltOnFailure = True))
551
552 # check out the source
553 # Git() runs:
554 # if repo doesn't exist: 'git clone repourl'
555 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
556 # 'git fetch -t repourl branch; git reset --hard revision'
557 factory.addStep(Git(
558 name = "git",
559 repourl = repo_url,
560 branch = repo_branch,
561 mode = 'full',
562 method = 'fresh',
563 locks = NetLockDl,
564 haltOnFailure = True,
565 ))
566
567 # update remote refs
568 factory.addStep(ShellCommand(
569 name = "fetchrefs",
570 description = "Fetching Git remote refs",
571 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
572 haltOnFailure = True
573 ))
574
575 # switch to tag
576 factory.addStep(ShellCommand(
577 name = "switchtag",
578 description = "Checking out Git tag",
579 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
580 haltOnFailure = True,
581 doStepIf = IsTaggingRequested
582 ))
583
584 # Verify that Git HEAD points to a tag or branch
585 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
586 factory.addStep(ShellCommand(
587 name = "gitverify",
588 description = "Ensure that Git HEAD is pointing to a branch or tag",
589 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
590 haltOnFailure = True))
591
592 factory.addStep(ShellCommand(
593 name = "rmtmp",
594 description = "Remove tmp folder",
595 command=["rm", "-rf", "tmp/"]))
596
597 # feed
598 factory.addStep(ShellCommand(
599 name = "rmfeedlinks",
600 description = "Remove feed symlinks",
601 command=["rm", "-rf", "package/feeds/"]))
602
603 factory.addStep(StringDownload(
604 name = "ccachecc",
605 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
606 workerdest = "../ccache_cc.sh",
607 mode = 0o755,
608 ))
609
610 factory.addStep(StringDownload(
611 name = "ccachecxx",
612 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
613 workerdest = "../ccache_cxx.sh",
614 mode = 0o755,
615 ))
616
617 # feed
618 factory.addStep(ShellCommand(
619 name = "updatefeeds",
620 description = "Updating feeds",
621 command=["./scripts/feeds", "update"],
622 env = MakeEnv(tryccache=True),
623 haltOnFailure = True,
624 locks = NetLockDl,
625 ))
626
627 # feed
628 factory.addStep(ShellCommand(
629 name = "installfeeds",
630 description = "Installing feeds",
631 command=["./scripts/feeds", "install", "-a"],
632 env = MakeEnv(tryccache=True),
633 haltOnFailure = True
634 ))
635
636 # seed config
637 if config_seed is not None:
638 factory.addStep(StringDownload(
639 name = "dlconfigseed",
640 s = config_seed + '\n',
641 workerdest = ".config",
642 mode = 0o644
643 ))
644
645 # configure
646 factory.addStep(ShellCommand(
647 name = "newconfig",
648 description = "Seeding .config",
649 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
650 ))
651
652 factory.addStep(ShellCommand(
653 name = "delbin",
654 description = "Removing output directory",
655 command = ["rm", "-rf", "bin/"]
656 ))
657
658 factory.addStep(ShellCommand(
659 name = "defconfig",
660 description = "Populating .config",
661 command = ["make", "defconfig"],
662 env = MakeEnv()
663 ))
664
665 # check arch
666 factory.addStep(ShellCommand(
667 name = "checkarch",
668 description = "Checking architecture",
669 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
670 logEnviron = False,
671 want_stdout = False,
672 want_stderr = False,
673 haltOnFailure = True
674 ))
675
676 # find libc suffix
677 factory.addStep(SetPropertyFromCommand(
678 name = "libc",
679 property = "libc",
680 description = "Finding libc suffix",
681 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
682
683 # install build key
684 if usign_key is not None:
685 factory.addStep(StringDownload(
686 name = "dlkeybuildpub",
687 s = UsignSec2Pub(usign_key, usign_comment),
688 workerdest = "key-build.pub",
689 mode = 0o600,
690 ))
691
692 factory.addStep(StringDownload(
693 name = "dlkeybuild",
694 s = "# fake private key",
695 workerdest = "key-build",
696 mode = 0o600,
697 ))
698
699 factory.addStep(StringDownload(
700 name = "dlkeybuilducert",
701 s = "# fake certificate",
702 workerdest = "key-build.ucert",
703 mode = 0o600,
704 ))
705
706 # prepare dl
707 factory.addStep(ShellCommand(
708 name = "dldir",
709 description = "Preparing dl/",
710 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
711 logEnviron = False,
712 want_stdout = False
713 ))
714
715 # prepare tar
716 factory.addStep(ShellCommand(
717 name = "dltar",
718 description = "Building and installing GNU tar",
719 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
720 env = MakeEnv(tryccache=True),
721 haltOnFailure = True
722 ))
723
724 # populate dl
725 factory.addStep(ShellCommand(
726 name = "dlrun",
727 description = "Populating dl/",
728 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
729 env = MakeEnv(),
730 logEnviron = False,
731 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
732 ))
733
734 factory.addStep(ShellCommand(
735 name = "cleanbase",
736 description = "Cleaning base-files",
737 command=["make", "package/base-files/clean", "V=s"]
738 ))
739
740 # build
741 factory.addStep(ShellCommand(
742 name = "tools",
743 description = "Building and installing tools",
744 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
745 env = MakeEnv(tryccache=True),
746 haltOnFailure = True
747 ))
748
749 factory.addStep(ShellCommand(
750 name = "toolchain",
751 description = "Building and installing toolchain",
752 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
753 env = MakeEnv(),
754 haltOnFailure = True
755 ))
756
757 factory.addStep(ShellCommand(
758 name = "kmods",
759 description = "Building kmods",
760 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
761 env = MakeEnv(),
762 haltOnFailure = True
763 ))
764
765 # find kernel version
766 factory.addStep(SetPropertyFromCommand(
767 name = "kernelversion",
768 property = "kernelversion",
769 description = "Finding the effective Kernel version",
770 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
771 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
772 ))
773
774 factory.addStep(ShellCommand(
775 name = "pkgclean",
776 description = "Cleaning up package build",
777 command=["make", "package/cleanup", "V=s"]
778 ))
779
780 factory.addStep(ShellCommand(
781 name = "pkgbuild",
782 description = "Building packages",
783 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
784 env = MakeEnv(),
785 haltOnFailure = True
786 ))
787
788 factory.addStep(ShellCommand(
789 name = "pkginstall",
790 description = "Installing packages",
791 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
792 env = MakeEnv(),
793 haltOnFailure = True
794 ))
795
796 factory.addStep(ShellCommand(
797 name = "pkgindex",
798 description = "Indexing packages",
799 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
800 env = MakeEnv(),
801 haltOnFailure = True
802 ))
803
804 factory.addStep(ShellCommand(
805 name = "images",
806 description = "Building and installing images",
807 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
808 env = MakeEnv(),
809 haltOnFailure = True
810 ))
811
812 factory.addStep(ShellCommand(
813 name = "buildinfo",
814 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
815 command = "make -j1 buildinfo V=s || true",
816 env = MakeEnv(),
817 haltOnFailure = True
818 ))
819
820 factory.addStep(ShellCommand(
821 name = "json_overview_image_info",
822 description = "Generate profiles.json in target folder",
823 command = "make -j1 json_overview_image_info V=s || true",
824 env = MakeEnv(),
825 haltOnFailure = True
826 ))
827
828 factory.addStep(ShellCommand(
829 name = "checksums",
830 description = "Calculating checksums",
831 command=["make", "-j1", "checksum", "V=s"],
832 env = MakeEnv(),
833 haltOnFailure = True
834 ))
835
836 if enable_kmod_archive:
837 factory.addStep(ShellCommand(
838 name = "kmoddir",
839 description = "Creating kmod directory",
840 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
841 haltOnFailure = True
842 ))
843
844 factory.addStep(ShellCommand(
845 name = "kmodprepare",
846 description = "Preparing kmod archive",
847 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
848 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
849 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
850 haltOnFailure = True
851 ))
852
853 factory.addStep(ShellCommand(
854 name = "kmodindex",
855 description = "Indexing kmod archive",
856 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
857 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
858 env = MakeEnv(),
859 haltOnFailure = True
860 ))
861
862 # sign
863 if ini.has_option("gpg", "key") or usign_key is not None:
864 factory.addStep(MasterShellCommand(
865 name = "signprepare",
866 description = "Preparing temporary signing directory",
867 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
868 haltOnFailure = True
869 ))
870
871 factory.addStep(ShellCommand(
872 name = "signpack",
873 description = "Packing files to sign",
874 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
875 haltOnFailure = True
876 ))
877
878 factory.addStep(FileUpload(
879 workersrc = "sign.tar.gz",
880 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
881 haltOnFailure = True
882 ))
883
884 factory.addStep(MasterShellCommand(
885 name = "signfiles",
886 description = "Signing files",
887 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
888 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
889 haltOnFailure = True
890 ))
891
892 factory.addStep(FileDownload(
893 name = "dlsigntargz",
894 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
895 workerdest = "sign.tar.gz",
896 haltOnFailure = True
897 ))
898
899 factory.addStep(ShellCommand(
900 name = "signunpack",
901 description = "Unpacking signed files",
902 command = ["tar", "-xzf", "sign.tar.gz"],
903 haltOnFailure = True
904 ))
905
906 # upload
907 factory.addStep(ShellCommand(
908 name = "dirprepare",
909 description = "Preparing upload directory structure",
910 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
911 haltOnFailure = True
912 ))
913
914 factory.addStep(ShellCommand(
915 name = "linkprepare",
916 description = "Preparing repository symlink",
917 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
918 doStepIf = IsNoMasterBuild,
919 haltOnFailure = True
920 ))
921
922 if enable_kmod_archive:
923 factory.addStep(ShellCommand(
924 name = "kmoddirprepare",
925 description = "Preparing kmod archive upload directory",
926 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
927 haltOnFailure = True
928 ))
929
930 factory.addStep(ShellCommand(
931 name = "dirupload",
932 description = "Uploading directory structure",
933 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
934 env={'RSYNC_PASSWORD': rsync_bin_key},
935 haltOnFailure = True,
936 logEnviron = False,
937 locks = NetLockUl,
938 ))
939
940 # download remote sha256sums to 'target-sha256sums'
941 factory.addStep(ShellCommand(
942 name = "target-sha256sums",
943 description = "Fetching remote sha256sums for target",
944 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
945 env={'RSYNC_PASSWORD': rsync_bin_key},
946 logEnviron = False,
947 haltOnFailure = False,
948 flunkOnFailure = False,
949 warnOnFailure = False,
950 ))
951
952 # build list of files to upload
953 factory.addStep(FileDownload(
954 name = "dlsha2rsyncpl",
955 mastersrc = scripts_dir + '/sha2rsync.pl',
956 workerdest = "../sha2rsync.pl",
957 mode = 0o755,
958 ))
959
960 factory.addStep(ShellCommand(
961 name = "buildlist",
962 description = "Building list of files to upload",
963 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
964 haltOnFailure = True,
965 ))
966
967 factory.addStep(FileDownload(
968 name = "dlrsync.sh",
969 mastersrc = scripts_dir + '/rsync.sh',
970 workerdest = "../rsync.sh",
971 mode = 0o755
972 ))
973
	# upload new files and update existing ones
	# Push only the files named in 'rsynclist' to the download server.
	# /kmods/ is excluded here and handled by the dedicated step below;
	# --delay-updates plus a per-target partial dir keeps the remote tree
	# consistent and lets interrupted transfers resume.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
	))
985
	# delete files which don't exist locally
	# Second rsync pass: --existing --ignore-existing means no file contents
	# are transferred or replaced; combined with --delete it only removes
	# remote files that disappeared locally.
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		# NOTE(review): NetLockUl is defined earlier in this file; presumably
		# a master lock list bounding concurrent network uploads — verify.
		locks = NetLockUl,
	))
998
	if enable_kmod_archive:
		# Mirror the per-kernel kmods directory (excluded from the regular
		# target upload above) into the kmod archive on the server;
		# --delete keeps the remote copy exactly in sync with this build.
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
				["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
				Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
			locks = NetLockUl,
		))
1011
	if rsync_src_url is not None:
		# Collect source archives touched by this build: regular, non-empty,
		# non-hidden files directly in dl/ that are newer than .config,
		# skipping rsync/hash bookkeeping files.
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# Upload the collected archives to the source mirror.  --size-only
		# presumably avoids re-uploads due to differing mtimes across
		# workers (TODO confirm); the per-worker partial dir prevents
		# clashes between concurrent uploads of the same archive.
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
				[Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
			locks = NetLockUl,
		))
1030
1031 factory.addStep(ShellCommand(
1032 name = "df",
1033 description = "Reporting disk usage",
1034 command=["df", "-h", "."],
1035 env={'LC_ALL': 'C'},
1036 haltOnFailure = False,
1037 flunkOnFailure = False,
1038 warnOnFailure = False,
1039 alwaysRun = True
1040 ))
1041
1042 factory.addStep(ShellCommand(
1043 name = "du",
1044 description = "Reporting estimated file space usage",
1045 command=["du", "-sh", "."],
1046 env={'LC_ALL': 'C'},
1047 haltOnFailure = False,
1048 flunkOnFailure = False,
1049 warnOnFailure = False,
1050 alwaysRun = True
1051 ))
1052
1053 factory.addStep(ShellCommand(
1054 name = "ccachestat",
1055 description = "Reporting ccache stats",
1056 command=["ccache", "-s"],
1057 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1058 want_stderr = False,
1059 haltOnFailure = False,
1060 flunkOnFailure = False,
1061 warnOnFailure = False,
1062 alwaysRun = True,
1063 ))
1064
	# Register the per-target builder, a Triggerable scheduler for it, and a
	# Trigger step on the force-build factory so individual targets can be
	# kicked off selectively (gated by IsTargetSelected).
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1075
1076
1077 ####### STATUS TARGETS
1078
# Build results are published through status targets. In current Buildbot
# these are configured via the 'www' plugin settings and the 'services'
# reporters (web UI, IRC) set up below.
1082
if "status_bind" in inip1:
	# Web UI: bind address/port taken from the ini; enable the standard views.
	c['www'] = {
		'port': inip1.get("status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	# Optional basic authentication: a single admin account which is granted
	# access to every control endpoint via the "admins" role.  Without
	# status_user/status_password the UI stays read-only/anonymous.
	if "status_user" in inip1 and "status_password" in inip1:
		c['www']['auth'] = util.UserPasswordAuth([
			(inip1.get("status_user"), inip1.get("status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
		)
1101
c['services'] = []
if ini.has_section("irc"):
	# Optional IRC reporter, configured from the [irc] ini section.
	iniirc = ini['irc']
	irc_host = iniirc.get("host", None)
	irc_port = iniirc.getint("port", 6667)
	irc_chan = iniirc.get("channel", None)
	irc_nick = iniirc.get("nickname", None)
	irc_pass = iniirc.get("password", None)

	# Host, nickname and channel are all required; port and password
	# fall back to 6667 / no password.
	if irc_host and irc_nick and irc_chan:
		c['services'].append(reporters.IRC(
			irc_host,
			irc_nick,
			port = irc_port,
			password = irc_pass,
			channels = [ irc_chan ],
			notify_events = [ 'exception', 'problem', 'recovery' ]
		))
1120
# Turn OpenWrt git repository URLs seen in changes into gitweb commit links.
c['revlink'] = util.RevlinkMatch(
	[r'https://git.openwrt.org/openwrt/(.*).git'],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1125
####### DB URL

# Buildbot's state database; the sqlite default is adequate for all but the
# largest installations.
c['db'] = {'db_url': "sqlite:///state.sqlite"}

# Opt out of sending anonymous usage data to the buildbot project.
c['buildbotNetUsageData'] = None