phase1: reorder/regroup globals
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record our PID so init/monitoring scripts can find the running master
# even when twistd has not written its own pidfile yet.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# NOTE(review): only these three sections are validated here; the "repo"
# section read below is not checked and a missing one raises KeyError
# instead of this friendly error — confirm whether "repo" should be added.
if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
    raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# Globals
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Seed contents prepended to each builder's .config before "make defconfig".
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

# rsync destination for built binaries. "--contimeout" is only valid when
# talking to an rsync daemon (host::module or rsync:// URL), hence the check.
# NOTE(review): a missing binary_url leaves rsync_bin_url as None and the
# .find() below raises AttributeError — confirm the option is mandatory.
rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

# rsync destination for source tarballs (the dl/ cache); same daemon check.
rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

# Optional usign package-signing key; the default comment is derived from
# the branch name, e.g. "openwrt-21.02" -> "... Openwrt 21.02 key".
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)

# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = inip1.get("buildbot_url")
114
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

c['workers'] = []
# Named master-side network locks, shared by all workers that declare the
# same dl_lock/ul_lock value in their ini section.
NetLocks = dict()

for section in ini.sections():
    if section.startswith("worker "):
        # Accept only phase-1 workers: either no "phase" option or phase == 1.
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Lock *names* travel as worker properties; the NetLockDl/NetLockUl
            # renderers later resolve them back to MasterLock objects.
            sl_props = { 'dl_lock':None, 'ul_lock':None }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            # One concurrent build per worker.
            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))

c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
153
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    # No completed, non-skipped requests at all -> implicit None.
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also consult the most recently *started* build: if it finished later
    # than the newest request, prefer its completion timestamp.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
191
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Builder is busy if it has current or not-yet-cleaned-up builds.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its newest completion time (a Deferred).
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders sort first (oldest possible timestamp).
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders sort last so idle ones are offered work first.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from 'results': that name shadowed the imported
    # buildbot.process.results module used elsewhere in this file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for _, bldr in infos]

c['prioritizeBuilders'] = prioritizeBuilders
230
####### CHANGESOURCES


# find targets
targets = [ ]

# Keep a local clone of the source tree so dump-target-info.pl can be run.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# Each output line starts with a "target/subtarget" token; the rest of the
# line (if any) is ignored here.
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])


# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
262
263 ####### SCHEDULERS
264
265 # Configure the Schedulers, which decide how to react to incoming changes. In this
266 # case, just kick off a 'basebuild' build
267
class TagChoiceParameter(BaseParameter):
    """ForceScheduler parameter listing the release tags that can be built.

    The choice list is refreshed from `git ls-remote --tags` each time the
    force-build form is rendered; only tags matching the branch's base
    version (e.g. v21.02.* for branch *-21.02) are offered.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        # Release branches end in "-NN.NN"; master has no tag choices.
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                # Idiom fix: startswith() instead of find(...) == 0.
                if tagver and tagver[1].startswith(basever[1]):
                    taglist.append(tagver[1])

        # The "-z" suffix makes a final release sort after its -rc tags.
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        # Empty first entry means "no tag selected" (build branch HEAD).
        taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # Reject values not present in the most recently computed choice list.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
309
c['schedulers'] = []
# Automatic scheduler: rebuild every target when the watched branch changes,
# after a 60 s tree-stable window.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual scheduler behind the "00_force_build" builder: lets an operator
# trigger one target (or all) and optionally pin a release tag.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Repository coordinates are fixed by this master's config, so they are
    # presented as non-editable (fixed) parameters.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    # Options land in the nested "options" property; IsTargetSelected and
    # TagPropertyValue read them back out during the build.
    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
363
364 ####### BUILDERS
365
366 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
367 # what steps, and which workers can execute them. Note that any particular build will
368 # only take place on one worker.
369
def IsTaggingRequested(step):
    """doStepIf helper: true when the 'tag' property holds a release tag
    such as "21.02.1" or "21.02.1-rc2"."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
376
def IsNoMasterBuild(step):
    """doStepIf helper: true on release branches, false when building master."""
    if repo_branch == "master":
        return False
    return True
379
def GetBaseVersion(branch=None):
    """Return the numeric base version encoded in a release branch name,
    e.g. "openwrt-21.02" -> "21.02", or "master" for anything else.

    branch: branch name to inspect; defaults to the configured repo_branch.
            (New optional parameter — existing zero-argument callers are
            unaffected.)
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    else:
        return "master"
385
@properties.renderer
def GetVersionPrefix(props):
    """Renderer: upload-directory prefix for this build.

    Tagged builds go to "<tag>/", release-branch builds to
    "<base>-SNAPSHOT/", and master builds to the tree root ("").
    """
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    basever = GetBaseVersion()
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
395
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer a pending request that carries a 'tag'
    property (forced release builds) over ordinary queued requests."""
    tagged = next(
        (req for req in requests if req.properties and req.properties.hasProperty("tag")),
        None)
    if tagged is not None:
        return tagged

    # No tagged request pending: take the first one and log the choice.
    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
404
def MakeEnv(overrides=None, tryccache=False):
    """Assemble the environment dict for build steps.

    CCC/CCXX always hold the detected host compilers. CC/CXX either point
    at the ccache wrapper scripts (tryccache=True) or directly at the
    compilers; CCACHE is the detected ccache binary or empty. Entries in
    'overrides' win over everything else.
    """
    env = {
        'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
        'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    }
    if tryccache:
        # Route compilation through the wrapper scripts installed in the
        # worker's build directory.
        env.update({
            'CC': Interpolate("%(prop:builddir)s/ccache_cc.sh"),
            'CXX': Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
            'CCACHE': Interpolate("%(prop:ccache_command:-)s"),
        })
    else:
        env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
    if overrides is not None:
        env.update(overrides)
    return env
421
@properties.renderer
def NetLockDl(props):
    """Renderer: exclusive access to this worker's download net lock, if
    the worker declared one (dl_lock property holds the lock's name)."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
431
@properties.renderer
def NetLockUl(props):
    """Renderer: exclusive access to this worker's upload net lock, if
    the worker declared one (ul_lock property holds the lock's name)."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
441
@util.renderer
def TagPropertyValue(props):
    """Renderer: extract the forced-build 'tag' value from the nested
    'options' property, or None when absent or malformed."""
    if not props.hasProperty("options"):
        return None
    options = props.getProperty("options")
    # The property may hold something other than the expected dict.
    return options.get("tag") if type(options) is dict else None
449
def IsTargetSelected(target):
    """Return a doStepIf callable that is true when the forced build chose
    this target (or "all"). Non-forced builds have no usable 'options'
    property and always count as selected."""
    def _check(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            chosen = options.get("target", "all")
            return chosen == "all" or chosen == target
        return True
    return _check
464
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key text from a base64-encoded secret key.

    seckey: base64-encoded usign secret key material.
    comment: comment line; a trailing "secret key" is rewritten to
             "public key".
    Returns the two-line public-key file content, or None when seckey is
    not valid base64 (caller treats None as "no key configured").
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # Fix: b64encode() returns bytes; the original interpolated the bytes
    # repr, writing "b'...'" into key-build.pub. Decode to plain text.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
473
474
c['builders'] = []

# Worker-side lock: serializes dl/ population across builds on one worker.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# The force scheduler's builder does no work itself; it only exists so the
# ForceScheduler has something to attach to.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
490
491 for target in targets:
492 ts = target.split('/')
493
494 factory = BuildFactory()
495
496 # setup shared work directory if required
497 factory.addStep(ShellCommand(
498 name = "sharedwd",
499 description = "Setting up shared work directory",
500 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
501 workdir = ".",
502 haltOnFailure = True))
503
504 # find number of cores
505 factory.addStep(SetPropertyFromCommand(
506 name = "nproc",
507 property = "nproc",
508 description = "Finding number of CPUs",
509 command = ["nproc"]))
510
511 # find gcc and g++ compilers
512 factory.addStep(FileDownload(
513 name = "dlfindbinpl",
514 mastersrc = scripts_dir + '/findbin.pl',
515 workerdest = "../findbin.pl",
516 mode = 0o755))
517
518 factory.addStep(SetPropertyFromCommand(
519 name = "gcc",
520 property = "cc_command",
521 description = "Finding gcc command",
522 command = [
523 "../findbin.pl", "gcc", "", "",
524 ],
525 haltOnFailure = True))
526
527 factory.addStep(SetPropertyFromCommand(
528 name = "g++",
529 property = "cxx_command",
530 description = "Finding g++ command",
531 command = [
532 "../findbin.pl", "g++", "", "",
533 ],
534 haltOnFailure = True))
535
536 # see if ccache is available
537 factory.addStep(SetPropertyFromCommand(
538 property = "ccache_command",
539 command = ["which", "ccache"],
540 description = "Testing for ccache command",
541 haltOnFailure = False,
542 flunkOnFailure = False,
543 warnOnFailure = False,
544 ))
545
546 # Workaround bug when switching from a checked out tag back to a branch
547 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
548 factory.addStep(ShellCommand(
549 name = "gitcheckout",
550 description = "Ensure that Git HEAD is sane",
551 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
552 haltOnFailure = True))
553
554 # check out the source
555 # Git() runs:
556 # if repo doesn't exist: 'git clone repourl'
557 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
558 # 'git fetch -t repourl branch; git reset --hard revision'
559 factory.addStep(Git(
560 name = "git",
561 repourl = repo_url,
562 branch = repo_branch,
563 mode = 'full',
564 method = 'fresh',
565 locks = NetLockDl,
566 haltOnFailure = True,
567 ))
568
569 # update remote refs
570 factory.addStep(ShellCommand(
571 name = "fetchrefs",
572 description = "Fetching Git remote refs",
573 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
574 haltOnFailure = True
575 ))
576
577 # switch to tag
578 factory.addStep(ShellCommand(
579 name = "switchtag",
580 description = "Checking out Git tag",
581 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
582 haltOnFailure = True,
583 doStepIf = IsTaggingRequested
584 ))
585
586 # Verify that Git HEAD points to a tag or branch
587 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
588 factory.addStep(ShellCommand(
589 name = "gitverify",
590 description = "Ensure that Git HEAD is pointing to a branch or tag",
591 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
592 haltOnFailure = True))
593
594 factory.addStep(ShellCommand(
595 name = "rmtmp",
596 description = "Remove tmp folder",
597 command=["rm", "-rf", "tmp/"]))
598
599 # feed
600 factory.addStep(ShellCommand(
601 name = "rmfeedlinks",
602 description = "Remove feed symlinks",
603 command=["rm", "-rf", "package/feeds/"]))
604
605 factory.addStep(StringDownload(
606 name = "ccachecc",
607 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
608 workerdest = "../ccache_cc.sh",
609 mode = 0o755,
610 ))
611
612 factory.addStep(StringDownload(
613 name = "ccachecxx",
614 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
615 workerdest = "../ccache_cxx.sh",
616 mode = 0o755,
617 ))
618
619 # feed
620 factory.addStep(ShellCommand(
621 name = "updatefeeds",
622 description = "Updating feeds",
623 command=["./scripts/feeds", "update"],
624 env = MakeEnv(tryccache=True),
625 haltOnFailure = True,
626 locks = NetLockDl,
627 ))
628
629 # feed
630 factory.addStep(ShellCommand(
631 name = "installfeeds",
632 description = "Installing feeds",
633 command=["./scripts/feeds", "install", "-a"],
634 env = MakeEnv(tryccache=True),
635 haltOnFailure = True
636 ))
637
638 # seed config
639 if config_seed is not None:
640 factory.addStep(StringDownload(
641 name = "dlconfigseed",
642 s = config_seed + '\n',
643 workerdest = ".config",
644 mode = 0o644
645 ))
646
647 # configure
648 factory.addStep(ShellCommand(
649 name = "newconfig",
650 description = "Seeding .config",
651 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
652 ))
653
654 factory.addStep(ShellCommand(
655 name = "delbin",
656 description = "Removing output directory",
657 command = ["rm", "-rf", "bin/"]
658 ))
659
660 factory.addStep(ShellCommand(
661 name = "defconfig",
662 description = "Populating .config",
663 command = ["make", "defconfig"],
664 env = MakeEnv()
665 ))
666
667 # check arch
668 factory.addStep(ShellCommand(
669 name = "checkarch",
670 description = "Checking architecture",
671 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
672 logEnviron = False,
673 want_stdout = False,
674 want_stderr = False,
675 haltOnFailure = True
676 ))
677
678 # find libc suffix
679 factory.addStep(SetPropertyFromCommand(
680 name = "libc",
681 property = "libc",
682 description = "Finding libc suffix",
683 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
684
685 # install build key
686 if usign_key is not None:
687 factory.addStep(StringDownload(
688 name = "dlkeybuildpub",
689 s = UsignSec2Pub(usign_key, usign_comment),
690 workerdest = "key-build.pub",
691 mode = 0o600,
692 ))
693
694 factory.addStep(StringDownload(
695 name = "dlkeybuild",
696 s = "# fake private key",
697 workerdest = "key-build",
698 mode = 0o600,
699 ))
700
701 factory.addStep(StringDownload(
702 name = "dlkeybuilducert",
703 s = "# fake certificate",
704 workerdest = "key-build.ucert",
705 mode = 0o600,
706 ))
707
708 # prepare dl
709 factory.addStep(ShellCommand(
710 name = "dldir",
711 description = "Preparing dl/",
712 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
713 logEnviron = False,
714 want_stdout = False
715 ))
716
717 # prepare tar
718 factory.addStep(ShellCommand(
719 name = "dltar",
720 description = "Building and installing GNU tar",
721 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
722 env = MakeEnv(tryccache=True),
723 haltOnFailure = True
724 ))
725
726 # populate dl
727 factory.addStep(ShellCommand(
728 name = "dlrun",
729 description = "Populating dl/",
730 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
731 env = MakeEnv(),
732 logEnviron = False,
733 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
734 ))
735
736 factory.addStep(ShellCommand(
737 name = "cleanbase",
738 description = "Cleaning base-files",
739 command=["make", "package/base-files/clean", "V=s"]
740 ))
741
742 # build
743 factory.addStep(ShellCommand(
744 name = "tools",
745 description = "Building and installing tools",
746 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
747 env = MakeEnv(tryccache=True),
748 haltOnFailure = True
749 ))
750
751 factory.addStep(ShellCommand(
752 name = "toolchain",
753 description = "Building and installing toolchain",
754 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
755 env = MakeEnv(),
756 haltOnFailure = True
757 ))
758
759 factory.addStep(ShellCommand(
760 name = "kmods",
761 description = "Building kmods",
762 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
763 env = MakeEnv(),
764 haltOnFailure = True
765 ))
766
767 # find kernel version
768 factory.addStep(SetPropertyFromCommand(
769 name = "kernelversion",
770 property = "kernelversion",
771 description = "Finding the effective Kernel version",
772 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
773 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
774 ))
775
776 factory.addStep(ShellCommand(
777 name = "pkgclean",
778 description = "Cleaning up package build",
779 command=["make", "package/cleanup", "V=s"]
780 ))
781
782 factory.addStep(ShellCommand(
783 name = "pkgbuild",
784 description = "Building packages",
785 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
786 env = MakeEnv(),
787 haltOnFailure = True
788 ))
789
790 factory.addStep(ShellCommand(
791 name = "pkginstall",
792 description = "Installing packages",
793 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
794 env = MakeEnv(),
795 haltOnFailure = True
796 ))
797
798 factory.addStep(ShellCommand(
799 name = "pkgindex",
800 description = "Indexing packages",
801 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
802 env = MakeEnv(),
803 haltOnFailure = True
804 ))
805
806 factory.addStep(ShellCommand(
807 name = "images",
808 description = "Building and installing images",
809 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
810 env = MakeEnv(),
811 haltOnFailure = True
812 ))
813
814 factory.addStep(ShellCommand(
815 name = "buildinfo",
816 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
817 command = "make -j1 buildinfo V=s || true",
818 env = MakeEnv(),
819 haltOnFailure = True
820 ))
821
822 factory.addStep(ShellCommand(
823 name = "json_overview_image_info",
824 description = "Generate profiles.json in target folder",
825 command = "make -j1 json_overview_image_info V=s || true",
826 env = MakeEnv(),
827 haltOnFailure = True
828 ))
829
830 factory.addStep(ShellCommand(
831 name = "checksums",
832 description = "Calculating checksums",
833 command=["make", "-j1", "checksum", "V=s"],
834 env = MakeEnv(),
835 haltOnFailure = True
836 ))
837
838 if enable_kmod_archive:
839 factory.addStep(ShellCommand(
840 name = "kmoddir",
841 description = "Creating kmod directory",
842 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
843 haltOnFailure = True
844 ))
845
846 factory.addStep(ShellCommand(
847 name = "kmodprepare",
848 description = "Preparing kmod archive",
849 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
850 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
851 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
852 haltOnFailure = True
853 ))
854
855 factory.addStep(ShellCommand(
856 name = "kmodindex",
857 description = "Indexing kmod archive",
858 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
859 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
860 env = MakeEnv(),
861 haltOnFailure = True
862 ))
863
864 # sign
865 if ini.has_option("gpg", "key") or usign_key is not None:
866 factory.addStep(MasterShellCommand(
867 name = "signprepare",
868 description = "Preparing temporary signing directory",
869 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
870 haltOnFailure = True
871 ))
872
873 factory.addStep(ShellCommand(
874 name = "signpack",
875 description = "Packing files to sign",
876 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
877 haltOnFailure = True
878 ))
879
880 factory.addStep(FileUpload(
881 workersrc = "sign.tar.gz",
882 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
883 haltOnFailure = True
884 ))
885
886 factory.addStep(MasterShellCommand(
887 name = "signfiles",
888 description = "Signing files",
889 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
890 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
891 haltOnFailure = True
892 ))
893
894 factory.addStep(FileDownload(
895 name = "dlsigntargz",
896 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
897 workerdest = "sign.tar.gz",
898 haltOnFailure = True
899 ))
900
901 factory.addStep(ShellCommand(
902 name = "signunpack",
903 description = "Unpacking signed files",
904 command = ["tar", "-xzf", "sign.tar.gz"],
905 haltOnFailure = True
906 ))
907
908 # upload
909 factory.addStep(ShellCommand(
910 name = "dirprepare",
911 description = "Preparing upload directory structure",
912 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
913 haltOnFailure = True
914 ))
915
916 factory.addStep(ShellCommand(
917 name = "linkprepare",
918 description = "Preparing repository symlink",
919 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
920 doStepIf = IsNoMasterBuild,
921 haltOnFailure = True
922 ))
923
924 if enable_kmod_archive:
925 factory.addStep(ShellCommand(
926 name = "kmoddirprepare",
927 description = "Preparing kmod archive upload directory",
928 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
929 haltOnFailure = True
930 ))
931
932 factory.addStep(ShellCommand(
933 name = "dirupload",
934 description = "Uploading directory structure",
935 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
936 env={'RSYNC_PASSWORD': rsync_bin_key},
937 haltOnFailure = True,
938 logEnviron = False,
939 locks = NetLockUl,
940 ))
941
942 # download remote sha256sums to 'target-sha256sums'
943 factory.addStep(ShellCommand(
944 name = "target-sha256sums",
945 description = "Fetching remote sha256sums for target",
946 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
947 env={'RSYNC_PASSWORD': rsync_bin_key},
948 logEnviron = False,
949 haltOnFailure = False,
950 flunkOnFailure = False,
951 warnOnFailure = False,
952 ))
953
954 # build list of files to upload
955 factory.addStep(FileDownload(
956 name = "dlsha2rsyncpl",
957 mastersrc = scripts_dir + '/sha2rsync.pl',
958 workerdest = "../sha2rsync.pl",
959 mode = 0o755,
960 ))
961
962 factory.addStep(ShellCommand(
963 name = "buildlist",
964 description = "Building list of files to upload",
965 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
966 haltOnFailure = True,
967 ))
968
969 factory.addStep(FileDownload(
970 name = "dlrsync.sh",
971 mastersrc = scripts_dir + '/rsync.sh',
972 workerdest = "../rsync.sh",
973 mode = 0o755
974 ))
975
976 # upload new files and update existing ones
977 factory.addStep(ShellCommand(
978 name = "targetupload",
979 description = "Uploading target files",
980 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
981 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
982 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
983 env={'RSYNC_PASSWORD': rsync_bin_key},
984 haltOnFailure = True,
985 logEnviron = False,
986 ))
987
988 # delete files which don't exist locally
989 factory.addStep(ShellCommand(
990 name = "targetprune",
991 description = "Pruning target files",
992 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
993 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
994 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
995 env={'RSYNC_PASSWORD': rsync_bin_key},
996 haltOnFailure = True,
997 logEnviron = False,
998 locks = NetLockUl,
999 ))
1000
1001 if enable_kmod_archive:
1002 factory.addStep(ShellCommand(
1003 name = "kmodupload",
1004 description = "Uploading kmod archive",
1005 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1006 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1007 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1008 env={'RSYNC_PASSWORD': rsync_bin_key},
1009 haltOnFailure = True,
1010 logEnviron = False,
1011 locks = NetLockUl,
1012 ))
1013
1014 if rsync_src_url is not None:
1015 factory.addStep(ShellCommand(
1016 name = "sourcelist",
1017 description = "Finding source archives to upload",
1018 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1019 haltOnFailure = True
1020 ))
1021
1022 factory.addStep(ShellCommand(
1023 name = "sourceupload",
1024 description = "Uploading source archives",
1025 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1026 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1027 env={'RSYNC_PASSWORD': rsync_src_key},
1028 haltOnFailure = True,
1029 logEnviron = False,
1030 locks = NetLockUl,
1031 ))
1032
1033 factory.addStep(ShellCommand(
1034 name = "df",
1035 description = "Reporting disk usage",
1036 command=["df", "-h", "."],
1037 env={'LC_ALL': 'C'},
1038 haltOnFailure = False,
1039 flunkOnFailure = False,
1040 warnOnFailure = False,
1041 alwaysRun = True
1042 ))
1043
1044 factory.addStep(ShellCommand(
1045 name = "du",
1046 description = "Reporting estimated file space usage",
1047 command=["du", "-sh", "."],
1048 env={'LC_ALL': 'C'},
1049 haltOnFailure = False,
1050 flunkOnFailure = False,
1051 warnOnFailure = False,
1052 alwaysRun = True
1053 ))
1054
1055 factory.addStep(ShellCommand(
1056 name = "ccachestat",
1057 description = "Reporting ccache stats",
1058 command=["ccache", "-s"],
1059 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1060 want_stderr = False,
1061 haltOnFailure = False,
1062 flunkOnFailure = False,
1063 warnOnFailure = False,
1064 alwaysRun = True,
1065 ))
1066
	# register the per-target builder with its completed factory
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	# a Triggerable scheduler per target, plus a conditional Trigger step on the
	# force-build factory so a forced build fans out only to the selected targets
	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		# forward the force reason and tag to the triggered build
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		# only fire when this target was selected in the force form
		doStepIf = IsTargetSelected(target)
	))
1077
1078
1079 ####### STATUS TARGETS
1080
# The results of each build are reported through the status targets configured
# below: the 'www' web UI (waterfall, console and grid views) and optional
# 'services' reporters such as an IRC bot.
1084
# Web UI: serve the waterfall/console/grid views on the configured endpoint;
# when credentials are configured, control endpoints require the single
# "admins"-role account.
if "status_bind" in inip1:
	c['www'] = {
		'port': inip1.get("status_bind"),
		'plugins': {'waterfall_view': True, 'console_view': True, 'grid_view': True},
	}

	if "status_user" in inip1 and "status_password" in inip1:
		admin_user = inip1.get("status_user")
		c['www']['auth'] = util.UserPasswordAuth([
			(admin_user, inip1.get("status_password")),
		])
		c['www']['authz'] = util.Authz(
			allowRules=[util.AnyControlEndpointMatcher(role="admins")],
			roleMatchers=[util.RolesFromUsername(roles=["admins"], usernames=[admin_user])],
		)
1103
c['services'] = []
# IRC reporter: announce exceptions, problems and recoveries on the configured
# channel. Only enabled when host, nickname and channel are all set.
if "irc" in ini:  # membership test for consistency with the other "x in ini" checks
	iniirc = ini['irc']
	irc_host = iniirc.get("host", None)
	irc_port = iniirc.getint("port", 6667)
	irc_chan = iniirc.get("channel", None)
	irc_nick = iniirc.get("nickname", None)
	irc_pass = iniirc.get("password", None)

	if irc_host and irc_nick and irc_chan:
		irc = reporters.IRC(irc_host, irc_nick,
			port = irc_port,
			password = irc_pass,
			channels = [ irc_chan ],
			notify_events = [ 'exception', 'problem', 'recovery' ]
		)

		c['services'].append(irc)
1122
# Map openwrt.org git clone URLs seen in changes to web commit links.
c['revlink'] = util.RevlinkMatch(
	[r'https://git.openwrt.org/openwrt/(.*).git'],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1127
1128 ####### DB URL
1129
c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# Opt out of sending anonymous usage statistics to the buildbot project.
c['buildbotNetUsageData'] = None