phase1: refactor populateTargets()
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is the phase1 buildmaster configuration file. It must be installed as
49 # 'master.cfg' in the buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
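# A minimal config.ini sketch with placeholder values (only sections and keys
# actually read by this file are shown; usign/gpg/irc and further worker
# sections are optional):
#
#   [general]
#   title = Example buildbot
#   title_url = https://example.org/
#   workdir = /buildbot
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010
#
#   [repo]
#   url = https://example.org/project.git
#   branch = master
#
#   [rsync]
#   binary_url = uploads.example.org::bin
#   binary_password = secret
#
#   [worker 1]
#   name = example-worker-01
#   password = secret
#   phase = 1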
54 if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
55 raise ValueError("Fix your configuration")
56
57 inip1 = ini['phase1']
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 config_seed = inip1.get("config_seed", "")
64
65 repo_url = ini['repo'].get("url")
66 repo_branch = ini['repo'].get("branch", "master")
67
68 rsync_bin_url = ini['rsync'].get("binary_url")
69 rsync_bin_key = ini['rsync'].get("binary_password")
70 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
71
72 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
73 rsync_bin_defopts += ["--contimeout=20"]
74
75 rsync_src_url = ini['rsync'].get("source_url")
76 rsync_src_key = ini['rsync'].get("source_password")
77 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
78
79 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
80 rsync_src_defopts += ["--contimeout=20"]
81
82 usign_key = None
83 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
84
85 if ini.has_section("usign"):
86 usign_key = ini['usign'].get("key")
87 usign_comment = ini['usign'].get("comment", usign_comment)
88
89 enable_kmod_archive = inip1.getboolean("kmod_archive", False)
90
91 # PB port can be either a numeric port or a connection string
92 pb_port = inip1.get("port") or 9989
93
94 # This is the dictionary that the buildmaster pays attention to. We also use
95 # a shorter alias to save typing.
96 c = BuildmasterConfig = {}
97
98 ####### PROJECT IDENTITY
99
100 # the 'title' string will appear at the top of this buildbot
101 # installation's web UI home page (linked to the
102 # 'titleURL') and is embedded in the HTML page titles.
103
104 c['title'] = ini['general'].get("title")
105 c['titleURL'] = ini['general'].get("title_url")
106
107 # the 'buildbotURL' string should point to the location where the buildbot's
108 # internal web server (the 'www' UI configured below) is visible. This
109 # typically uses the port number set via 'status_bind', but
110 # with an externally-visible host name which the buildbot cannot figure out
111 # without some help.
112
113 c['buildbotURL'] = inip1.get("buildbot_url")
114
115 ####### BUILDWORKERS
116
117 # The 'workers' list defines the set of recognized build workers. Each element is
118 # a Worker object, specifying a unique worker name and password. The same
119 # worker name and password must be configured on the worker.
120
121 c['workers'] = []
122 NetLocks = dict()
123
124 for section in ini.sections():
125 if section.startswith("worker "):
126 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
127 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
128 sl_props = { 'dl_lock':None, 'ul_lock':None }
129 name = ini.get(section, "name")
130 password = ini.get(section, "password")
131 if ini.has_option(section, "dl_lock"):
132 lockname = ini.get(section, "dl_lock")
133 sl_props['dl_lock'] = lockname
134 if lockname not in NetLocks:
135 NetLocks[lockname] = locks.MasterLock(lockname)
136 if ini.has_option(section, "ul_lock"):
137 lockname = ini.get(section, "ul_lock")
138 sl_props['ul_lock'] = lockname
139 if lockname not in NetLocks:
140 NetLocks[lockname] = locks.MasterLock(lockname)
141 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
142
143 c['protocols'] = {'pb': {'port': pb_port}}
144
145 # coalesce builds
146 c['collapseRequests'] = True
147
148 # Reduce the amount of backlog data: the Janitor deletes build logs older than three days, running daily at 06:00
149 c['configurators'] = [util.JanitorConfigurator(
150 logHorizon=timedelta(days=3),
151 hour=6,
152 )]
153
154 @defer.inlineCallbacks
155 def getNewestCompleteTime(bldr):
156 """Returns the complete_at of the latest completed and not SKIPPED
157 build request for this builder, or None if there are no such build
158 requests. We need to filter out SKIPPED requests because we're
159 using collapseRequests=True, which unfortunately marks all
160 previous requests as complete when a new buildset is created.
161
162 @returns: datetime instance or None, via Deferred
163 """
164
165 bldrid = yield bldr.getBuilderId()
166 completed = yield bldr.master.data.get(
167 ('builders', bldrid, 'buildrequests'),
168 [
169 resultspec.Filter('complete', 'eq', [True]),
170 resultspec.Filter('results', 'ne', [results.SKIPPED]),
171 ],
172 order=['-complete_at'], limit=1)
173 if not completed:
174 return
175
176 complete_at = completed[0]['complete_at']
177
178 last_build = yield bldr.master.data.get(
179 ('builds', ),
180 [
181 resultspec.Filter('builderid', 'eq', [bldrid]),
182 ],
183 order=['-started_at'], limit=1)
184
185 if last_build and last_build[0]:
186 last_complete_at = last_build[0]['complete_at']
187 if last_complete_at and (last_complete_at > complete_at):
188 return last_complete_at
189
190 return complete_at
191
192 @defer.inlineCallbacks
193 def prioritizeBuilders(master, builders):
194 """Returns sorted list of builders by their last timestamp of completed and
195 not skipped build.
196
197 @returns: list of sorted builders
198 """
199
200 def is_building(bldr):
201 return bool(bldr.building) or bool(bldr.old_building)
202
203 def bldr_info(bldr):
204 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
205 d.addCallback(lambda complete_at: (complete_at, bldr))
206 return d
207
208 def bldr_sort(item):
209 (complete_at, bldr) = item
210
211 if not complete_at:
212 date = datetime.min
213 complete_at = date.replace(tzinfo=tzutc())
214
215 if is_building(bldr):
216 date = datetime.max
217 complete_at = date.replace(tzinfo=tzutc())
218
219 return (complete_at, bldr.name)
220
221 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
222 results.sort(key=bldr_sort)
223
224 for r in results:
225 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
226
227 return [r[1] for r in results]
228
229 c['prioritizeBuilders'] = prioritizeBuilders
230
231 ####### CHANGESOURCES
232
233
234 # find targets
235 targets = [ ]
236
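# populateTargets() fills the global 'targets' list with one "<target>/<subtarget>"
# entry per output line of scripts/dump-target-info.pl (first whitespace-separated
# field), using a throwaway shallow clone of the configured repository; the clone
# is removed again once the list has been read.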
237 def populateTargets():
238 sourcegit = work_dir + '/source.git'
239 if os.path.isdir(sourcegit):
240 subprocess.call(["rm", "-rf", sourcegit])
241
242 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, sourcegit])
243
244 os.makedirs(sourcegit + '/tmp', exist_ok=True)
245 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
246 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
247
248 while True:
249 line = findtargets.stdout.readline()
250 if not line:
251 break
252 ta = line.decode().strip().split(' ')
253 targets.append(ta[0])
254
255 subprocess.call(["rm", "-rf", sourcegit])
256
257 populateTargets()
258
259 # the 'change_source' setting tells the buildmaster how it should find out
260 # about source code changes. Here we poll the Git repository configured in the 'repo' section.
261
262 c['change_source'] = []
263 c['change_source'].append(GitPoller(
264 repo_url,
265 workdir=work_dir+'/work.git', branch=repo_branch,
266 pollinterval=300))
267
268 ####### SCHEDULERS
269
270 # Configure the Schedulers, which decide how to react to incoming changes. In this
271 # case, a change on the configured branch kicks off a build on every target builder
272
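# TagChoiceParameter offers the release tags matching this branch's base version
# (tags vNN.NN.x for a branch named "*-NN.NN") as choices for the force scheduler;
# the newest version is listed first, final releases sort before their -rc
# candidates, and the empty entry means "build the branch head, no tag".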
273 class TagChoiceParameter(BaseParameter):
274 spec_attributes = ["strict", "choices"]
275 type = "list"
276 strict = True
277
278 def __init__(self, name, label=None, **kw):
279 super().__init__(name, label, **kw)
280 self._choice_list = []
281
282 @property
283 def choices(self):
284 taglist = []
285 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
286
287 if basever:
288 findtags = subprocess.Popen(
289 ['git', 'ls-remote', '--tags', repo_url],
290 stdout = subprocess.PIPE)
291
292 while True:
293 line = findtags.stdout.readline()
294
295 if not line:
296 break
297
298 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
299
300 if tagver and tagver[1].find(basever[1]) == 0:
301 taglist.append(tagver[1])
302
303 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
304 taglist.insert(0, '')
305
306 self._choice_list = taglist
307
308 return self._choice_list
309
310 def parse_from_arg(self, s):
311 if self.strict and s not in self._choice_list:
312 raise ValidationError("'%s' is not in the list of available choices %s" % (s, self._choice_list))
313 return s
314
315 c['schedulers'] = []
316 c['schedulers'].append(SingleBranchScheduler(
317 name = "all",
318 change_filter = filter.ChangeFilter(branch=repo_branch),
319 treeStableTimer = 60,
320 builderNames = targets))
321
322 c['schedulers'].append(ForceScheduler(
323 name = "force",
324 buttonName = "Force builds",
325 label = "Force build details",
326 builderNames = [ "00_force_build" ],
327
328 codebases = [
329 util.CodebaseParameter(
330 "",
331 label = "Repository",
332 branch = util.FixedParameter(name = "branch", default = ""),
333 revision = util.FixedParameter(name = "revision", default = ""),
334 repository = util.FixedParameter(name = "repository", default = ""),
335 project = util.FixedParameter(name = "project", default = "")
336 )
337 ],
338
339 reason = util.StringParameter(
340 name = "reason",
341 label = "Reason",
342 default = "Trigger build",
343 required = True,
344 size = 80
345 ),
346
347 properties = [
348 util.NestedParameter(
349 name="options",
350 label="Build Options",
351 layout="vertical",
352 fields=[
353 util.ChoiceStringParameter(
354 name = "target",
355 label = "Build target",
356 default = "all",
357 choices = [ "all" ] + targets
358 ),
359 TagChoiceParameter(
360 name = "tag",
361 label = "Build tag",
362 default = ""
363 )
364 ]
365 )
366 ]
367 ))
368
369 ####### BUILDERS
370
371 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
372 # what steps, and which workers can execute them. Note that any particular build will
373 # only take place on one worker.
374
375 def IsTaggingRequested(step):
376 val = step.getProperty("tag")
377 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
378 return True
379 else:
380 return False
381
382 def IsNoMasterBuild(step):
383 return repo_branch != "master"
384
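# GetBaseVersion() extracts the numeric base version from a release branch name
# of the form "<name>-NN.NN" (e.g. "openwrt-21.02" -> "21.02"); any other branch
# name is treated as "master".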
385 def GetBaseVersion():
386 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
387 return repo_branch.split('-')[1]
388 else:
389 return "master"
390
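# GetVersionPrefix yields the upload path prefix: "X.Y.Z/" when building a
# release tag, "NN.NN-SNAPSHOT/" on a release branch, and "" (no prefix) on master.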
391 @properties.renderer
392 def GetVersionPrefix(props):
393 basever = GetBaseVersion()
394 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
395 return "%s/" % props["tag"]
396 elif basever != "master":
397 return "%s-SNAPSHOT/" % basever
398 else:
399 return ""
400
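# GetNextBuild prefers pending build requests that carry a "tag" property, so
# forced release (tag) builds jump ahead of ordinary branch builds in the queue.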
401 def GetNextBuild(builder, requests):
402 for r in requests:
403 if r.properties and r.properties.hasProperty("tag"):
404 return r
405
406 r = requests[0]
407 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
408 return r
409
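# MakeEnv() assembles the build environment: CCC/CCXX always hold the detected
# host compilers, while CC/CXX either point at the ccache wrapper scripts
# (tryccache=True, see the ccache_cc.sh/ccache_cxx.sh downloads below) or fall
# back to the plain compilers when ccache is not used.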
410 def MakeEnv(overrides=None, tryccache=False):
411 env = {
412 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
413 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
414 }
415 if tryccache:
416 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
417 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
418 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
419 else:
420 env['CC'] = env['CCC']
421 env['CXX'] = env['CCXX']
422 env['CCACHE'] = ''
423 if overrides is not None:
424 env.update(overrides)
425 return env
426
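# NetLockDl/NetLockUl turn the per-worker "dl_lock"/"ul_lock" properties (set
# from the worker sections of config.ini) into master locks, serializing
# download and upload steps across all workers sharing the same lock name.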
427 @properties.renderer
428 def NetLockDl(props):
429 lock = None
430 if props.hasProperty("dl_lock"):
431 lock = NetLocks[props["dl_lock"]]
432 if lock is not None:
433 return [lock.access('exclusive')]
434 else:
435 return []
436
437 @properties.renderer
438 def NetLockUl(props):
439 lock = None
440 if props.hasProperty("ul_lock"):
441 lock = NetLocks[props["ul_lock"]]
442 if lock is not None:
443 return [lock.access('exclusive')]
444 else:
445 return []
446
447 @util.renderer
448 def TagPropertyValue(props):
449 if props.hasProperty("options"):
450 options = props.getProperty("options")
451 if type(options) is dict:
452 return options.get("tag")
453 return None
454
455 def IsTargetSelected(target):
456 def CheckTargetProperty(step):
457 try:
458 options = step.getProperty("options")
459 if type(options) is dict:
460 selected_target = options.get("target", "all")
461 if selected_target != "all" and selected_target != target:
462 return False
463 except KeyError:
464 pass
465
466 return True
467
468 return CheckTargetProperty
469
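# UsignSec2Pub() derives the matching usign/signify public key from the
# base64-encoded secret key; the byte slices below presumably correspond to the
# signify key layout (algorithm id, key number and the public half of the
# Ed25519 key pair).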
470 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
471 try:
472 seckey = base64.b64decode(seckey)
473 except Exception:
474 return None
475
476 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
477 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
478
479
480 c['builders'] = []
481
482 dlLock = locks.WorkerLock("worker_dl")
483
484 workerNames = [ ]
485
486 for worker in c['workers']:
487 workerNames.append(worker.workername)
488
489 force_factory = BuildFactory()
490
491 c['builders'].append(BuilderConfig(
492 name = "00_force_build",
493 workernames = workerNames,
494 factory = force_factory))
495
496 for target in targets:
497 ts = target.split('/')
498
499 factory = BuildFactory()
500
501 # set up the shared work directory if required: replace this builder's work dir with a symlink to ../shared-workdir so builders on the same worker reuse one tree
502 factory.addStep(ShellCommand(
503 name = "sharedwd",
504 description = "Setting up shared work directory",
505 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
506 workdir = ".",
507 haltOnFailure = True))
508
509 # find number of cores
510 factory.addStep(SetPropertyFromCommand(
511 name = "nproc",
512 property = "nproc",
513 description = "Finding number of CPUs",
514 command = ["nproc"]))
515
516 # find gcc and g++ compilers
517 factory.addStep(FileDownload(
518 name = "dlfindbinpl",
519 mastersrc = scripts_dir + '/findbin.pl',
520 workerdest = "../findbin.pl",
521 mode = 0o755))
522
523 factory.addStep(SetPropertyFromCommand(
524 name = "gcc",
525 property = "cc_command",
526 description = "Finding gcc command",
527 command = [
528 "../findbin.pl", "gcc", "", "",
529 ],
530 haltOnFailure = True))
531
532 factory.addStep(SetPropertyFromCommand(
533 name = "g++",
534 property = "cxx_command",
535 description = "Finding g++ command",
536 command = [
537 "../findbin.pl", "g++", "", "",
538 ],
539 haltOnFailure = True))
540
541 # see if ccache is available
542 factory.addStep(SetPropertyFromCommand(
543 property = "ccache_command",
544 command = ["which", "ccache"],
545 description = "Testing for ccache command",
546 haltOnFailure = False,
547 flunkOnFailure = False,
548 warnOnFailure = False,
549 ))
550
551 # Work around a bug when switching from a checked-out tag back to a branch
552 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
553 factory.addStep(ShellCommand(
554 name = "gitcheckout",
555 description = "Ensure that Git HEAD is sane",
556 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
557 haltOnFailure = True))
558
559 # check out the source
560 # Git() runs:
561 # if repo doesn't exist: 'git clone repourl'
562 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Both only work with mode='full'
563 # 'git fetch -t repourl branch; git reset --hard revision'
564 factory.addStep(Git(
565 name = "git",
566 repourl = repo_url,
567 branch = repo_branch,
568 mode = 'full',
569 method = 'fresh',
570 locks = NetLockDl,
571 haltOnFailure = True,
572 ))
573
574 # update remote refs
575 factory.addStep(ShellCommand(
576 name = "fetchrefs",
577 description = "Fetching Git remote refs",
578 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
579 haltOnFailure = True
580 ))
581
582 # switch to tag
583 factory.addStep(ShellCommand(
584 name = "switchtag",
585 description = "Checking out Git tag",
586 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
587 haltOnFailure = True,
588 doStepIf = IsTaggingRequested
589 ))
590
591 # Verify that Git HEAD points to a tag or branch
592 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
593 factory.addStep(ShellCommand(
594 name = "gitverify",
595 description = "Ensure that Git HEAD is pointing to a branch or tag",
596 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
597 haltOnFailure = True))
598
599 factory.addStep(ShellCommand(
600 name = "rmtmp",
601 description = "Remove tmp folder",
602 command=["rm", "-rf", "tmp/"]))
603
604 # feed
605 factory.addStep(ShellCommand(
606 name = "rmfeedlinks",
607 description = "Remove feed symlinks",
608 command=["rm", "-rf", "package/feeds/"]))
609
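# The two wrapper scripts below simply exec "${CCACHE} ${CCC}" / "${CCACHE} ${CCXX}",
# so steps built with MakeEnv(tryccache=True) go through ccache when a ccache
# binary was found and degrade to the plain compiler otherwise (CCACHE expands
# to an empty string in that case).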
610 factory.addStep(StringDownload(
611 name = "ccachecc",
612 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
613 workerdest = "../ccache_cc.sh",
614 mode = 0o755,
615 ))
616
617 factory.addStep(StringDownload(
618 name = "ccachecxx",
619 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
620 workerdest = "../ccache_cxx.sh",
621 mode = 0o755,
622 ))
623
624 # feed
625 factory.addStep(ShellCommand(
626 name = "updatefeeds",
627 description = "Updating feeds",
628 command=["./scripts/feeds", "update"],
629 env = MakeEnv(tryccache=True),
630 haltOnFailure = True,
631 locks = NetLockDl,
632 ))
633
634 # feed
635 factory.addStep(ShellCommand(
636 name = "installfeeds",
637 description = "Installing feeds",
638 command=["./scripts/feeds", "install", "-a"],
639 env = MakeEnv(tryccache=True),
640 haltOnFailure = True
641 ))
642
643 # seed config
644 if config_seed is not None:
645 factory.addStep(StringDownload(
646 name = "dlconfigseed",
647 s = config_seed + '\n',
648 workerdest = ".config",
649 mode = 0o644
650 ))
651
652 # configure
653 factory.addStep(ShellCommand(
654 name = "newconfig",
655 description = "Seeding .config",
656 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
657 ))
658
659 factory.addStep(ShellCommand(
660 name = "delbin",
661 description = "Removing output directory",
662 command = ["rm", "-rf", "bin/"]
663 ))
664
665 factory.addStep(ShellCommand(
666 name = "defconfig",
667 description = "Populating .config",
668 command = ["make", "defconfig"],
669 env = MakeEnv()
670 ))
671
672 # check arch
673 factory.addStep(ShellCommand(
674 name = "checkarch",
675 description = "Checking architecture",
676 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
677 logEnviron = False,
678 want_stdout = False,
679 want_stderr = False,
680 haltOnFailure = True
681 ))
682
683 # find libc suffix
684 factory.addStep(SetPropertyFromCommand(
685 name = "libc",
686 property = "libc",
687 description = "Finding libc suffix",
688 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
689
690 # install build key
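# Only the public key derived from the configured usign secret is real; the
# private key and ucert written to the worker are placeholders, as the actual
# signing of the uploaded artifacts appears to happen on the master via
# signall.sh further below.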
691 if usign_key is not None:
692 factory.addStep(StringDownload(
693 name = "dlkeybuildpub",
694 s = UsignSec2Pub(usign_key, usign_comment),
695 workerdest = "key-build.pub",
696 mode = 0o600,
697 ))
698
699 factory.addStep(StringDownload(
700 name = "dlkeybuild",
701 s = "# fake private key",
702 workerdest = "key-build",
703 mode = 0o600,
704 ))
705
706 factory.addStep(StringDownload(
707 name = "dlkeybuilducert",
708 s = "# fake certificate",
709 workerdest = "key-build.ucert",
710 mode = 0o600,
711 ))
712
713 # prepare dl
714 factory.addStep(ShellCommand(
715 name = "dldir",
716 description = "Preparing dl/",
717 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
718 logEnviron = False,
719 want_stdout = False
720 ))
721
722 # prepare tar
723 factory.addStep(ShellCommand(
724 name = "dltar",
725 description = "Building and installing GNU tar",
726 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
727 env = MakeEnv(tryccache=True),
728 haltOnFailure = True
729 ))
730
731 # populate dl
732 factory.addStep(ShellCommand(
733 name = "dlrun",
734 description = "Populating dl/",
735 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
736 env = MakeEnv(),
737 logEnviron = False,
738 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
739 ))
740
741 factory.addStep(ShellCommand(
742 name = "cleanbase",
743 description = "Cleaning base-files",
744 command=["make", "package/base-files/clean", "V=s"]
745 ))
746
747 # build
748 factory.addStep(ShellCommand(
749 name = "tools",
750 description = "Building and installing tools",
751 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
752 env = MakeEnv(tryccache=True),
753 haltOnFailure = True
754 ))
755
756 factory.addStep(ShellCommand(
757 name = "toolchain",
758 description = "Building and installing toolchain",
759 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
760 env = MakeEnv(),
761 haltOnFailure = True
762 ))
763
764 factory.addStep(ShellCommand(
765 name = "kmods",
766 description = "Building kmods",
767 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
768 env = MakeEnv(),
769 haltOnFailure = True
770 ))
771
772 # find the effective kernel version (version-release-vermagic); it names the per-kernel kmods/ archive directory below
773 factory.addStep(SetPropertyFromCommand(
774 name = "kernelversion",
775 property = "kernelversion",
776 description = "Finding the effective Kernel version",
777 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
778 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
779 ))
780
781 factory.addStep(ShellCommand(
782 name = "pkgclean",
783 description = "Cleaning up package build",
784 command=["make", "package/cleanup", "V=s"]
785 ))
786
787 factory.addStep(ShellCommand(
788 name = "pkgbuild",
789 description = "Building packages",
790 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
791 env = MakeEnv(),
792 haltOnFailure = True
793 ))
794
795 factory.addStep(ShellCommand(
796 name = "pkginstall",
797 description = "Installing packages",
798 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
799 env = MakeEnv(),
800 haltOnFailure = True
801 ))
802
803 factory.addStep(ShellCommand(
804 name = "pkgindex",
805 description = "Indexing packages",
806 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
807 env = MakeEnv(),
808 haltOnFailure = True
809 ))
810
811 factory.addStep(ShellCommand(
812 name = "images",
813 description = "Building and installing images",
814 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
815 env = MakeEnv(),
816 haltOnFailure = True
817 ))
818
819 factory.addStep(ShellCommand(
820 name = "buildinfo",
821 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
822 command = "make -j1 buildinfo V=s || true",
823 env = MakeEnv(),
824 haltOnFailure = True
825 ))
826
827 factory.addStep(ShellCommand(
828 name = "json_overview_image_info",
829 description = "Generate profiles.json in target folder",
830 command = "make -j1 json_overview_image_info V=s || true",
831 env = MakeEnv(),
832 haltOnFailure = True
833 ))
834
835 factory.addStep(ShellCommand(
836 name = "checksums",
837 description = "Calculating checksums",
838 command=["make", "-j1", "checksum", "V=s"],
839 env = MakeEnv(),
840 haltOnFailure = True
841 ))
842
843 if enable_kmod_archive:
844 factory.addStep(ShellCommand(
845 name = "kmoddir",
846 description = "Creating kmod directory",
847 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
848 haltOnFailure = True
849 ))
850
851 factory.addStep(ShellCommand(
852 name = "kmodprepare",
853 description = "Preparing kmod archive",
854 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
855 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
856 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
857 haltOnFailure = True
858 ))
859
860 factory.addStep(ShellCommand(
861 name = "kmodindex",
862 description = "Indexing kmod archive",
863 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
864 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
865 env = MakeEnv(),
866 haltOnFailure = True
867 ))
868
869 # sign: pack the sha256sums and Packages index files, upload them to the master, sign them there with signall.sh, then fetch and unpack the signed copies
870 if ini.has_option("gpg", "key") or usign_key is not None:
871 factory.addStep(MasterShellCommand(
872 name = "signprepare",
873 description = "Preparing temporary signing directory",
874 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
875 haltOnFailure = True
876 ))
877
878 factory.addStep(ShellCommand(
879 name = "signpack",
880 description = "Packing files to sign",
881 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
882 haltOnFailure = True
883 ))
884
885 factory.addStep(FileUpload(
886 workersrc = "sign.tar.gz",
887 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
888 haltOnFailure = True
889 ))
890
891 factory.addStep(MasterShellCommand(
892 name = "signfiles",
893 description = "Signing files",
894 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
895 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
896 haltOnFailure = True
897 ))
898
899 factory.addStep(FileDownload(
900 name = "dlsigntargz",
901 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
902 workerdest = "sign.tar.gz",
903 haltOnFailure = True
904 ))
905
906 factory.addStep(ShellCommand(
907 name = "signunpack",
908 description = "Unpacking signed files",
909 command = ["tar", "-xzf", "sign.tar.gz"],
910 haltOnFailure = True
911 ))
912
913 # upload
914 factory.addStep(ShellCommand(
915 name = "dirprepare",
916 description = "Preparing upload directory structure",
917 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
918 haltOnFailure = True
919 ))
920
921 factory.addStep(ShellCommand(
922 name = "linkprepare",
923 description = "Preparing repository symlink",
924 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
925 doStepIf = IsNoMasterBuild,
926 haltOnFailure = True
927 ))
928
929 if enable_kmod_archive:
930 factory.addStep(ShellCommand(
931 name = "kmoddirprepare",
932 description = "Preparing kmod archive upload directory",
933 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
934 haltOnFailure = True
935 ))
936
937 factory.addStep(ShellCommand(
938 name = "dirupload",
939 description = "Uploading directory structure",
940 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
941 env={'RSYNC_PASSWORD': rsync_bin_key},
942 haltOnFailure = True,
943 logEnviron = False,
944 locks = NetLockUl,
945 ))
946
947 # download remote sha256sums to 'target-sha256sums'
948 factory.addStep(ShellCommand(
949 name = "target-sha256sums",
950 description = "Fetching remote sha256sums for target",
951 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
952 env={'RSYNC_PASSWORD': rsync_bin_key},
953 logEnviron = False,
954 haltOnFailure = False,
955 flunkOnFailure = False,
956 warnOnFailure = False,
957 ))
958
959 # build the list of files to upload: sha2rsync.pl compares the remote and local sha256sums so only new or changed files are transferred
960 factory.addStep(FileDownload(
961 name = "dlsha2rsyncpl",
962 mastersrc = scripts_dir + '/sha2rsync.pl',
963 workerdest = "../sha2rsync.pl",
964 mode = 0o755,
965 ))
966
967 factory.addStep(ShellCommand(
968 name = "buildlist",
969 description = "Building list of files to upload",
970 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
971 haltOnFailure = True,
972 ))
973
974 factory.addStep(FileDownload(
975 name = "dlrsync.sh",
976 mastersrc = scripts_dir + '/rsync.sh',
977 workerdest = "../rsync.sh",
978 mode = 0o755
979 ))
980
981 # upload new files and update existing ones
982 factory.addStep(ShellCommand(
983 name = "targetupload",
984 description = "Uploading target files",
985 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
986 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
987 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
988 env={'RSYNC_PASSWORD': rsync_bin_key},
989 haltOnFailure = True,
990 logEnviron = False,
991 ))
992
993 # delete files which don't exist locally
994 factory.addStep(ShellCommand(
995 name = "targetprune",
996 description = "Pruning target files",
997 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
998 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
999 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1000 env={'RSYNC_PASSWORD': rsync_bin_key},
1001 haltOnFailure = True,
1002 logEnviron = False,
1003 locks = NetLockUl,
1004 ))
1005
1006 if enable_kmod_archive:
1007 factory.addStep(ShellCommand(
1008 name = "kmodupload",
1009 description = "Uploading kmod archive",
1010 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1011 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1012 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1013 env={'RSYNC_PASSWORD': rsync_bin_key},
1014 haltOnFailure = True,
1015 logEnviron = False,
1016 locks = NetLockUl,
1017 ))
1018
1019 if rsync_src_url is not None:
1020 factory.addStep(ShellCommand(
1021 name = "sourcelist",
1022 description = "Finding source archives to upload",
1023 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1024 haltOnFailure = True
1025 ))
1026
1027 factory.addStep(ShellCommand(
1028 name = "sourceupload",
1029 description = "Uploading source archives",
1030 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1031 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1032 env={'RSYNC_PASSWORD': rsync_src_key},
1033 haltOnFailure = True,
1034 logEnviron = False,
1035 locks = NetLockUl,
1036 ))
1037
1038 factory.addStep(ShellCommand(
1039 name = "df",
1040 description = "Reporting disk usage",
1041 command=["df", "-h", "."],
1042 env={'LC_ALL': 'C'},
1043 haltOnFailure = False,
1044 flunkOnFailure = False,
1045 warnOnFailure = False,
1046 alwaysRun = True
1047 ))
1048
1049 factory.addStep(ShellCommand(
1050 name = "du",
1051 description = "Reporting estimated file space usage",
1052 command=["du", "-sh", "."],
1053 env={'LC_ALL': 'C'},
1054 haltOnFailure = False,
1055 flunkOnFailure = False,
1056 warnOnFailure = False,
1057 alwaysRun = True
1058 ))
1059
1060 factory.addStep(ShellCommand(
1061 name = "ccachestat",
1062 description = "Reporting ccache stats",
1063 command=["ccache", "-s"],
1064 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1065 want_stderr = False,
1066 haltOnFailure = False,
1067 flunkOnFailure = False,
1068 warnOnFailure = False,
1069 alwaysRun = True,
1070 ))
1071
1072 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1073
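# Register one Triggerable scheduler per target and a matching Trigger step in
# the force factory, so a forced build on "00_force_build" fans out to the
# selected target builder(s).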
1074 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1075 force_factory.addStep(steps.Trigger(
1076 name = "trigger_%s" % target,
1077 description = "Triggering %s build" % target,
1078 schedulerNames = [ "trigger_%s" % target ],
1079 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1080 doStepIf = IsTargetSelected(target)
1081 ))
1082
1083
1084 ####### STATUS TARGETS
1085
1086 # Build results are exposed through the 'www' web UI and pushed to reporter
1087 # services such as the IRC bot configured below. (The old 'status' targets
1088 # from buildbot/status/*.py no longer exist in current Buildbot versions.)
1089
1090 if "status_bind" in inip1:
1091 c['www'] = {
1092 'port': inip1.get("status_bind"),
1093 'plugins': {
1094 'waterfall_view': True,
1095 'console_view': True,
1096 'grid_view': True
1097 }
1098 }
1099
1100 if "status_user" in inip1 and "status_password" in inip1:
1101 c['www']['auth'] = util.UserPasswordAuth([
1102 (inip1.get("status_user"), inip1.get("status_password"))
1103 ])
1104 c['www']['authz'] = util.Authz(
1105 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1106 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1107 )
1108
1109 c['services'] = []
1110 if ini.has_section("irc"):
1111 iniirc = ini['irc']
1112 irc_host = iniirc.get("host", None)
1113 irc_port = iniirc.getint("port", 6667)
1114 irc_chan = iniirc.get("channel", None)
1115 irc_nick = iniirc.get("nickname", None)
1116 irc_pass = iniirc.get("password", None)
1117
1118 if irc_host and irc_nick and irc_chan:
1119 irc = reporters.IRC(irc_host, irc_nick,
1120 port = irc_port,
1121 password = irc_pass,
1122 channels = [ irc_chan ],
1123 notify_events = [ 'exception', 'problem', 'recovery' ]
1124 )
1125
1126 c['services'].append(irc)
1127
1128 c['revlink'] = util.RevlinkMatch([
1129 r'https://git.openwrt.org/openwrt/(.*).git'
1130 ],
1131 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1132
1133 ####### DB URL
1134
1135 c['db'] = {
1136 # This specifies what database buildbot uses to store its state. You can leave
1137 # this at its default for all but the largest installations.
1138 'db_url' : "sqlite:///state.sqlite",
1139 }
1140
1141 c['buildbotNetUsageData'] = None