phase1: max_builds and shared_wd are always set to 1
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand, SetProperty
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record this master's PID so external tooling can detect a running instance.
# Only written when absent, to avoid clobbering a live twistd's pid file.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Refuse to start unless all mandatory sections are present.
for required_section in ("general", "phase1", "rsync"):
    if required_section not in ini:
        raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
62
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

# Both values come from the [general] section; .get() yields None if unset.
c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = inip1.get("buildbot_url")
79
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

c['workers'] = []
NetLocks = dict()

# Parse every "[worker ...]" section belonging to phase 1 (no "phase" option,
# or phase == 1) that has both credentials configured.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Per-worker properties consumed by the build steps (doStepIf
            # predicates and the NetLockDl/NetLockUl renderers below).
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'shared_wd':True }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # FIX: this previously read the "dl_lock" option (copy/paste
                # error), so a worker's upload lock silently inherited its
                # download lock name instead of honoring "ul_lock".
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
            # max_builds is fixed at 1: each worker runs a single build at a time.
            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
112
# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds
# Collapse queued build requests for the same builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# The Janitor prunes step logs older than three days, running daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
125
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder
    # (sorted by completion time, most recent first, single row).
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Cross-check against the most recently started build: its completion
    # time may be newer than the request's complete_at, in which case the
    # build's timestamp wins.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
163
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # A builder with any current or old (pre-restart) builds is busy.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its newest completion timestamp.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders sort first (oldest possible timestamp) ...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ... and currently-building ones last, so idle builders win.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # NOTE: renamed from "results", which shadowed the imported
    # buildbot.process.results module.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]

c['prioritizeBuilders'] = prioritizeBuilders
202
####### CHANGESOURCES

# Base directories: master working dir (from config) and helper scripts.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Days before an unused build tree is expired (0 disables expiry).
tree_expire = inip1.getint("expire", 0)
# Contents prepended to every generated .config.
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# --contimeout only applies to rsync-protocol targets ("host::module" or
# "rsync://..."), not to remote-shell transports.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

# usign package-signing key (optional [usign] section).
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)


# find targets
targets = [ ]

# Maintain a local shallow checkout of the source tree; used here for
# target discovery and later by master-side helper scripts.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Each output line starts with "<target>/<subtarget>"; keep the first field.
while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])
256
257
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
# Poll the Git repository every 300s for new commits on the build branch.
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
266
267 ####### SCHEDULERS
268
269 # Configure the Schedulers, which decide how to react to incoming changes. In this
270 # case, just kick off a 'basebuild' build
271
class TagChoiceParameter(BaseParameter):
    # Force-scheduler parameter whose choice list is built on demand from
    # the release tags (vX.Y.Z[-rcN]) matching the configured branch.
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Cached result of the last choices computation; also used by
        # parse_from_arg() for strict validation.
        self._choice_list = []

    @property
    def choices(self):
        """Compute the selectable tags via `git ls-remote --tags`."""
        taglist = []
        # Branches named "<name>-X.Y" are release branches; master has no
        # base version and gets no tag choices.
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                # Keep only tags whose version starts with this branch's
                # base version (e.g. 21.02.* for branch ...-21.02).
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Newest first; the "-z" suffix makes final releases sort above
            # their -rc candidates ("x.y.z-z" > "x.y.z-rcN").
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            # Empty entry means "build branch HEAD, no tag".
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # Reject any value not in the advertised choice list.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
313
c['schedulers'] = []
# Automatic builds: trigger every target builder once the branch has been
# quiet for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual builds via the web UI: targets the placeholder "00_force_build"
# builder; the selected target/tag are carried in the "options" property.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Codebase fields are fixed/empty: the repository is configured globally.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                # Release tags discovered from the repository; empty = HEAD.
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
367
368 ####### BUILDERS
369
370 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
371 # what steps, and which workers can execute them. Note that any particular build will
372 # only take place on one worker.
373
def IsSharedWorkdir(step):
    # Treat any truthy "shared_wd" worker property as a shared work directory.
    shared = step.getProperty("shared_wd")
    return bool(shared)
376
def IsCleanupRequested(step):
    """doStepIf helper: True when this build should run the cleanup scripts.

    Shared work directories are never cleaned, regardless of the worker's
    "do_cleanup" property.
    """
    if IsSharedWorkdir(step):
        return False
    # Collapsed the redundant "if x: return True else: return False" branch
    # into a single bool() cast.
    return bool(step.getProperty("do_cleanup"))
385
def IsExpireRequested(step):
    """doStepIf helper: expire-check the tree only for non-shared builds
    that are not already scheduled for a full cleanup."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
391
def IsTaggingRequested(step):
    """doStepIf helper: True when the "tag" property holds a well-formed
    release tag (X.Y.Z or X.Y.Z-rcN)."""
    val = step.getProperty("tag")
    # Collapsed the redundant if/else returning True/False into bool().
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
398
def IsNoMasterBuild(step):
    """doStepIf helper: True unless we are building the master branch.

    The "step" argument is required by the doStepIf signature but unused.
    """
    building_master = (repo_branch == "master")
    return not building_master
401
def GetBaseVersion(branch=None):
    """Return the numeric base version (e.g. "21.02") encoded in a branch name.

    Generalized with an optional *branch* argument; when omitted it falls
    back to the globally configured repo_branch, preserving the original
    zero-argument behavior. Branch names without a "-X.Y" suffix map to
    "master".
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    return "master"
407
@properties.renderer
def GetVersionPrefix(props):
    """Render the upload path prefix: "<tag>/" for tagged builds,
    "<basever>-SNAPSHOT/" for release branches, "" for master."""
    basever = GetBaseVersion()
    is_tagged = props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"])
    if is_tagged:
        return "%s/" % props["tag"]
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
417
def GetNextBuild(builder, requests):
    """Pick the next build request: tagged (release) requests take priority;
    otherwise fall back to the oldest queued request."""
    for request in requests:
        if request.properties and request.properties.hasProperty("tag"):
            return request

    request = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, request.id, request.bsid))
    return request
426
def MakeEnv(overrides=None, tryccache=False):
    """Build the common environment dict for build steps.

    CCC/CCXX always carry the detected host compilers. CC/CXX either point
    at the ccache wrapper scripts (tryccache=True) or mirror CCC/CCXX, with
    CCACHE set accordingly. *overrides* entries win over everything.
    """
    env = {
        'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
        'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    }
    if tryccache:
        env.update({
            'CC': Interpolate("%(prop:builddir)s/ccache_cc.sh"),
            'CXX': Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
            'CCACHE': Interpolate("%(prop:ccache_command:-)s"),
        })
    else:
        env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
    if overrides is not None:
        env.update(overrides)
    return env
443
@properties.renderer
def NetLockDl(props):
    """Render the download-lock list for steps fetching over the network;
    empty when the worker has no "dl_lock" property."""
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        return [lock.access('exclusive')]
    return []
453
@properties.renderer
def NetLockUl(props):
    """Render the upload-lock list for steps pushing artifacts upstream;
    empty when the worker has no "ul_lock" property."""
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        return [lock.access('exclusive')]
    return []
463
@util.renderer
def TagPropertyValue(props):
    """Extract the "tag" value from the force-scheduler "options" dict,
    or None when no tag was selected."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        # isinstance is the idiomatic check and also accepts dict subclasses,
        # which the previous "type(options) is dict" rejected.
        if isinstance(options, dict):
            return options.get("tag")
    return None
471
def IsTargetSelected(target):
    """Return a doStepIf predicate that is False only when the force-build
    form selected a specific, different target."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected = options.get("target", "all")
                if selected not in ("all", target):
                    return False
        except KeyError:
            pass
        return True

    return CheckTargetProperty
486
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    Returns the two-line string (comment line with trailing "secret key"
    rewritten to "public key", then the base64 key data), or None when the
    input is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # binascii.Error (bad base64) is a ValueError subclass; TypeError
        # covers non-str/bytes input. Previously a bare except.
        return None

    # Slice out the public components of the secret-key blob (offsets
    # follow the usign key layout — confirm against the usign source).
    # FIX: b64encode returns bytes; decode so the formatted result does not
    # embed a literal "b'...'" in the generated key file.
    pubdata = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode()
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubdata)
495
496
c['builders'] = []

# Per-worker lock serializing dl/ population (shared download cache on disk).
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# Empty factory backing the placeholder builder targeted by the
# ForceScheduler; real work happens in the per-target builders.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
512
513 for target in targets:
514 ts = target.split('/')
515
516 factory = BuildFactory()
517
518 # setup shared work directory if required
519 factory.addStep(ShellCommand(
520 name = "sharedwd",
521 description = "Setting up shared work directory",
522 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
523 workdir = ".",
524 haltOnFailure = True,
525 doStepIf = IsSharedWorkdir))
526
527 # find number of cores
528 factory.addStep(SetPropertyFromCommand(
529 name = "nproc",
530 property = "nproc",
531 description = "Finding number of CPUs",
532 command = ["nproc"]))
533
534 # set number of jobs
535 factory.addStep(SetProperty(
536 name = "njobs",
537 property = "njobs",
538 description = "Set max concurrency",
539 value = Interpolate("%(prop:nproc:-1)s")))
540
541 # find gcc and g++ compilers
542 factory.addStep(FileDownload(
543 name = "dlfindbinpl",
544 mastersrc = scripts_dir + '/findbin.pl',
545 workerdest = "../findbin.pl",
546 mode = 0o755))
547
548 factory.addStep(SetPropertyFromCommand(
549 name = "gcc",
550 property = "cc_command",
551 description = "Finding gcc command",
552 command = [
553 "../findbin.pl", "gcc", "", "",
554 ],
555 haltOnFailure = True))
556
557 factory.addStep(SetPropertyFromCommand(
558 name = "g++",
559 property = "cxx_command",
560 description = "Finding g++ command",
561 command = [
562 "../findbin.pl", "g++", "", "",
563 ],
564 haltOnFailure = True))
565
566 # see if ccache is available
567 factory.addStep(SetPropertyFromCommand(
568 property = "ccache_command",
569 command = ["which", "ccache"],
570 description = "Testing for ccache command",
571 haltOnFailure = False,
572 flunkOnFailure = False,
573 warnOnFailure = False,
574 ))
575
576 # expire tree if needed
577 if tree_expire > 0:
578 factory.addStep(FileDownload(
579 name = "dlexpiresh",
580 doStepIf = IsExpireRequested,
581 mastersrc = scripts_dir + '/expire.sh',
582 workerdest = "../expire.sh",
583 mode = 0o755))
584
585 factory.addStep(ShellCommand(
586 name = "expire",
587 description = "Checking for build tree expiry",
588 command = ["./expire.sh", str(tree_expire)],
589 workdir = ".",
590 haltOnFailure = True,
591 doStepIf = IsExpireRequested,
592 timeout = 2400))
593
594 # cleanup.sh if needed
595 factory.addStep(FileDownload(
596 name = "dlcleanupsh",
597 mastersrc = scripts_dir + '/cleanup.sh',
598 workerdest = "../cleanup.sh",
599 mode = 0o755,
600 doStepIf = IsCleanupRequested))
601
602 factory.addStep(ShellCommand(
603 name = "cleanold",
604 description = "Cleaning previous builds",
605 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
606 workdir = ".",
607 haltOnFailure = True,
608 doStepIf = IsCleanupRequested,
609 timeout = 2400))
610
611 factory.addStep(ShellCommand(
612 name = "cleanup",
613 description = "Cleaning work area",
614 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
615 workdir = ".",
616 haltOnFailure = True,
617 doStepIf = IsCleanupRequested,
618 timeout = 2400))
619
620 # Workaround bug when switching from a checked out tag back to a branch
621 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
622 factory.addStep(ShellCommand(
623 name = "gitcheckout",
624 description = "Ensure that Git HEAD is sane",
625 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
626 haltOnFailure = True))
627
628 # check out the source
629 # Git() runs:
630 # if repo doesn't exist: 'git clone repourl'
631 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
632 # 'git fetch -t repourl branch; git reset --hard revision'
633 factory.addStep(Git(
634 name = "git",
635 repourl = repo_url,
636 branch = repo_branch,
637 mode = 'full',
638 method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
639 locks = NetLockDl,
640 haltOnFailure = True,
641 ))
642
643 # update remote refs
644 factory.addStep(ShellCommand(
645 name = "fetchrefs",
646 description = "Fetching Git remote refs",
647 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
648 haltOnFailure = True
649 ))
650
651 # switch to tag
652 factory.addStep(ShellCommand(
653 name = "switchtag",
654 description = "Checking out Git tag",
655 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
656 haltOnFailure = True,
657 doStepIf = IsTaggingRequested
658 ))
659
660 # Verify that Git HEAD points to a tag or branch
661 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
662 factory.addStep(ShellCommand(
663 name = "gitverify",
664 description = "Ensure that Git HEAD is pointing to a branch or tag",
665 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
666 haltOnFailure = True))
667
668 factory.addStep(ShellCommand(
669 name = "rmtmp",
670 description = "Remove tmp folder",
671 command=["rm", "-rf", "tmp/"]))
672
673 # feed
674 factory.addStep(ShellCommand(
675 name = "rmfeedlinks",
676 description = "Remove feed symlinks",
677 command=["rm", "-rf", "package/feeds/"]))
678
679 factory.addStep(StringDownload(
680 name = "ccachecc",
681 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
682 workerdest = "../ccache_cc.sh",
683 mode = 0o755,
684 ))
685
686 factory.addStep(StringDownload(
687 name = "ccachecxx",
688 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
689 workerdest = "../ccache_cxx.sh",
690 mode = 0o755,
691 ))
692
693 # feed
694 factory.addStep(ShellCommand(
695 name = "updatefeeds",
696 description = "Updating feeds",
697 command=["./scripts/feeds", "update"],
698 env = MakeEnv(tryccache=True),
699 haltOnFailure = True,
700 locks = NetLockDl,
701 ))
702
703 # feed
704 factory.addStep(ShellCommand(
705 name = "installfeeds",
706 description = "Installing feeds",
707 command=["./scripts/feeds", "install", "-a"],
708 env = MakeEnv(tryccache=True),
709 haltOnFailure = True
710 ))
711
712 # seed config
713 if config_seed is not None:
714 factory.addStep(StringDownload(
715 name = "dlconfigseed",
716 s = config_seed + '\n',
717 workerdest = ".config",
718 mode = 0o644
719 ))
720
721 # configure
722 factory.addStep(ShellCommand(
723 name = "newconfig",
724 description = "Seeding .config",
725 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
726 ))
727
728 factory.addStep(ShellCommand(
729 name = "delbin",
730 description = "Removing output directory",
731 command = ["rm", "-rf", "bin/"]
732 ))
733
734 factory.addStep(ShellCommand(
735 name = "defconfig",
736 description = "Populating .config",
737 command = ["make", "defconfig"],
738 env = MakeEnv()
739 ))
740
741 # check arch
742 factory.addStep(ShellCommand(
743 name = "checkarch",
744 description = "Checking architecture",
745 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
746 logEnviron = False,
747 want_stdout = False,
748 want_stderr = False,
749 haltOnFailure = True
750 ))
751
752 # find libc suffix
753 factory.addStep(SetPropertyFromCommand(
754 name = "libc",
755 property = "libc",
756 description = "Finding libc suffix",
757 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
758
759 # install build key
760 if usign_key is not None:
761 factory.addStep(StringDownload(
762 name = "dlkeybuildpub",
763 s = UsignSec2Pub(usign_key, usign_comment),
764 workerdest = "key-build.pub",
765 mode = 0o600,
766 ))
767
768 factory.addStep(StringDownload(
769 name = "dlkeybuild",
770 s = "# fake private key",
771 workerdest = "key-build",
772 mode = 0o600,
773 ))
774
775 factory.addStep(StringDownload(
776 name = "dlkeybuilducert",
777 s = "# fake certificate",
778 workerdest = "key-build.ucert",
779 mode = 0o600,
780 ))
781
782 # prepare dl
783 factory.addStep(ShellCommand(
784 name = "dldir",
785 description = "Preparing dl/",
786 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
787 logEnviron = False,
788 want_stdout = False
789 ))
790
791 # prepare tar
792 factory.addStep(ShellCommand(
793 name = "dltar",
794 description = "Building and installing GNU tar",
795 command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/tar/compile", "V=s"],
796 env = MakeEnv(tryccache=True),
797 haltOnFailure = True
798 ))
799
800 # populate dl
801 factory.addStep(ShellCommand(
802 name = "dlrun",
803 description = "Populating dl/",
804 command = ["make", Interpolate("-j%(prop:njobs)s"), "download", "V=s"],
805 env = MakeEnv(),
806 logEnviron = False,
807 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
808 ))
809
810 factory.addStep(ShellCommand(
811 name = "cleanbase",
812 description = "Cleaning base-files",
813 command=["make", "package/base-files/clean", "V=s"]
814 ))
815
816 # build
817 factory.addStep(ShellCommand(
818 name = "tools",
819 description = "Building and installing tools",
820 command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/install", "V=s"],
821 env = MakeEnv(tryccache=True),
822 haltOnFailure = True
823 ))
824
825 factory.addStep(ShellCommand(
826 name = "toolchain",
827 description = "Building and installing toolchain",
828 command=["make", Interpolate("-j%(prop:njobs)s"), "toolchain/install", "V=s"],
829 env = MakeEnv(),
830 haltOnFailure = True
831 ))
832
833 factory.addStep(ShellCommand(
834 name = "kmods",
835 description = "Building kmods",
836 command=["make", Interpolate("-j%(prop:njobs)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
837 env = MakeEnv(),
838 haltOnFailure = True
839 ))
840
841 # find kernel version
842 factory.addStep(SetPropertyFromCommand(
843 name = "kernelversion",
844 property = "kernelversion",
845 description = "Finding the effective Kernel version",
846 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
847 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
848 ))
849
850 factory.addStep(ShellCommand(
851 name = "pkgclean",
852 description = "Cleaning up package build",
853 command=["make", "package/cleanup", "V=s"]
854 ))
855
856 factory.addStep(ShellCommand(
857 name = "pkgbuild",
858 description = "Building packages",
859 command=["make", Interpolate("-j%(prop:njobs)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
860 env = MakeEnv(),
861 haltOnFailure = True
862 ))
863
864 factory.addStep(ShellCommand(
865 name = "pkginstall",
866 description = "Installing packages",
867 command=["make", Interpolate("-j%(prop:njobs)s"), "package/install", "V=s"],
868 env = MakeEnv(),
869 haltOnFailure = True
870 ))
871
872 factory.addStep(ShellCommand(
873 name = "pkgindex",
874 description = "Indexing packages",
875 command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
876 env = MakeEnv(),
877 haltOnFailure = True
878 ))
879
880 factory.addStep(ShellCommand(
881 name = "images",
882 description = "Building and installing images",
883 command=["make", Interpolate("-j%(prop:njobs)s"), "target/install", "V=s"],
884 env = MakeEnv(),
885 haltOnFailure = True
886 ))
887
888 factory.addStep(ShellCommand(
889 name = "buildinfo",
890 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
891 command = "make -j1 buildinfo V=s || true",
892 env = MakeEnv(),
893 haltOnFailure = True
894 ))
895
896 factory.addStep(ShellCommand(
897 name = "json_overview_image_info",
898 description = "Generate profiles.json in target folder",
899 command = "make -j1 json_overview_image_info V=s || true",
900 env = MakeEnv(),
901 haltOnFailure = True
902 ))
903
904 factory.addStep(ShellCommand(
905 name = "checksums",
906 description = "Calculating checksums",
907 command=["make", "-j1", "checksum", "V=s"],
908 env = MakeEnv(),
909 haltOnFailure = True
910 ))
911
912 if enable_kmod_archive:
913 factory.addStep(ShellCommand(
914 name = "kmoddir",
915 description = "Creating kmod directory",
916 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
917 haltOnFailure = True
918 ))
919
920 factory.addStep(ShellCommand(
921 name = "kmodprepare",
922 description = "Preparing kmod archive",
923 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
924 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
925 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
926 haltOnFailure = True
927 ))
928
929 factory.addStep(ShellCommand(
930 name = "kmodindex",
931 description = "Indexing kmod archive",
932 command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
933 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
934 env = MakeEnv(),
935 haltOnFailure = True
936 ))
937
938 # sign
939 if ini.has_option("gpg", "key") or usign_key is not None:
940 factory.addStep(MasterShellCommand(
941 name = "signprepare",
942 description = "Preparing temporary signing directory",
943 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
944 haltOnFailure = True
945 ))
946
947 factory.addStep(ShellCommand(
948 name = "signpack",
949 description = "Packing files to sign",
950 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
951 haltOnFailure = True
952 ))
953
954 factory.addStep(FileUpload(
955 workersrc = "sign.tar.gz",
956 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
957 haltOnFailure = True
958 ))
959
960 factory.addStep(MasterShellCommand(
961 name = "signfiles",
962 description = "Signing files",
963 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
964 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
965 haltOnFailure = True
966 ))
967
968 factory.addStep(FileDownload(
969 name = "dlsigntargz",
970 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
971 workerdest = "sign.tar.gz",
972 haltOnFailure = True
973 ))
974
975 factory.addStep(ShellCommand(
976 name = "signunpack",
977 description = "Unpacking signed files",
978 command = ["tar", "-xzf", "sign.tar.gz"],
979 haltOnFailure = True
980 ))
981
	# upload
	# Stage the remote directory layout locally under tmp/upload/ so a
	# single rsync can create it on the download server.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# Create the per-release "packages" symlink pointing at the shared
	# packages-<basever> tree.  Skipped for master builds
	# (doStepIf = IsNoMasterBuild — presumably master/snapshot builds do
	# not use a versioned package directory; confirm against the helpers
	# defined earlier in this file).
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		# Matching staging directory for the per-kernel kmod archive.
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))
1005
	# Push the staged (mostly empty) directory skeleton and symlinks to the
	# download server.  Password is passed via the environment and the
	# environment is not logged so the rsync credential never hits the logs.
	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))

	# download remote sha256sums to 'target-sha256sums'
	# Used below to compute which files actually changed; a failure here is
	# tolerated (first upload of a target has no remote sha256sums yet), so
	# the step neither halts nor flunks nor warns.
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))
1027
	# build list of files to upload
	# Helper script lives on the master; drop it one level above the build
	# directory so it survives build-dir cleanup between steps.
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	# Diff the remote sha256sums (fetched above) against the local ones and
	# emit 'rsynclist' — the subset of files that need uploading.
	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	# rsync wrapper script used by all the upload/prune steps below.
	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))
1049
	# upload new files and update existing ones
	# Only the files named in 'rsynclist' are transferred; /kmods/ is
	# excluded because it is synced separately below.  --delay-updates plus
	# a per-target partial dir keeps the remote tree consistent while the
	# transfer is in flight.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	# --existing + --ignore-existing means rsync transfers nothing and only
	# applies --delete: a pure prune pass over the remote target directory.
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))
1074
	if enable_kmod_archive:
		# Mirror the per-kernel-version kmod archive; --delete keeps the
		# remote kmods directory an exact copy of the local one.
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
			         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1087
	if rsync_src_url is not None:
		# Collect source archives downloaded during this build: regular,
		# non-empty files in dl/ that are newer than .config, excluding
		# hidden files, *.hash and in-progress *.dl downloads.
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only: source tarballs are immutable, so a size match is
		# treated as "already uploaded".  The partial dir includes the
		# worker name since many workers may upload sources concurrently.
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
			        [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1106
1107 factory.addStep(ShellCommand(
1108 name = "df",
1109 description = "Reporting disk usage",
1110 command=["df", "-h", "."],
1111 env={'LC_ALL': 'C'},
1112 haltOnFailure = False,
1113 flunkOnFailure = False,
1114 warnOnFailure = False,
1115 alwaysRun = True
1116 ))
1117
1118 factory.addStep(ShellCommand(
1119 name = "du",
1120 description = "Reporting estimated file space usage",
1121 command=["du", "-sh", "."],
1122 env={'LC_ALL': 'C'},
1123 haltOnFailure = False,
1124 flunkOnFailure = False,
1125 warnOnFailure = False,
1126 alwaysRun = True
1127 ))
1128
	# Diagnostic: dump ccache hit/miss statistics.  PATH is extended with
	# the build tree's host bin dir — presumably so the ccache bundled with
	# the toolchain is found when no system ccache exists (confirm against
	# MakeEnv, defined earlier in this file).  Never affects the result.
	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))
1140
	# Register the per-target builder and a Triggerable scheduler for it,
	# then add a Trigger step to the shared force factory so a forced build
	# can fan out to any selected subset of targets.
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		# Forward the force-build reason and tag into the triggered build.
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		# Only fire for targets the user actually selected in the force form.
		doStepIf = IsTargetSelected(target)
	))
1151
1152
1153 ####### STATUS TARGETS
1154
# Build results are published through the web UI configured in c['www']
# and through the reporters appended to c['services'] below (e.g. an IRC
# bot).  (The pre-0.9 'status'/buildbot.status API no longer exists.)
1158
if "status_bind" in inip1:
	# Web UI: serve the standard buildbot views on the configured endpoint.
	www_plugins = {view: True for view in ('waterfall_view', 'console_view', 'grid_view')}
	c['www'] = {
		'port': inip1.get("status_bind"),
		'plugins': www_plugins
	}

	# Optional single-user authentication; that user is the only admin and
	# the only role allowed to hit control endpoints.
	if "status_user" in inip1 and "status_password" in inip1:
		admin_user = inip1.get("status_user")
		c['www']['auth'] = util.UserPasswordAuth([
			(admin_user, inip1.get("status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[admin_user]) ]
		)
1177
c['services'] = []

# IRC status bot: only configured when the [irc] ini section supplies at
# least a host, a nickname and a channel.
if ini.has_section("irc"):
	iniirc = ini['irc']
	irc_host = iniirc.get("host", None)
	irc_nick = iniirc.get("nickname", None)
	irc_chan = iniirc.get("channel", None)

	if irc_host and irc_nick and irc_chan:
		c['services'].append(reporters.IRC(irc_host, irc_nick,
			port = iniirc.getint("port", 6667),
			password = iniirc.get("password", None),
			channels = [ irc_chan ],
			# Announce only state transitions worth acting on.
			notify_events = [ 'exception', 'problem', 'recovery' ]
		))
1196
# Turn openwrt.org git clone URLs seen in changes into gitweb commit links.
c['revlink'] = util.RevlinkMatch(
	[ r'https://git.openwrt.org/openwrt/(.*).git' ],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1201
####### DB URL

# Buildbot state database.  A local SQLite file is sufficient for all but
# the largest installations.
c['db'] = dict(db_url = "sqlite:///state.sqlite")

# Opt out of sending anonymous usage statistics upstream.
c['buildbotNetUsageData'] = None