phase1: rework GetNumJobs()
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand, SetProperty
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record the master's PID once so external tooling (and twistd itself)
# can locate the running process; never overwrite an existing pidfile.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
# Load the external configuration; BUILDMASTER_CONFIG may point anywhere,
# falling back to ./config.ini next to the master.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# All three sections are mandatory for a phase-1 master.
if any(section not in ini for section in ("general", "phase1", "rsync")):
    raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# 'title' appears at the top of the web UI (linked to 'titleURL');
# 'buildbotURL' is the externally visible base URL of this master's web
# server, which the buildbot cannot figure out on its own.
general = ini['general']
c['title'] = general.get("title")
c['titleURL'] = general.get("title_url")
c['buildbotURL'] = inip1.get("buildbot_url")
80 ####### BUILDWORKERS
81
82 # The 'workers' list defines the set of recognized buildworkers. Each element is
83 # a Worker object, specifying a unique worker name and password. The same
84 # worker name and password must be configured on the worker.
85
c['workers'] = []
# Named master-level network locks, shared between workers that declare the
# same dl_lock/ul_lock name in the config.
NetLocks = dict()

for section in ini.sections():
    if section.startswith("worker "):
        # A worker needs name+password and must belong to phase 1
        # (no "phase" option defaults to phase 1).
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # FIX: read the "ul_lock" option here. The previous code
                # copy-pasted ini.get(section, "dl_lock"), so a worker's
                # upload lock silently aliased its download lock.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# PB port can be either a numeric port or a connection string;
# default to the conventional 9989 when unset.
c['protocols'] = {'pb': {'port': inip1.get("port") or 9989}}

# Coalesce pending build requests for the same builder into one build.
c['collapseRequests'] = True

# Reduce amount of backlog data kept in the database.
c['configurators'] = [
    util.JanitorConfigurator(logHorizon=timedelta(days=3), hour=6),
]
133
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder
    # (ordered by completion time, newest first, single row).
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # Bare return inside an inlineCallbacks generator -> result None.
        return

    complete_at = completed[0]['complete_at']

    # Also consult the most recently *started* build for this builder:
    # with collapsed requests its completion can be later than the newest
    # request's complete_at, and we want the later of the two.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
171
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # A builder with an active (or stale pre-reconfig) build in flight.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders sort first (oldest possible timestamp)...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ...while currently-building ones sort last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from `results`: that name shadowed the imported
    # buildbot.process.results module within this function.
    bldr_infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    bldr_infos.sort(key=bldr_sort)

    for info in bldr_infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(info[1].name, info[0]))

    return [info[1] for info in bldr_infos]

c['prioritizeBuilders'] = prioritizeBuilders
210
211 ####### CHANGESOURCES
212
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

tree_expire = inip1.getint("expire", 0)
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

def _rsync_defopts(url):
    """Default rsync options for url; --contimeout is only valid when
    talking to an rsync daemon (host::module or rsync:// URLs)."""
    opts = ["-v", "-4", "--timeout=120"]
    if url.find("::") > 0 or url.find("rsync://") == 0:
        opts += ["--contimeout=20"]
    return opts

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = _rsync_defopts(rsync_bin_url)

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = _rsync_defopts(rsync_src_url)

# usign signing key: derive a default comment from the branch name,
# both overridable by an optional [usign] section.
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)
244
245
# find targets: keep a local clone of the source tree current, then ask
# its dump-target-info.pl helper which target/subtarget pairs exist.
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# First whitespace-separated token of each output line is the target name.
for line in iter(findtargets.stdout.readline, b''):
    targets.append(line.decode().strip().split(' ')[0])
264
265
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes: poll the project repository every 5 minutes.
c['change_source'] = [
    GitPoller(
        repo_url,
        workdir=work_dir+'/work.git',
        branch=repo_branch,
        pollinterval=300),
]
274
275 ####### SCHEDULERS
276
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
279
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter offering the release tags of this branch.

    The choice list is refreshed from `git ls-remote --tags` on every
    access and cached in _choice_list, which parse_from_arg validates
    against when strict.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        """Collect vX.Y.Z[-rcN] tags belonging to the X.Y series encoded in
        repo_branch; the first (default) entry is the empty string."""
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            for line in iter(findtags.stdout.readline, b''):
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Appending '-z' to final releases makes them sort after their
            # release candidates within the same version.
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
321
# Two schedulers: one triggered by pushes to repo_branch, one backing the
# "Force builds" button in the web UI.
c['schedulers'] = [
    SingleBranchScheduler(
        name = "all",
        change_filter = filter.ChangeFilter(branch=repo_branch),
        treeStableTimer = 60,
        builderNames = targets),

    ForceScheduler(
        name = "force",
        buttonName = "Force builds",
        label = "Force build details",
        builderNames = [ "00_force_build" ],

        # The repository is fixed for this master, so pin every
        # codebase field to an empty value.
        codebases = [
            util.CodebaseParameter(
                "",
                label = "Repository",
                branch = util.FixedParameter(name = "branch", default = ""),
                revision = util.FixedParameter(name = "revision", default = ""),
                repository = util.FixedParameter(name = "repository", default = ""),
                project = util.FixedParameter(name = "project", default = "")),
        ],

        reason = util.StringParameter(
            name = "reason",
            label = "Reason",
            default = "Trigger build",
            required = True,
            size = 80),

        properties = [
            util.NestedParameter(
                name = "options",
                label = "Build Options",
                layout = "vertical",
                fields = [
                    util.ChoiceStringParameter(
                        name = "target",
                        label = "Build target",
                        default = "all",
                        choices = [ "all" ] + targets),
                    TagChoiceParameter(
                        name = "tag",
                        label = "Build tag",
                        default = ""),
                ]),
        ]),
]
375
376 ####### BUILDERS
377
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
381
def IsSharedWorkdir(step):
    """doStepIf helper: True when this worker builds in one shared workdir."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
384
def IsCleanupRequested(step):
    """doStepIf helper: full cleanup was requested (never on shared workdirs)."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
393
def IsExpireRequested(step):
    """doStepIf helper: expire the tree when it is neither shared nor
    scheduled for a full cleanup."""
    return not (IsSharedWorkdir(step) or IsCleanupRequested(step))
399
def IsTaggingRequested(step):
    """doStepIf helper: a well-formed release tag (x.y.z or x.y.z-rcN)
    was supplied via the force scheduler."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
406
def IsNoMasterBuild(step):
    """doStepIf helper: True for release branches, False on master."""
    is_master = (repo_branch == "master")
    return not is_master
409
def GetBaseVersion(branch=None):
    """Return the numeric release series ("21.02") encoded in a branch name
    like "openwrt-21.02", or "master" when the name has no such suffix.

    branch defaults to the module-level repo_branch, preserving the
    original zero-argument call sites.
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    return "master"
415
@properties.renderer
def GetVersionPrefix(props):
    """Renderer: upload-path prefix — "x.y.z/" for tagged release builds,
    "NN.NN-SNAPSHOT/" on release branches, empty on master."""
    tag = props["tag"] if props.hasProperty("tag") else None
    if tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag):
        return "%s/" % tag
    basever = GetBaseVersion()
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
425
@properties.renderer
def GetNumJobs(props):
    """Renderer: make -j concurrency as a string — the worker's CPU count
    divided by its max_builds, never below 1."""
    nproc = int(props.getProperty("nproc", "1"))
    max_builds = props.getProperty("max_builds", 1)
    # // is exact integer division; the old int(nproc / max_builds) went
    # through float division, and the trailing `or 1` is replaced by an
    # explicit clamp.
    return str(max(1, nproc // max_builds))
431
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer any request carrying a 'tag' property
    (forced release build); otherwise take the oldest pending request."""
    for request in requests:
        if request.properties and request.properties.hasProperty("tag"):
            return request

    request = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, request.id, request.bsid))
    return request
440
def MakeEnv(overrides=None, tryccache=False):
    """Build the common build environment.

    CCC/CCXX always hold the detected real compilers; with tryccache the
    CC/CXX entries point at the ccache wrapper scripts instead, otherwise
    they alias the real compilers and CCACHE is empty. `overrides` is
    merged in last.
    """
    env = {
        'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
        'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    }
    if tryccache:
        env.update({
            'CC': Interpolate("%(prop:builddir)s/ccache_cc.sh"),
            'CXX': Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
            'CCACHE': Interpolate("%(prop:ccache_command:-)s"),
        })
    else:
        env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
    if overrides is not None:
        env.update(overrides)
    return env
457
@properties.renderer
def NetLockDl(props):
    """Renderer: exclusive access to the worker's download MasterLock,
    or no locks when none is configured."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
467
@properties.renderer
def NetLockUl(props):
    """Renderer: exclusive access to the worker's upload MasterLock,
    or no locks when none is configured."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
477
@util.renderer
def TagPropertyValue(props):
    """Renderer: pull the 'tag' entry out of the force scheduler's nested
    options dict; None when absent or malformed."""
    options = props.getProperty("options") if props.hasProperty("options") else None
    if type(options) is dict:
        return options.get("tag")
    return None
485
def IsTargetSelected(target):
    """Return a doStepIf predicate that is True unless the force scheduler
    explicitly selected a different target than `target`."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            chosen = options.get("target", "all")
            return chosen in ("all", target)
        return True

    return CheckTargetProperty
500
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key text from a base64-encoded secret key.

    The public key consists of the 2-byte algorithm id, the 8-byte key id
    (bytes 32..40 of the decoded secret key) and the public part (bytes
    72..), re-encoded as base64 under the comment with "secret key"
    rewritten to "public key". Returns None when seckey is not valid
    base64 (or not a str/bytes at all).
    """
    try:
        seckey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # TypeError: non-bytes-like input; ValueError covers binascii.Error.
        return None

    # FIX: decode() the b64encode() result — it returns bytes, and
    # formatting bytes into a str produced a literal "b'...'" in the
    # generated key-build.pub contents.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
509
510
c['builders'] = []

# Per-worker lock serialising the dl/ download step on each worker.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [worker.workername for worker in c['workers']]

# Empty factory backing the pseudo-builder the force scheduler targets.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
526
527 for target in targets:
528 ts = target.split('/')
529
530 factory = BuildFactory()
531
532 # setup shared work directory if required
533 factory.addStep(ShellCommand(
534 name = "sharedwd",
535 description = "Setting up shared work directory",
536 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
537 workdir = ".",
538 haltOnFailure = True,
539 doStepIf = IsSharedWorkdir))
540
541 # find number of cores
542 factory.addStep(SetPropertyFromCommand(
543 name = "nproc",
544 property = "nproc",
545 description = "Finding number of CPUs",
546 command = ["nproc"]))
547
548 # set number of jobs
549 factory.addStep(SetProperty(
550 name = "njobs",
551 property = "njobs",
552 description = "Set max concurrency",
553 value = Interpolate("%(kw:jobs)s", jobs=GetNumJobs)))
554
555 # find gcc and g++ compilers
556 factory.addStep(FileDownload(
557 name = "dlfindbinpl",
558 mastersrc = scripts_dir + '/findbin.pl',
559 workerdest = "../findbin.pl",
560 mode = 0o755))
561
562 factory.addStep(SetPropertyFromCommand(
563 name = "gcc",
564 property = "cc_command",
565 description = "Finding gcc command",
566 command = [
567 "../findbin.pl", "gcc", "", "",
568 ],
569 haltOnFailure = True))
570
571 factory.addStep(SetPropertyFromCommand(
572 name = "g++",
573 property = "cxx_command",
574 description = "Finding g++ command",
575 command = [
576 "../findbin.pl", "g++", "", "",
577 ],
578 haltOnFailure = True))
579
580 # see if ccache is available
581 factory.addStep(SetPropertyFromCommand(
582 property = "ccache_command",
583 command = ["which", "ccache"],
584 description = "Testing for ccache command",
585 haltOnFailure = False,
586 flunkOnFailure = False,
587 warnOnFailure = False,
588 ))
589
590 # expire tree if needed
591 if tree_expire > 0:
592 factory.addStep(FileDownload(
593 name = "dlexpiresh",
594 doStepIf = IsExpireRequested,
595 mastersrc = scripts_dir + '/expire.sh',
596 workerdest = "../expire.sh",
597 mode = 0o755))
598
599 factory.addStep(ShellCommand(
600 name = "expire",
601 description = "Checking for build tree expiry",
602 command = ["./expire.sh", str(tree_expire)],
603 workdir = ".",
604 haltOnFailure = True,
605 doStepIf = IsExpireRequested,
606 timeout = 2400))
607
608 # cleanup.sh if needed
609 factory.addStep(FileDownload(
610 name = "dlcleanupsh",
611 mastersrc = scripts_dir + '/cleanup.sh',
612 workerdest = "../cleanup.sh",
613 mode = 0o755,
614 doStepIf = IsCleanupRequested))
615
616 factory.addStep(ShellCommand(
617 name = "cleanold",
618 description = "Cleaning previous builds",
619 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
620 workdir = ".",
621 haltOnFailure = True,
622 doStepIf = IsCleanupRequested,
623 timeout = 2400))
624
625 factory.addStep(ShellCommand(
626 name = "cleanup",
627 description = "Cleaning work area",
628 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
629 workdir = ".",
630 haltOnFailure = True,
631 doStepIf = IsCleanupRequested,
632 timeout = 2400))
633
634 # Workaround bug when switching from a checked out tag back to a branch
635 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
636 factory.addStep(ShellCommand(
637 name = "gitcheckout",
638 description = "Ensure that Git HEAD is sane",
639 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
640 haltOnFailure = True))
641
642 # check out the source
643 # Git() runs:
644 # if repo doesn't exist: 'git clone repourl'
645 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
646 # 'git fetch -t repourl branch; git reset --hard revision'
647 factory.addStep(Git(
648 name = "git",
649 repourl = repo_url,
650 branch = repo_branch,
651 mode = 'full',
652 method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
653 locks = NetLockDl,
654 haltOnFailure = True,
655 ))
656
657 # update remote refs
658 factory.addStep(ShellCommand(
659 name = "fetchrefs",
660 description = "Fetching Git remote refs",
661 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
662 haltOnFailure = True
663 ))
664
665 # switch to tag
666 factory.addStep(ShellCommand(
667 name = "switchtag",
668 description = "Checking out Git tag",
669 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
670 haltOnFailure = True,
671 doStepIf = IsTaggingRequested
672 ))
673
674 # Verify that Git HEAD points to a tag or branch
675 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
676 factory.addStep(ShellCommand(
677 name = "gitverify",
678 description = "Ensure that Git HEAD is pointing to a branch or tag",
679 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
680 haltOnFailure = True))
681
682 factory.addStep(ShellCommand(
683 name = "rmtmp",
684 description = "Remove tmp folder",
685 command=["rm", "-rf", "tmp/"]))
686
687 # feed
688 factory.addStep(ShellCommand(
689 name = "rmfeedlinks",
690 description = "Remove feed symlinks",
691 command=["rm", "-rf", "package/feeds/"]))
692
693 factory.addStep(StringDownload(
694 name = "ccachecc",
695 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
696 workerdest = "../ccache_cc.sh",
697 mode = 0o755,
698 ))
699
700 factory.addStep(StringDownload(
701 name = "ccachecxx",
702 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
703 workerdest = "../ccache_cxx.sh",
704 mode = 0o755,
705 ))
706
707 # feed
708 factory.addStep(ShellCommand(
709 name = "updatefeeds",
710 description = "Updating feeds",
711 command=["./scripts/feeds", "update"],
712 env = MakeEnv(tryccache=True),
713 haltOnFailure = True,
714 locks = NetLockDl,
715 ))
716
717 # feed
718 factory.addStep(ShellCommand(
719 name = "installfeeds",
720 description = "Installing feeds",
721 command=["./scripts/feeds", "install", "-a"],
722 env = MakeEnv(tryccache=True),
723 haltOnFailure = True
724 ))
725
726 # seed config
727 if config_seed is not None:
728 factory.addStep(StringDownload(
729 name = "dlconfigseed",
730 s = config_seed + '\n',
731 workerdest = ".config",
732 mode = 0o644
733 ))
734
735 # configure
736 factory.addStep(ShellCommand(
737 name = "newconfig",
738 description = "Seeding .config",
739 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
740 ))
741
742 factory.addStep(ShellCommand(
743 name = "delbin",
744 description = "Removing output directory",
745 command = ["rm", "-rf", "bin/"]
746 ))
747
748 factory.addStep(ShellCommand(
749 name = "defconfig",
750 description = "Populating .config",
751 command = ["make", "defconfig"],
752 env = MakeEnv()
753 ))
754
755 # check arch
756 factory.addStep(ShellCommand(
757 name = "checkarch",
758 description = "Checking architecture",
759 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
760 logEnviron = False,
761 want_stdout = False,
762 want_stderr = False,
763 haltOnFailure = True
764 ))
765
766 # find libc suffix
767 factory.addStep(SetPropertyFromCommand(
768 name = "libc",
769 property = "libc",
770 description = "Finding libc suffix",
771 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
772
773 # install build key
774 if usign_key is not None:
775 factory.addStep(StringDownload(
776 name = "dlkeybuildpub",
777 s = UsignSec2Pub(usign_key, usign_comment),
778 workerdest = "key-build.pub",
779 mode = 0o600,
780 ))
781
782 factory.addStep(StringDownload(
783 name = "dlkeybuild",
784 s = "# fake private key",
785 workerdest = "key-build",
786 mode = 0o600,
787 ))
788
789 factory.addStep(StringDownload(
790 name = "dlkeybuilducert",
791 s = "# fake certificate",
792 workerdest = "key-build.ucert",
793 mode = 0o600,
794 ))
795
796 # prepare dl
797 factory.addStep(ShellCommand(
798 name = "dldir",
799 description = "Preparing dl/",
800 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
801 logEnviron = False,
802 want_stdout = False
803 ))
804
805 # prepare tar
806 factory.addStep(ShellCommand(
807 name = "dltar",
808 description = "Building and installing GNU tar",
809 command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/tar/compile", "V=s"],
810 env = MakeEnv(tryccache=True),
811 haltOnFailure = True
812 ))
813
814 # populate dl
815 factory.addStep(ShellCommand(
816 name = "dlrun",
817 description = "Populating dl/",
818 command = ["make", Interpolate("-j%(prop:njobs)s"), "download", "V=s"],
819 env = MakeEnv(),
820 logEnviron = False,
821 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
822 ))
823
824 factory.addStep(ShellCommand(
825 name = "cleanbase",
826 description = "Cleaning base-files",
827 command=["make", "package/base-files/clean", "V=s"]
828 ))
829
830 # build
831 factory.addStep(ShellCommand(
832 name = "tools",
833 description = "Building and installing tools",
834 command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/install", "V=s"],
835 env = MakeEnv(tryccache=True),
836 haltOnFailure = True
837 ))
838
839 factory.addStep(ShellCommand(
840 name = "toolchain",
841 description = "Building and installing toolchain",
842 command=["make", Interpolate("-j%(prop:njobs)s"), "toolchain/install", "V=s"],
843 env = MakeEnv(),
844 haltOnFailure = True
845 ))
846
847 factory.addStep(ShellCommand(
848 name = "kmods",
849 description = "Building kmods",
850 command=["make", Interpolate("-j%(prop:njobs)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
851 env = MakeEnv(),
852 haltOnFailure = True
853 ))
854
855 # find kernel version
856 factory.addStep(SetPropertyFromCommand(
857 name = "kernelversion",
858 property = "kernelversion",
859 description = "Finding the effective Kernel version",
860 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
861 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
862 ))
863
864 factory.addStep(ShellCommand(
865 name = "pkgclean",
866 description = "Cleaning up package build",
867 command=["make", "package/cleanup", "V=s"]
868 ))
869
870 factory.addStep(ShellCommand(
871 name = "pkgbuild",
872 description = "Building packages",
873 command=["make", Interpolate("-j%(prop:njobs)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
874 env = MakeEnv(),
875 haltOnFailure = True
876 ))
877
878 factory.addStep(ShellCommand(
879 name = "pkginstall",
880 description = "Installing packages",
881 command=["make", Interpolate("-j%(prop:njobs)s"), "package/install", "V=s"],
882 env = MakeEnv(),
883 haltOnFailure = True
884 ))
885
886 factory.addStep(ShellCommand(
887 name = "pkgindex",
888 description = "Indexing packages",
889 command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
890 env = MakeEnv(),
891 haltOnFailure = True
892 ))
893
894 factory.addStep(ShellCommand(
895 name = "images",
896 description = "Building and installing images",
897 command=["make", Interpolate("-j%(prop:njobs)s"), "target/install", "V=s"],
898 env = MakeEnv(),
899 haltOnFailure = True
900 ))
901
902 factory.addStep(ShellCommand(
903 name = "buildinfo",
904 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
905 command = "make -j1 buildinfo V=s || true",
906 env = MakeEnv(),
907 haltOnFailure = True
908 ))
909
910 factory.addStep(ShellCommand(
911 name = "json_overview_image_info",
912 description = "Generate profiles.json in target folder",
913 command = "make -j1 json_overview_image_info V=s || true",
914 env = MakeEnv(),
915 haltOnFailure = True
916 ))
917
918 factory.addStep(ShellCommand(
919 name = "checksums",
920 description = "Calculating checksums",
921 command=["make", "-j1", "checksum", "V=s"],
922 env = MakeEnv(),
923 haltOnFailure = True
924 ))
925
926 if enable_kmod_archive:
927 factory.addStep(ShellCommand(
928 name = "kmoddir",
929 description = "Creating kmod directory",
930 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
931 haltOnFailure = True
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "kmodprepare",
936 description = "Preparing kmod archive",
937 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
938 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
939 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
940 haltOnFailure = True
941 ))
942
943 factory.addStep(ShellCommand(
944 name = "kmodindex",
945 description = "Indexing kmod archive",
946 command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
947 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
948 env = MakeEnv(),
949 haltOnFailure = True
950 ))
951
952 # sign
953 if ini.has_option("gpg", "key") or usign_key is not None:
954 factory.addStep(MasterShellCommand(
955 name = "signprepare",
956 description = "Preparing temporary signing directory",
957 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
958 haltOnFailure = True
959 ))
960
961 factory.addStep(ShellCommand(
962 name = "signpack",
963 description = "Packing files to sign",
964 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
965 haltOnFailure = True
966 ))
967
968 factory.addStep(FileUpload(
969 workersrc = "sign.tar.gz",
970 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
971 haltOnFailure = True
972 ))
973
974 factory.addStep(MasterShellCommand(
975 name = "signfiles",
976 description = "Signing files",
977 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
978 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
979 haltOnFailure = True
980 ))
981
982 factory.addStep(FileDownload(
983 name = "dlsigntargz",
984 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
985 workerdest = "sign.tar.gz",
986 haltOnFailure = True
987 ))
988
989 factory.addStep(ShellCommand(
990 name = "signunpack",
991 description = "Unpacking signed files",
992 command = ["tar", "-xzf", "sign.tar.gz"],
993 haltOnFailure = True
994 ))
995
	# --- Upload: remote directory skeleton ---------------------------------
	# Build the remote directory layout locally under tmp/upload/ and rsync
	# it first, so the later per-file uploads find their destination
	# directories already in place.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# For non-master (versioned) builds only: point <prefix>packages at the
	# shared ../packages-<basever> tree via a symlink.
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		# Per-kernel-version kmod archive directory for this target.
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	# Push the directory skeleton (and symlink) to the download server.
	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
		locks = NetLockUl,
	))
1029
	# --- Upload: compute the file list -------------------------------------
	# Download the remote sha256sums for this target to 'target-sha256sums'.
	# All failure flags are off: a missing remote file (e.g. the first ever
	# upload of this target) must not fail the build.
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# Install the helper that diffs remote vs. local sha256sums...
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	# ...and run it to produce 'rsynclist', the set of files whose checksums
	# changed (i.e. the files actually worth uploading).
	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	# Install the rsync wrapper used by all the upload/prune steps below.
	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))
1063
	# --- Upload: target files ----------------------------------------------
	# Upload new files and update existing ones, restricted to the list
	# computed by 'buildlist'.  /kmods/ is excluded here and handled by its
	# own step; --delay-updates keeps the remote tree consistent until the
	# transfer completes, with a per-target partial dir for resumability.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
	))

	# Delete remote files that no longer exist locally:
	# --existing --ignore-existing transfers nothing, so --delete is the
	# only effect of this pass.
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))

	if enable_kmod_archive:
		# Mirror the per-kernel-version kmod directory (full sync with
		# --delete, not limited by rsynclist).
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
				["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
				Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1101
	# --- Upload: source archives (optional) --------------------------------
	if rsync_src_url is not None:
		# List regular, non-empty files in dl/ fetched during this build
		# (newer than .config), skipping hidden files, hash files and
		# partial downloads (*.dl).
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only: source tarballs are immutable, so a size match is
		# enough to skip re-uploading; partial dir includes the worker name
		# since several workers may upload sources concurrently.
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
				[Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
			locks = NetLockUl,
		))
1120
1121 factory.addStep(ShellCommand(
1122 name = "df",
1123 description = "Reporting disk usage",
1124 command=["df", "-h", "."],
1125 env={'LC_ALL': 'C'},
1126 haltOnFailure = False,
1127 flunkOnFailure = False,
1128 warnOnFailure = False,
1129 alwaysRun = True
1130 ))
1131
1132 factory.addStep(ShellCommand(
1133 name = "du",
1134 description = "Reporting estimated file space usage",
1135 command=["du", "-sh", "."],
1136 env={'LC_ALL': 'C'},
1137 haltOnFailure = False,
1138 flunkOnFailure = False,
1139 warnOnFailure = False,
1140 alwaysRun = True
1141 ))
1142
1143 factory.addStep(ShellCommand(
1144 name = "ccachestat",
1145 description = "Reporting ccache stats",
1146 command=["ccache", "-s"],
1147 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1148 want_stderr = False,
1149 haltOnFailure = False,
1150 flunkOnFailure = False,
1151 warnOnFailure = False,
1152 alwaysRun = True,
1153 ))
1154
1155 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1156
1157 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1158 force_factory.addStep(steps.Trigger(
1159 name = "trigger_%s" % target,
1160 description = "Triggering %s build" % target,
1161 schedulerNames = [ "trigger_%s" % target ],
1162 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1163 doStepIf = IsTargetSelected(target)
1164 ))
1165
1166
####### STATUS TARGETS

# Web UI: enabled only when the ini file provides a bind address.  When a
# user/password pair is configured as well, the UI gets basic auth and all
# control endpoints are restricted to that single admin account.

if "status_bind" in inip1:
	c['www'] = {
		'port': inip1.get("status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	if "status_user" in inip1 and "status_password" in inip1:
		admin_user = inip1.get("status_user")
		c['www']['auth'] = util.UserPasswordAuth([
			(admin_user, inip1.get("status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[admin_user]) ]
		)
1191
# Optional IRC reporter: announces exceptions, breakages and recoveries in
# the configured channel.  Host, nickname and channel are all required;
# port defaults to 6667 and the password is optional.
c['services'] = []
if ini.has_section("irc"):
	iniirc = ini['irc']
	irc_host = iniirc.get("host", None)
	irc_port = iniirc.getint("port", 6667)
	irc_chan = iniirc.get("channel", None)
	irc_nick = iniirc.get("nickname", None)
	irc_pass = iniirc.get("password", None)

	if irc_host and irc_nick and irc_chan:
		c['services'].append(reporters.IRC(
			irc_host,
			irc_nick,
			port = irc_port,
			password = irc_pass,
			channels = [ irc_chan ],
			notify_events = [ 'exception', 'problem', 'recovery' ]
		))
1210
# Turn openwrt git URLs from change sources into clickable gitweb commit
# links in the UI.
c['revlink'] = util.RevlinkMatch([
	r'https://git.openwrt.org/openwrt/(.*).git'
	],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# Opt out of buildbot's anonymous usage reporting.
c['buildbotNetUsageData'] = None