phase1: rsync_src_url is always set
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is the buildmaster config file for OpenWrt phase1 builds. It must be
49 # installed as 'master.cfg' in the buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
55 raise ValueError("config.ini must contain [general], [phase1] and [rsync] sections")
56
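# Illustrative only -- a minimal config.ini skeleton covering the sections and
# option names read below; every value is a placeholder, not taken from a real
# deployment:
#
#   [general]
#   title = OpenWrt buildbot
#   title_url = https://example.org/
#   workdir = .
#
#   [phase1]
#   buildbot_url = https://example.org/buildbot/
#   port = 9989
#   status_bind = tcp:8010
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#   branch = master
#
#   [rsync]
#   binary_url = user@example.org::bin-upload
#   binary_password = secret
#   source_url = user@example.org::src-upload
#   source_password = secret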
57 inip1 = ini['phase1']
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
64 repo_branch = ini['repo'].get("branch", "master")
65
# rsync endpoints for uploading binaries and sources; the [rsync] section is
# mandatory, so both URLs are always set
rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
66 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
67
68 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
69 # rsync_bin_defopts += ["--contimeout=20"]
70
rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
71 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
72
73 #if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
74 # rsync_src_defopts += ["--contimeout=20"]
75
76 branches = {}
77
78 def ini_parse_branch(section):
79 b = {}
80 name = section.get("name")
81
82 if not name:
83 raise ValueError("missing 'name' in " + repr(section))
84 if name in branches:
85 raise ValueError("duplicate branch name in " + repr(section))
86
87 b["name"] = name
88 b["bin_url"] = section.get("binary_url")
89 b["bin_key"] = section.get("binary_password")
90
91 b["src_url"] = section.get("source_url")
92 b["src_key"] = section.get("source_password")
93
94 b["gpg_key"] = section.get("gpg_key")
95
96 b["usign_key"] = section.get("usign_key")
97 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
98 b["usign_comment"] = section.get("usign_comment", usign_comment)
99
100 b["config_seed"] = section.get("config_seed")
101
102 b["kmod_archive"] = section.getboolean("kmod_archive", False)
103
104 branches[name] = b
105 log.msg("Configured branch: {}".format(name))
106
107 # PB port can be either a numeric port or a connection string
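# (a numeric port such as 9989, or -- illustratively -- a Twisted endpoint
# string like "tcp:9989:interface=127.0.0.1")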
108 pb_port = inip1.get("port") or 9989
109
110 # This is the dictionary that the buildmaster pays attention to. We also use
111 # a shorter alias to save typing.
112 c = BuildmasterConfig = {}
113
114 ####### PROJECT IDENTITY
115
116 # the 'title' string will appear at the top of this buildbot
117 # installation's html.WebStatus home page (linked to the
118 # 'titleURL') and is embedded in the title of the waterfall HTML page.
119
120 c['title'] = ini['general'].get("title")
121 c['titleURL'] = ini['general'].get("title_url")
122
123 # the 'buildbotURL' string should point to the location where the buildbot's
124 # internal web server (usually the html.WebStatus page) is visible. This
125 # typically uses the port number set in the Waterfall 'status' entry, but
126 # with an externally-visible host name which the buildbot cannot figure out
127 # without some help.
128
129 c['buildbotURL'] = inip1.get("buildbot_url")
130
131 ####### BUILDWORKERS
132
133 # The 'workers' list defines the set of recognized buildworkers. Each element is
134 # a Worker object, specifying a unique worker name and password. The same
135 # worker name and password must be configured on the worker.
136
137 c['workers'] = []
138 NetLocks = dict()
139
140 for section in ini.sections():
141 if section.startswith("branch "):
142 ini_parse_branch(ini[section])
143
144 if section.startswith("worker "):
145 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
146 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
147 sl_props = { 'dl_lock':None, 'ul_lock':None }
148 name = ini.get(section, "name")
149 password = ini.get(section, "password")
150 if ini.has_option(section, "dl_lock"):
151 lockname = ini.get(section, "dl_lock")
152 sl_props['dl_lock'] = lockname
153 if lockname not in NetLocks:
154 NetLocks[lockname] = locks.MasterLock(lockname)
155 if ini.has_option(section, "ul_lock"):
156 lockname = ini.get(section, "ul_lock")
157 sl_props['ul_lock'] = lockname
158 if lockname not in NetLocks:
159 NetLocks[lockname] = locks.MasterLock(lockname)
160 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
161
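# Illustrative only -- the shape of the "branch" and "worker" sections parsed
# above; all names and secrets below are placeholders:
#
#   [branch master]
#   name = master
#   binary_url = user@example.org::bin-upload
#   binary_password = secret
#   source_url = user@example.org::src-upload
#   source_password = secret
#   usign_key = <base64 usign secret key>
#   config_seed = CONFIG_BUILDBOT=y
#   kmod_archive = true
#
#   [worker 1]
#   name = worker-01
#   password = secret
#   phase = 1
#   dl_lock = dl-mirror-1
#   ul_lock = ul-mirror-1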
162 c['protocols'] = {'pb': {'port': pb_port}}
163
164 # coalesce builds
165 c['collapseRequests'] = True
166
167 # Reduce amount of backlog data
168 c['configurators'] = [util.JanitorConfigurator(
169 logHorizon=timedelta(days=3),
170 hour=6,
171 )]
172
173 @defer.inlineCallbacks
174 def getNewestCompleteTime(bldr):
175 """Returns the complete_at of the latest completed and not SKIPPED
176 build request for this builder, or None if there are no such build
177 requests. We need to filter out SKIPPED requests because we're
178 using collapseRequests=True, which unfortunately marks all
179 previous requests as complete when a new buildset is created.
180
181 @returns: datetime instance or None, via Deferred
182 """
183
184 bldrid = yield bldr.getBuilderId()
185 completed = yield bldr.master.data.get(
186 ('builders', bldrid, 'buildrequests'),
187 [
188 resultspec.Filter('complete', 'eq', [True]),
189 resultspec.Filter('results', 'ne', [results.SKIPPED]),
190 ],
191 order=['-complete_at'], limit=1)
192 if not completed:
193 return
194
195 complete_at = completed[0]['complete_at']
196
197 last_build = yield bldr.master.data.get(
198 ('builds', ),
199 [
200 resultspec.Filter('builderid', 'eq', [bldrid]),
201 ],
202 order=['-started_at'], limit=1)
203
204 if last_build and last_build[0]:
205 last_complete_at = last_build[0]['complete_at']
206 if last_complete_at and (last_complete_at > complete_at):
207 return last_complete_at
208
209 return complete_at
210
211 @defer.inlineCallbacks
212 def prioritizeBuilders(master, builders):
213 """Returns sorted list of builders by their last timestamp of completed and
214 not skipped build.
215
216 @returns: list of sorted builders
217 """
218
219 def is_building(bldr):
220 return bool(bldr.building) or bool(bldr.old_building)
221
222 def bldr_info(bldr):
223 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
224 d.addCallback(lambda complete_at: (complete_at, bldr))
225 return d
226
227 def bldr_sort(item):
228 (complete_at, bldr) = item
229
230 if not complete_at:
231 date = datetime.min
232 complete_at = date.replace(tzinfo=tzutc())
233
234 if is_building(bldr):
235 date = datetime.max
236 complete_at = date.replace(tzinfo=tzutc())
237
238 return (complete_at, bldr.name)
239
240 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
241 results.sort(key=bldr_sort)
242
243 for r in results:
244 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
245
246 return [r[1] for r in results]
247
248 c['prioritizeBuilders'] = prioritizeBuilders
249
250 ####### CHANGESOURCES
251
252
253 # find targets
254 targets = [ ]
255
256 def populateTargets():
257 sourcegit = work_dir + '/source.git'
258 if os.path.isdir(sourcegit):
259 subprocess.call(["rm", "-rf", sourcegit])
260
261 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, sourcegit])
262
263 os.makedirs(sourcegit + '/tmp', exist_ok=True)
264 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
265 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
266
267 while True:
268 line = findtargets.stdout.readline()
269 if not line:
270 break
271 ta = line.decode().strip().split(' ')
272 targets.append(ta[0])
273
274 subprocess.call(["rm", "-rf", sourcegit])
275
276 populateTargets()
277
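# Illustrative only -- dump-target-info.pl emits one "target/subtarget ..." entry
# per line (e.g. "ath79/generic mips_24kc"); only the first whitespace-separated
# field is kept, so targets ends up as a list like ["ath79/generic", "x86/64", ...].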
278 # the 'change_source' setting tells the buildmaster how it should find out
279 # about source code changes. Here we poll the OpenWrt source repository.
280
281 c['change_source'] = []
282 c['change_source'].append(GitPoller(
283 repo_url,
284 workdir=work_dir+'/work.git', branch=repo_branch,
285 pollinterval=300))
286
287 ####### SCHEDULERS
288
289 # Configure the Schedulers, which decide how to react to incoming changes. In this
290 # case, kick off a build for every configured target
291
292 class TagChoiceParameter(BaseParameter):
293 spec_attributes = ["strict", "choices"]
294 type = "list"
295 strict = True
296
297 def __init__(self, name, label=None, **kw):
298 super().__init__(name, label, **kw)
299 self._choice_list = []
300
301 @property
302 def choices(self):
303 taglist = []
304 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
305
306 if basever:
307 findtags = subprocess.Popen(
308 ['git', 'ls-remote', '--tags', repo_url],
309 stdout = subprocess.PIPE)
310
311 while True:
312 line = findtags.stdout.readline()
313
314 if not line:
315 break
316
317 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
318
319 if tagver and tagver[1].find(basever[1]) == 0:
320 taglist.append(tagver[1])
321
322 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
323 taglist.insert(0, '')
324
325 self._choice_list = taglist
326
327 return self._choice_list
328
329 def parse_from_arg(self, s):
330 if self.strict and s not in self._choice_list:
331 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
332 return s
333
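# Illustrative only -- for a release branch such as "openwrt-21.02" the property
# above offers the matching release tags (e.g. "21.02.3", "21.02.0-rc4"), newest
# first; the "-z" appended in the sort key just makes a final release sort ahead
# of its -rc tags. On "master" no base version is detected, so no tag can be chosen.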
334 c['schedulers'] = []
335 c['schedulers'].append(SingleBranchScheduler(
336 name = "all",
337 change_filter = filter.ChangeFilter(branch=repo_branch),
338 treeStableTimer = 60,
339 builderNames = targets))
340
341 c['schedulers'].append(ForceScheduler(
342 name = "force",
343 buttonName = "Force builds",
344 label = "Force build details",
345 builderNames = [ "00_force_build" ],
346
347 codebases = [
348 util.CodebaseParameter(
349 "",
350 label = "Repository",
351 branch = util.FixedParameter(name = "branch", default = ""),
352 revision = util.FixedParameter(name = "revision", default = ""),
353 repository = util.FixedParameter(name = "repository", default = ""),
354 project = util.FixedParameter(name = "project", default = "")
355 )
356 ],
357
358 reason = util.StringParameter(
359 name = "reason",
360 label = "Reason",
361 default = "Trigger build",
362 required = True,
363 size = 80
364 ),
365
366 properties = [
367 util.NestedParameter(
368 name="options",
369 label="Build Options",
370 layout="vertical",
371 fields=[
372 util.ChoiceStringParameter(
373 name = "target",
374 label = "Build target",
375 default = "all",
376 choices = [ "all" ] + targets
377 ),
378 TagChoiceParameter(
379 name = "tag",
380 label = "Build tag",
381 default = ""
382 )
383 ]
384 )
385 ]
386 ))
387
388 ####### BUILDERS
389
390 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
391 # what steps, and which workers can execute them. Note that any particular build will
392 # only take place on one worker.
393
394 def IsTaggingRequested(step):
395 val = step.getProperty("tag")
396 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
397 return True
398 else:
399 return False
400
401 def IsNoMasterBuild(step):
402 return step.getProperty("branch") != "master"
403
404 def GetBaseVersion(branch):
405 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
406 return branch.split('-')[1]
407 else:
408 return "master"
409
410 @properties.renderer
411 def GetVersionPrefix(props):
412 branch = props.getProperty("branch")
413 basever = GetBaseVersion(branch)
414 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
415 return "%s/" % props["tag"]
416 elif basever != "master":
417 return "%s-SNAPSHOT/" % basever
418 else:
419 return ""
420
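# Illustrative examples of the prefix rendered above: a forced build of tag
# "21.02.3" uploads under "21.02.3/", a plain build on branch "openwrt-21.02"
# under "21.02-SNAPSHOT/", and a master build directly under the root ("").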
421 def GetNextBuild(builder, requests):
422 for r in requests:
423 if r.properties and r.properties.hasProperty("tag"):
424 return r
425
426 r = requests[0]
427 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
428 return r
429
430 def MakeEnv(overrides=None, tryccache=False):
431 env = {
432 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
433 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
434 }
435 if tryccache:
436 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
437 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
438 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
439 else:
440 env['CC'] = env['CCC']
441 env['CXX'] = env['CCXX']
442 env['CCACHE'] = ''
443 if overrides is not None:
444 env.update(overrides)
445 return env
446
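# Illustrative only -- with tryccache=True the resulting environment is roughly:
#   CC  = <builddir>/ccache_cc.sh   (wrapper running "$CCACHE $CCC ...")
#   CXX = <builddir>/ccache_cxx.sh  (wrapper running "$CCACHE $CCXX ...")
#   CCC/CCXX = the detected gcc/g++ commands, CCACHE = path to ccache (if any)
# whereas with tryccache=False CC/CXX point at the compilers directly.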
447 @properties.renderer
448 def NetLockDl(props):
449 lock = None
450 if props.hasProperty("dl_lock"):
451 lock = NetLocks[props["dl_lock"]]
452 if lock is not None:
453 return [lock.access('exclusive')]
454 else:
455 return []
456
457 @properties.renderer
458 def NetLockUl(props):
459 lock = None
460 if props.hasProperty("ul_lock"):
461 lock = NetLocks[props["ul_lock"]]
462 if lock is not None:
463 return [lock.access('exclusive')]
464 else:
465 return []
466
467 @util.renderer
468 def TagPropertyValue(props):
469 if props.hasProperty("options"):
470 options = props.getProperty("options")
471 if type(options) is dict:
472 return options.get("tag")
473 return None
474
475 def IsTargetSelected(target):
476 def CheckTargetProperty(step):
477 try:
478 options = step.getProperty("options")
479 if type(options) is dict:
480 selected_target = options.get("target", "all")
481 if selected_target != "all" and selected_target != target:
482 return False
483 except KeyError:
484 pass
485
486 return True
487
488 return CheckTargetProperty
489
490 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
491 try:
492 seckey = base64.b64decode(seckey)
493 except Exception:
494 return None
495
496 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
497 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
498
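# Sketch of the assumption behind the slicing above: a base64-decoded usign/signify
# secret key carries the algorithm id in bytes 0-1, the key number in bytes 32-39
# and the public half of the Ed25519 key pair from byte 72 onward, so concatenating
# those three pieces (re-encoded) yields the matching public key block.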
499
500 c['builders'] = []
501
502 dlLock = locks.WorkerLock("worker_dl")
503
504 workerNames = [ ]
505
506 for worker in c['workers']:
507 workerNames.append(worker.workername)
508
509 force_factory = BuildFactory()
510
511 c['builders'].append(BuilderConfig(
512 name = "00_force_build",
513 workernames = workerNames,
514 factory = force_factory))
515
516 for target in targets:
517 ts = target.split('/')
518
519 factory = BuildFactory()
520
521 # setup shared work directory if required
522 factory.addStep(ShellCommand(
523 name = "sharedwd",
524 description = "Setting up shared work directory",
525 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
526 workdir = ".",
527 haltOnFailure = True))
528
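# The step above replaces the per-builder work directory with a symlink to a
# shared "../shared-workdir", so every target built on the same worker reuses
# one checked-out tree and its build state instead of keeping a copy per builder.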
529 # find number of cores
530 factory.addStep(SetPropertyFromCommand(
531 name = "nproc",
532 property = "nproc",
533 description = "Finding number of CPUs",
534 command = ["nproc"]))
535
536 # find gcc and g++ compilers
537 factory.addStep(FileDownload(
538 name = "dlfindbinpl",
539 mastersrc = scripts_dir + '/findbin.pl',
540 workerdest = "../findbin.pl",
541 mode = 0o755))
542
543 factory.addStep(SetPropertyFromCommand(
544 name = "gcc",
545 property = "cc_command",
546 description = "Finding gcc command",
547 command = [
548 "../findbin.pl", "gcc", "", "",
549 ],
550 haltOnFailure = True))
551
552 factory.addStep(SetPropertyFromCommand(
553 name = "g++",
554 property = "cxx_command",
555 description = "Finding g++ command",
556 command = [
557 "../findbin.pl", "g++", "", "",
558 ],
559 haltOnFailure = True))
560
561 # see if ccache is available
562 factory.addStep(SetPropertyFromCommand(
563 property = "ccache_command",
564 command = ["which", "ccache"],
565 description = "Testing for ccache command",
566 haltOnFailure = False,
567 flunkOnFailure = False,
568 warnOnFailure = False,
569 ))
570
571 # Workaround bug when switching from a checked out tag back to a branch
572 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
573 factory.addStep(ShellCommand(
574 name = "gitcheckout",
575 description = "Ensure that Git HEAD is sane",
576 command = Interpolate("if [ -d .git ]; then git checkout -f %(prop:branch)s && git branch --set-upstream-to origin/%(prop:branch)s || rm -fr .git; else exit 0; fi"),
577 haltOnFailure = True))
578
579 # check out the source
580 # Git() runs:
581 # if repo doesn't exist: 'git clone repourl'
582 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Both only apply with mode='full'.
583 # 'git fetch -t repourl branch; git reset --hard revision'
584 factory.addStep(Git(
585 name = "git",
586 repourl = repo_url,
587 mode = 'full',
588 method = 'fresh',
589 locks = NetLockDl,
590 haltOnFailure = True,
591 ))
592
593 # update remote refs
594 factory.addStep(ShellCommand(
595 name = "fetchrefs",
596 description = "Fetching Git remote refs",
597 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
598 haltOnFailure = True
599 ))
600
601 # switch to tag
602 factory.addStep(ShellCommand(
603 name = "switchtag",
604 description = "Checking out Git tag",
605 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
606 haltOnFailure = True,
607 doStepIf = IsTaggingRequested
608 ))
609
610 # Verify that Git HEAD points to a tag or branch
611 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
612 factory.addStep(ShellCommand(
613 name = "gitverify",
614 description = "Ensure that Git HEAD is pointing to a branch or tag",
615 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
616 haltOnFailure = True))
617
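# The check above accepts HEAD only when it sits on a named branch or, if
# detached (e.g. after the tag checkout above), when it resolves exactly to a
# release tag of the form "vNN.x"; otherwise the build halts.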
618 factory.addStep(ShellCommand(
619 name = "rmtmp",
620 description = "Remove tmp folder",
621 command=["rm", "-rf", "tmp/"]))
622
623 # feed
624 factory.addStep(ShellCommand(
625 name = "rmfeedlinks",
626 description = "Remove feed symlinks",
627 command=["rm", "-rf", "package/feeds/"]))
628
629 factory.addStep(StringDownload(
630 name = "ccachecc",
631 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
632 workerdest = "../ccache_cc.sh",
633 mode = 0o755,
634 ))
635
636 factory.addStep(StringDownload(
637 name = "ccachecxx",
638 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
639 workerdest = "../ccache_cxx.sh",
640 mode = 0o755,
641 ))
642
643 # feed
644 factory.addStep(ShellCommand(
645 name = "updatefeeds",
646 description = "Updating feeds",
647 command=["./scripts/feeds", "update"],
648 env = MakeEnv(tryccache=True),
649 haltOnFailure = True,
650 locks = NetLockDl,
651 ))
652
653 # feed
654 factory.addStep(ShellCommand(
655 name = "installfeeds",
656 description = "Installing feeds",
657 command=["./scripts/feeds", "install", "-a"],
658 env = MakeEnv(tryccache=True),
659 haltOnFailure = True
660 ))
661
662 # seed config
663 if config_seed is not None:
664 factory.addStep(StringDownload(
665 name = "dlconfigseed",
666 s = config_seed + '\n',
667 workerdest = ".config",
668 mode = 0o644
669 ))
670
671 # configure
672 factory.addStep(ShellCommand(
673 name = "newconfig",
674 description = "Seeding .config",
675 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
676 ))
677
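# Illustrative only -- for the hypothetical target "ath79/generic" the step above
# appends something like:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y    (or =n when no usign key is configured)
# to .config before "make defconfig" expands it.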
678 factory.addStep(ShellCommand(
679 name = "delbin",
680 description = "Removing output directory",
681 command = ["rm", "-rf", "bin/"]
682 ))
683
684 factory.addStep(ShellCommand(
685 name = "defconfig",
686 description = "Populating .config",
687 command = ["make", "defconfig"],
688 env = MakeEnv()
689 ))
690
691 # check arch
692 factory.addStep(ShellCommand(
693 name = "checkarch",
694 description = "Checking architecture",
695 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
696 logEnviron = False,
697 want_stdout = False,
698 want_stderr = False,
699 haltOnFailure = True
700 ))
701
702 # find libc suffix
703 factory.addStep(SetPropertyFromCommand(
704 name = "libc",
705 property = "libc",
706 description = "Finding libc suffix",
707 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
708
709 # install build key
710 if usign_key is not None:
711 factory.addStep(StringDownload(
712 name = "dlkeybuildpub",
713 s = UsignSec2Pub(usign_key, usign_comment),
714 workerdest = "key-build.pub",
715 mode = 0o600,
716 ))
717
718 factory.addStep(StringDownload(
719 name = "dlkeybuild",
720 s = "# fake private key",
721 workerdest = "key-build",
722 mode = 0o600,
723 ))
724
725 factory.addStep(StringDownload(
726 name = "dlkeybuilducert",
727 s = "# fake certificate",
728 workerdest = "key-build.ucert",
729 mode = 0o600,
730 ))
731
732 # prepare dl
733 factory.addStep(ShellCommand(
734 name = "dldir",
735 description = "Preparing dl/",
736 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
737 logEnviron = False,
738 want_stdout = False
739 ))
740
741 # prepare tar
742 factory.addStep(ShellCommand(
743 name = "dltar",
744 description = "Building and installing GNU tar",
745 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
746 env = MakeEnv(tryccache=True),
747 haltOnFailure = True
748 ))
749
750 # populate dl
751 factory.addStep(ShellCommand(
752 name = "dlrun",
753 description = "Populating dl/",
754 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
755 env = MakeEnv(),
756 logEnviron = False,
757 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
758 ))
759
760 factory.addStep(ShellCommand(
761 name = "cleanbase",
762 description = "Cleaning base-files",
763 command=["make", "package/base-files/clean", "V=s"]
764 ))
765
766 # build
767 factory.addStep(ShellCommand(
768 name = "tools",
769 description = "Building and installing tools",
770 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
771 env = MakeEnv(tryccache=True),
772 haltOnFailure = True
773 ))
774
775 factory.addStep(ShellCommand(
776 name = "toolchain",
777 description = "Building and installing toolchain",
778 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
779 env = MakeEnv(),
780 haltOnFailure = True
781 ))
782
783 factory.addStep(ShellCommand(
784 name = "kmods",
785 description = "Building kmods",
786 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
787 env = MakeEnv(),
788 haltOnFailure = True
789 ))
790
791 # find kernel version
792 factory.addStep(SetPropertyFromCommand(
793 name = "kernelversion",
794 property = "kernelversion",
795 description = "Finding the effective Kernel version",
796 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
797 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
798 ))
799
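# Illustrative only -- the resulting "kernelversion" property is the kernel
# version, local release and vermagic joined with dashes (e.g. something like
# "5.4.154-1-<vermagic hash>"); it is used below to name the kmods/ archive
# directories.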
800 factory.addStep(ShellCommand(
801 name = "pkgclean",
802 description = "Cleaning up package build",
803 command=["make", "package/cleanup", "V=s"]
804 ))
805
806 factory.addStep(ShellCommand(
807 name = "pkgbuild",
808 description = "Building packages",
809 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
810 env = MakeEnv(),
811 haltOnFailure = True
812 ))
813
814 factory.addStep(ShellCommand(
815 name = "pkginstall",
816 description = "Installing packages",
817 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
818 env = MakeEnv(),
819 haltOnFailure = True
820 ))
821
822 factory.addStep(ShellCommand(
823 name = "pkgindex",
824 description = "Indexing packages",
825 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
826 env = MakeEnv(),
827 haltOnFailure = True
828 ))
829
830 factory.addStep(ShellCommand(
831 name = "images",
832 description = "Building and installing images",
833 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
834 env = MakeEnv(),
835 haltOnFailure = True
836 ))
837
838 factory.addStep(ShellCommand(
839 name = "buildinfo",
840 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
841 command = "make -j1 buildinfo V=s || true",
842 env = MakeEnv(),
843 haltOnFailure = True
844 ))
845
846 factory.addStep(ShellCommand(
847 name = "json_overview_image_info",
848 description = "Generate profiles.json in target folder",
849 command = "make -j1 json_overview_image_info V=s || true",
850 env = MakeEnv(),
851 haltOnFailure = True
852 ))
853
854 factory.addStep(ShellCommand(
855 name = "checksums",
856 description = "Calculating checksums",
857 command=["make", "-j1", "checksum", "V=s"],
858 env = MakeEnv(),
859 haltOnFailure = True
860 ))
861
862 if enable_kmod_archive:
863 factory.addStep(ShellCommand(
864 name = "kmoddir",
865 description = "Creating kmod directory",
866 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
867 haltOnFailure = True
868 ))
869
870 factory.addStep(ShellCommand(
871 name = "kmodprepare",
872 description = "Preparing kmod archive",
873 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
874 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
875 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
876 haltOnFailure = True
877 ))
878
879 factory.addStep(ShellCommand(
880 name = "kmodindex",
881 description = "Indexing kmod archive",
882 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
883 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
884 env = MakeEnv(),
885 haltOnFailure = True
886 ))
887
888 # sign
889 if ini.has_option("gpg", "key") or usign_key is not None:
890 factory.addStep(MasterShellCommand(
891 name = "signprepare",
892 description = "Preparing temporary signing directory",
893 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
894 haltOnFailure = True
895 ))
896
897 factory.addStep(ShellCommand(
898 name = "signpack",
899 description = "Packing files to sign",
900 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
901 haltOnFailure = True
902 ))
903
904 factory.addStep(FileUpload(
905 workersrc = "sign.tar.gz",
906 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
907 haltOnFailure = True
908 ))
909
910 factory.addStep(MasterShellCommand(
911 name = "signfiles",
912 description = "Signing files",
913 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
914 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
915 haltOnFailure = True
916 ))
917
918 factory.addStep(FileDownload(
919 name = "dlsigntargz",
920 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
921 workerdest = "sign.tar.gz",
922 haltOnFailure = True
923 ))
924
925 factory.addStep(ShellCommand(
926 name = "signunpack",
927 description = "Unpacking signed files",
928 command = ["tar", "-xzf", "sign.tar.gz"],
929 haltOnFailure = True
930 ))
931
932 # upload
933 factory.addStep(ShellCommand(
934 name = "dirprepare",
935 description = "Preparing upload directory structure",
936 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
937 haltOnFailure = True
938 ))
939
940 factory.addStep(ShellCommand(
941 name = "linkprepare",
942 description = "Preparing repository symlink",
943 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
944 doStepIf = IsNoMasterBuild,
945 haltOnFailure = True
946 ))
947
948 if enable_kmod_archive:
949 factory.addStep(ShellCommand(
950 name = "kmoddirprepare",
951 description = "Preparing kmod archive upload directory",
952 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
953 haltOnFailure = True
954 ))
955
956 factory.addStep(ShellCommand(
957 name = "dirupload",
958 description = "Uploading directory structure",
959 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
960 env={'RSYNC_PASSWORD': rsync_bin_key},
961 haltOnFailure = True,
962 logEnviron = False,
963 locks = NetLockUl,
964 ))
965
966 # download remote sha256sums to 'target-sha256sums'
967 factory.addStep(ShellCommand(
968 name = "target-sha256sums",
969 description = "Fetching remote sha256sums for target",
970 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
971 env={'RSYNC_PASSWORD': rsync_bin_key},
972 logEnviron = False,
973 haltOnFailure = False,
974 flunkOnFailure = False,
975 warnOnFailure = False,
976 ))
977
978 # build list of files to upload
979 factory.addStep(FileDownload(
980 name = "dlsha2rsyncpl",
981 mastersrc = scripts_dir + '/sha2rsync.pl',
982 workerdest = "../sha2rsync.pl",
983 mode = 0o755,
984 ))
985
986 factory.addStep(ShellCommand(
987 name = "buildlist",
988 description = "Building list of files to upload",
989 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
990 haltOnFailure = True,
991 ))
992
993 factory.addStep(FileDownload(
994 name = "dlrsync.sh",
995 mastersrc = scripts_dir + '/rsync.sh',
996 workerdest = "../rsync.sh",
997 mode = 0o755
998 ))
999
1000 # upload new files and update existing ones
1001 factory.addStep(ShellCommand(
1002 name = "targetupload",
1003 description = "Uploading target files",
1004 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1005 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1006 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1007 env={'RSYNC_PASSWORD': rsync_bin_key},
1008 haltOnFailure = True,
1009 logEnviron = False,
1010 ))
1011
1012 # delete files which don't exist locally
1013 factory.addStep(ShellCommand(
1014 name = "targetprune",
1015 description = "Pruning target files",
1016 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1017 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1018 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1019 env={'RSYNC_PASSWORD': rsync_bin_key},
1020 haltOnFailure = True,
1021 logEnviron = False,
1022 locks = NetLockUl,
1023 ))
1024
1025 if enable_kmod_archive:
1026 factory.addStep(ShellCommand(
1027 name = "kmodupload",
1028 description = "Uploading kmod archive",
1029 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1030 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1031 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1032 env={'RSYNC_PASSWORD': rsync_bin_key},
1033 haltOnFailure = True,
1034 logEnviron = False,
1035 locks = NetLockUl,
1036 ))
1037
1038 factory.addStep(ShellCommand(
1039 name = "sourcelist",
1040 description = "Finding source archives to upload",
1041 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1042 haltOnFailure = True
1043 ))
1044
1045 factory.addStep(ShellCommand(
1046 name = "sourceupload",
1047 description = "Uploading source archives",
1048 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1049 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1050 env={'RSYNC_PASSWORD': rsync_src_key},
1051 haltOnFailure = True,
1052 logEnviron = False,
1053 locks = NetLockUl,
1054 ))
1055
1056 factory.addStep(ShellCommand(
1057 name = "df",
1058 description = "Reporting disk usage",
1059 command=["df", "-h", "."],
1060 env={'LC_ALL': 'C'},
1061 haltOnFailure = False,
1062 flunkOnFailure = False,
1063 warnOnFailure = False,
1064 alwaysRun = True
1065 ))
1066
1067 factory.addStep(ShellCommand(
1068 name = "du",
1069 description = "Reporting estimated file space usage",
1070 command=["du", "-sh", "."],
1071 env={'LC_ALL': 'C'},
1072 haltOnFailure = False,
1073 flunkOnFailure = False,
1074 warnOnFailure = False,
1075 alwaysRun = True
1076 ))
1077
1078 factory.addStep(ShellCommand(
1079 name = "ccachestat",
1080 description = "Reporting ccache stats",
1081 command=["ccache", "-s"],
1082 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1083 want_stderr = False,
1084 haltOnFailure = False,
1085 flunkOnFailure = False,
1086 warnOnFailure = False,
1087 alwaysRun = True,
1088 ))
1089
1090 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1091
1092 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1093 force_factory.addStep(steps.Trigger(
1094 name = "trigger_%s" % target,
1095 description = "Triggering %s build" % target,
1096 schedulerNames = [ "trigger_%s" % target ],
1097 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1098 doStepIf = IsTargetSelected(target)
1099 ))
1100
1101
1102 ####### STATUS TARGETS
1103
1104 # The status targets below define how build results are reported: the web
1105 # UI configured in c['www'] and reporter services (such as the IRC bot)
1106 # appended to c['services'].
1107
1108 if "status_bind" in inip1:
1109 c['www'] = {
1110 'port': inip1.get("status_bind"),
1111 'plugins': {
1112 'waterfall_view': True,
1113 'console_view': True,
1114 'grid_view': True
1115 }
1116 }
1117
1118 if "status_user" in inip1 and "status_password" in inip1:
1119 c['www']['auth'] = util.UserPasswordAuth([
1120 (inip1.get("status_user"), inip1.get("status_password"))
1121 ])
1122 c['www']['authz'] = util.Authz(
1123 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1124 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1125 )
1126
1127 c['services'] = []
1128 if ini.has_section("irc"):
1129 iniirc = ini['irc']
1130 irc_host = iniirc.get("host", None)
1131 irc_port = iniirc.getint("port", 6667)
1132 irc_chan = iniirc.get("channel", None)
1133 irc_nick = iniirc.get("nickname", None)
1134 irc_pass = iniirc.get("password", None)
1135
1136 if irc_host and irc_nick and irc_chan:
1137 irc = reporters.IRC(irc_host, irc_nick,
1138 port = irc_port,
1139 password = irc_pass,
1140 channels = [ irc_chan ],
1141 notify_events = [ 'exception', 'problem', 'recovery' ]
1142 )
1143
1144 c['services'].append(irc)
1145
1146 c['revlink'] = util.RevlinkMatch([
1147 r'https://git.openwrt.org/openwrt/(.*).git'
1148 ],
1149 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1150
1151 ####### DB URL
1152
1153 c['db'] = {
1154 # This specifies what database buildbot uses to store its state. You can leave
1155 # this at its default for all but the largest installations.
1156 'db_url' : "sqlite:///state.sqlite",
1157 }
1158
1159 c['buildbotNetUsageData'] = None