phase1: move phase1 specific options into separate section
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import ConfigParser
9
10 from buildbot import locks
11
12 # This is the phase1 buildmaster config file. It must be installed as
13 # 'master.cfg' in the buildmaster's base directory.
14
15 ini = ConfigParser.ConfigParser()
16 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
17
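# For orientation, a minimal (hypothetical) config.ini sketch showing the
# [general] and [phase1] options read below; all values are placeholders:
#
#   [general]
#   title = Example Buildbot
#   title_url = https://buildbot.example.org/
#   workdir = /srv/buildmaster
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   port = 9989
#   status_bind = 8010
#
# Further sections ([repo], [rsync], [usign], per-slave sections, ...) are
# sketched next to the code that consumes them further down.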
18 # This is the dictionary that the buildmaster pays attention to. We also use
19 # a shorter alias to save typing.
20 c = BuildmasterConfig = {}
21
22 ####### PROJECT IDENTITY
23
24 # the 'title' string will appear at the top of this buildbot
25 # installation's html.WebStatus home page (linked to the
26 # 'titleURL') and is embedded in the title of the waterfall HTML page.
27
28 c['title'] = ini.get("general", "title")
29 c['titleURL'] = ini.get("general", "title_url")
30
31 # the 'buildbotURL' string should point to the location where the buildbot's
32 # internal web server (the html.WebStatus page) is visible. This typically
33 # uses the port configured in the 'status_bind' option below, but with an
34 # externally visible host name which the buildbot cannot figure out
35 # without some help.
36
37 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
38
39 ####### BUILDSLAVES
40
41 # The 'slaves' list defines the set of recognized buildslaves. Each element is
42 # a BuildSlave object, specifying a unique slave name and password. The same
43 # slave name and password must be configured on the slave.
44 from buildbot.buildslave import BuildSlave
45
46 slave_port = 9989
47
48 if ini.has_option("phase1", "port"):
49 slave_port = ini.getint("phase1", "port")
50
51 c['slaves'] = []
52 NetLocks = dict()
53
54 for section in ini.sections():
55 if section.startswith("slave "):
56 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
57 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
58 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
59 name = ini.get(section, "name")
60 password = ini.get(section, "password")
61 max_builds = 1
62 if ini.has_option(section, "builds"):
63 max_builds = ini.getint(section, "builds")
64 sl_props['max_builds'] = max_builds
65 if ini.has_option(section, "cleanup"):
66 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
67 if ini.has_option(section, "dl_lock"):
68 lockname = ini.get(section, "dl_lock")
69 sl_props['dl_lock'] = lockname
70 if lockname not in NetLocks:
71 NetLocks[lockname] = locks.MasterLock(lockname)
72 if ini.has_option(section, "ul_lock"):
73 lockname = ini.get(section, "ul_lock")
74 sl_props['ul_lock'] = lockname
75 if lockname not in NetLocks:
76 NetLocks[lockname] = locks.MasterLock(lockname)
77 if ini.has_option(section, "shared_wd"):
78 shared_wd = ini.getboolean(section, "shared_wd")
79 sl_props['shared_wd'] = shared_wd
80 if shared_wd and (max_builds != 1):
81 raise ValueError('max_builds must be 1 with shared workdir!')
82 c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
83
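# Example of a (hypothetical) slave section consumed by the loop above; only
# "name" and "password" are mandatory, everything else is optional:
#
#   [slave example-builder-01]
#   name = example-builder-01
#   password = secret
#   phase = 1
#   builds = 2
#   cleanup = false
#   dl_lock = dl-mirror-1
#   ul_lock = ul-mirror-1
#   shared_wd = false
#
# Note that shared_wd = true is only accepted together with builds = 1.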
84 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
85 # This must match the value configured into the buildslaves (with their
86 # --master option)
87 c['slavePortnum'] = slave_port
88
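# On the slave side the same name/password pair and this port are used when
# creating the buildslave, e.g. with the buildbot-slave 0.8.x CLI (host name
# and paths assumed):
#
#   buildslave create-slave ./slave buildmaster.example.org:9989 example-builder-01 secret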
89 # coalesce builds
90 c['mergeRequests'] = True
91
92 # Reduce amount of backlog data
93 c['buildHorizon'] = 30
94 c['logHorizon'] = 20
95
96 ####### CHANGESOURCES
97
98 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
99 scripts_dir = os.path.abspath("../scripts")
100 tree_expire = 0
101 other_builds = 0
102 cc_version = None
103
104 cc_command = "gcc"
105 cxx_command = "g++"
106
107 config_seed = ""
108
109 git_ssh = False
110 git_ssh_key = None
111
112 if ini.has_option("phase1", "expire"):
113 tree_expire = ini.getint("phase1", "expire")
114
115 if ini.has_option("phase1", "other_builds"):
116 other_builds = ini.getint("phase1", "other_builds")
117
118 if ini.has_option("phase1", "cc_version"):
119 cc_version = ini.get("phase1", "cc_version").split()
120 if len(cc_version) == 1:
121 cc_version = ["eq", cc_version[0]]
122
123 if ini.has_option("general", "git_ssh"):
124 git_ssh = ini.getboolean("general", "git_ssh")
125
126 if ini.has_option("general", "git_ssh_key"):
127 git_ssh_key = ini.get("general", "git_ssh_key")
128 else:
129 git_ssh = False
130
131 if ini.has_option("phase1", "config_seed"):
132 config_seed = ini.get("phase1", "config_seed")
133
134 repo_url = ini.get("repo", "url")
135 repo_branch = "master"
136
137 if ini.has_option("repo", "branch"):
138 repo_branch = ini.get("repo", "branch")
139
140 rsync_bin_url = ini.get("rsync", "binary_url")
141 rsync_bin_key = ini.get("rsync", "binary_password")
142 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
143
144 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
145 rsync_bin_defopts += ["--contimeout=20"]
146
147 rsync_src_url = None
148 rsync_src_key = None
149 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
150
151 if ini.has_option("rsync", "source_url"):
152 rsync_src_url = ini.get("rsync", "source_url")
153 rsync_src_key = ini.get("rsync", "source_password")
154
155 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
156 rsync_src_defopts += ["--contimeout=20"]
157
158 usign_key = None
159 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
160
161 if ini.has_option("usign", "key"):
162 usign_key = ini.get("usign", "key")
163
164 if ini.has_option("usign", "comment"):
165 usign_comment = ini.get("usign", "comment")
166
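# Hypothetical examples of the [repo], [rsync] and [usign] sections read above
# (URLs and the key are placeholders):
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = user@upload.example.org::bin-upload
#   binary_password = secret
#   source_url = user@upload.example.org::src-upload
#   source_password = secret
#
#   [usign]
#   key = <base64-encoded usign secret key>
#   comment = untrusted comment: example key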
167 enable_kmod_archive = True
168
169
170 # find targets
171 targets = [ ]
172
173 if not os.path.isdir(work_dir+'/source.git'):
174 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
175 else:
176 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
177
178 findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
179 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
180
181 while True:
182 line = findtargets.stdout.readline()
183 if not line:
184 break
185 ta = line.strip().split(' ')
186 targets.append(ta[0])
187
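# dumpinfo.pl is expected to print one line per target, with the first
# whitespace-separated field being "target/subtarget" (e.g. "ar71xx/generic");
# that field becomes the builder name used throughout the rest of this file.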
188
189 # the 'change_source' setting tells the buildmaster how it should find out
190 # about source code changes. Here we poll the repository configured in [repo].
191
192 from buildbot.changes.gitpoller import GitPoller
193 c['change_source'] = []
194 c['change_source'].append(GitPoller(
195 repo_url,
196 workdir=work_dir+'/work.git', branch=repo_branch,
197 pollinterval=300))
198
199 ####### SCHEDULERS
200
201 # Configure the Schedulers, which decide how to react to incoming changes. In this
202 # case, kick off a build on every target builder whenever the tracked branch changes.
203
204 from buildbot.schedulers.basic import SingleBranchScheduler
205 from buildbot.schedulers.forcesched import ForceScheduler
206 from buildbot.changes import filter
207 c['schedulers'] = []
208 c['schedulers'].append(SingleBranchScheduler(
209 name="all",
210 change_filter=filter.ChangeFilter(branch=repo_branch),
211 treeStableTimer=60,
212 builderNames=targets))
213
214 c['schedulers'].append(ForceScheduler(
215 name="force",
216 builderNames=targets))
217
218 ####### BUILDERS
219
220 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
221 # what steps, and which slaves can execute them. Note that any particular build will
222 # only take place on one slave.
223
224 from buildbot.process.factory import BuildFactory
225 from buildbot.steps.source.git import Git
226 from buildbot.steps.shell import ShellCommand
227 from buildbot.steps.shell import SetPropertyFromCommand
228 from buildbot.steps.transfer import FileUpload
229 from buildbot.steps.transfer import FileDownload
230 from buildbot.steps.transfer import StringDownload
231 from buildbot.steps.master import MasterShellCommand
232 from buildbot.process.properties import Interpolate
233 from buildbot.process import properties
234
235
236 CleanTargetMap = [
237 [ "tools", "tools/clean" ],
238 [ "chain", "toolchain/clean" ],
239 [ "linux", "target/linux/clean" ],
240 [ "dir", "dirclean" ],
241 [ "dist", "distclean" ]
242 ]
243
244 def IsMakeCleanRequested(pattern):
245 def CheckCleanProperty(step):
246 val = step.getProperty("clean")
247 if val and re.match(pattern, val):
248 return True
249 else:
250 return False
251
252 return CheckCleanProperty
253
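# The "clean" build property (typically set when forcing a build) selects one
# of the CleanTargetMap entries above, e.g. clean=dist matches the "dist" row
# and runs "make distclean" in the corresponding step further down.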
254 def IsCleanupRequested(step):
255 shared_wd = step.getProperty("shared_wd")
256 if shared_wd:
257 return False
258 do_cleanup = step.getProperty("do_cleanup")
259 if do_cleanup:
260 return True
261 else:
262 return False
263
264 def IsExpireRequested(step):
265 shared_wd = step.getProperty("shared_wd")
266 if shared_wd:
267 return False
268 else:
269 return not IsCleanupRequested(step)
270
271 def IsGitFreshRequested(step):
272 do_cleanup = step.getProperty("do_cleanup")
273 if do_cleanup:
274 return True
275 else:
276 return False
277
278 def IsGitCleanRequested(step):
279 return not IsGitFreshRequested(step)
280
281 def IsTaggingRequested(step):
282 val = step.getProperty("tag")
283 if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
284 return True
285 else:
286 return False
287
288 def IsNoTaggingRequested(step):
289 return not IsTaggingRequested(step)
290
291 def IsNoMasterBuild(step):
292 return repo_branch != "master"
293
294 def GetBaseVersion():
295 if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
296 return repo_branch.split('-')[1]
297 else:
298 return "master"
299
300 @properties.renderer
301 def GetVersionPrefix(props):
302 basever = GetBaseVersion()
303 if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
304 return "%s/" % props["tag"]
305 elif basever != "master":
306 return "%s-SNAPSHOT/" % basever
307 else:
308 return ""
309
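# Illustrative values (assuming a release branch named like "openwrt-18.06"):
#   GetBaseVersion()                -> "18.06"             (or "master" otherwise)
#   GetVersionPrefix, tag=18.06.2   -> "18.06.2/"
#   GetVersionPrefix, no tag        -> "18.06-SNAPSHOT/"   (or "" on master)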
310 @properties.renderer
311 def GetNumJobs(props):
312 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
313 return str(max(1, int(props["nproc"]) / (props["max_builds"] + other_builds)))
314 else:
315 return "1"
316
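# Example: with nproc=8, max_builds=2 and other_builds=0 this yields
# 8 / (2 + 0) = 4, so the heavy make steps below run with "-j4"; the result
# is clamped to a minimum of one job.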
317 @properties.renderer
318 def GetCC(props):
319 if props.hasProperty("cc_command"):
320 return props["cc_command"]
321 else:
322 return "gcc"
323
324 @properties.renderer
325 def GetCXX(props):
326 if props.hasProperty("cxx_command"):
327 return props["cxx_command"]
328 else:
329 return "g++"
330
331 @properties.renderer
332 def GetCwd(props):
333 if props.hasProperty("builddir"):
334 return props["builddir"]
335 elif props.hasProperty("workdir"):
336 return props["workdir"]
337 else:
338 return "/"
339
340 @properties.renderer
341 def GetCCache(props):
342 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
343 return props["ccache_command"]
344 else:
345 return ""
346
347 def GetNextBuild(builder, requests):
348 for r in requests:
349 if r.properties and r.properties.hasProperty("tag"):
350 return r
351 return requests[0]
352
353 def MakeEnv(overrides=None, tryccache=False):
354 env = {
355 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
356 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
357 }
358 if tryccache:
359 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
360 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
361 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
362 else:
363 env['CC'] = env['CCC']
364 env['CXX'] = env['CCXX']
365 env['CCACHE'] = ''
366 if overrides is not None:
367 env.update(overrides)
368 return env
369
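# How this ties together (see the ccache_cc.sh/ccache_cxx.sh StringDownload
# steps below): with tryccache=True, CC/CXX point at the wrapper scripts,
# which run "exec ${CCACHE} ${CCC} ..." - i.e. they prepend ccache when it
# was detected and fall back to plain ${CCC}/${CCXX} when CCACHE is empty.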
370 @properties.renderer
371 def NetLockDl(props):
372 lock = None
373 if props.hasProperty("dl_lock"):
374 lock = NetLocks[props["dl_lock"]]
375 if lock is not None:
376 return [lock.access('exclusive')]
377 else:
378 return []
379
380 @properties.renderer
381 def NetLockUl(props):
382 lock = None
383 if props.hasProperty("ul_lock"):
384 lock = NetLocks[props["ul_lock"]]
385 if lock is not None:
386 return [lock.access('exclusive')]
387 else:
388 return []
389
390 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
391 try:
392 seckey = base64.b64decode(seckey)
393 except:
394 return None
395
396 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
397 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
398
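# The slicing above assumes an unencrypted usign/signify secret key: bytes
# 0:2 are the algorithm tag, 32:40 the key number, and 72: the public half
# of the Ed25519 key pair; together they form the public key blob.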
399
400 c['builders'] = []
401
402 dlLock = locks.SlaveLock("slave_dl")
403
404 checkBuiltin = re.sub('[\t\n ]+', ' ', """
405 checkBuiltin() {
406 local symbol op path file;
407 for file in $CHANGED_FILES; do
408 case "$file" in
409 package/*/*) : ;;
410 *) return 0 ;;
411 esac;
412 done;
413 while read symbol op path; do
414 case "$symbol" in package-*)
415 symbol="${symbol##*(}";
416 symbol="${symbol%)}";
417 for file in $CHANGED_FILES; do
418 case "$file" in "package/$path/"*)
419 grep -qsx "$symbol=y" .config && return 0
420 ;; esac;
421 done;
422 esac;
423 done < tmp/.packagedeps;
424 return 1;
425 }
426 """).strip()
427
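# checkBuiltin assumes tmp/.packagedeps lines of the (OpenWrt metadata) form
#   package-$(CONFIG_PACKAGE_foo) += feeds/packages/net/foo
# i.e. a config symbol plus the package directory relative to package/. The
# wrapped command always runs when a change touches files outside package/,
# and otherwise only when a changed package is built into the image (=y).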
428
429 class IfBuiltinShellCommand(ShellCommand):
430 def _quote(self, str):
431 if re.search("[^a-zA-Z0-9/_.-]", str):
432 return "'%s'" %(re.sub("'", "'\"'\"'", str))
433 return str
434
435 def setCommand(self, command):
436 if not isinstance(command, (str, unicode)):
437 command = ' '.join(map(self._quote, command))
438 self.command = [
439 '/bin/sh', '-c',
440 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
441 ]
442
443 def setupEnvironment(self, cmd):
444 slaveEnv = self.slaveEnvironment
445 if slaveEnv is None:
446 slaveEnv = { }
447 changedFiles = { }
448 for request in self.build.requests:
449 for source in request.sources:
450 for change in source.changes:
451 for file in change.files:
452 changedFiles[file] = True
453 fullSlaveEnv = slaveEnv.copy()
454 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
455 cmd.args['env'] = fullSlaveEnv
456
457 slaveNames = [ ]
458
459 for slave in c['slaves']:
460 slaveNames.append(slave.slavename)
461
462 for target in targets:
463 ts = target.split('/')
464
465 factory = BuildFactory()
466
467 # find number of cores
468 factory.addStep(SetPropertyFromCommand(
469 name = "nproc",
470 property = "nproc",
471 description = "Finding number of CPUs",
472 command = ["nproc"]))
473
474 # find gcc and g++ compilers
475 factory.addStep(FileDownload(
476 name = "dlfindbinpl",
477 mastersrc = scripts_dir + '/findbin.pl',
478 slavedest = "../findbin.pl",
479 mode = 0755))
480
481 factory.addStep(SetPropertyFromCommand(
482 name = "gcc",
483 property = "cc_command",
484 description = "Finding gcc command",
485 command = [
486 "../findbin.pl", "gcc",
487 cc_version[0] if cc_version is not None else '',
488 cc_version[1] if cc_version is not None else ''
489 ],
490 haltOnFailure = True))
491
492 factory.addStep(SetPropertyFromCommand(
493 name = "g++",
494 property = "cxx_command",
495 description = "Finding g++ command",
496 command = [
497 "../findbin.pl", "g++",
498 cc_version[0] if cc_version is not None else '',
499 cc_version[1] if cc_version is not None else ''
500 ],
501 haltOnFailure = True))
502
503 # see if ccache is available
504 factory.addStep(SetPropertyFromCommand(
505 property = "ccache_command",
506 command = ["which", "ccache"],
507 description = "Testing for ccache command",
508 haltOnFailure = False,
509 flunkOnFailure = False,
510 warnOnFailure = False,
511 ))
512
513 # expire tree if needed
514 if tree_expire > 0:
515 factory.addStep(FileDownload(
516 name = "dlexpiresh",
517 doStepIf = IsExpireRequested,
518 mastersrc = scripts_dir + '/expire.sh',
519 slavedest = "../expire.sh",
520 mode = 0755))
521
522 factory.addStep(ShellCommand(
523 name = "expire",
524 description = "Checking for build tree expiry",
525 command = ["./expire.sh", str(tree_expire)],
526 workdir = ".",
527 haltOnFailure = True,
528 doStepIf = IsExpireRequested,
529 timeout = 2400))
530
531 # cleanup.sh if needed
532 factory.addStep(FileDownload(
533 name = "dlcleanupsh",
534 mastersrc = scripts_dir + '/cleanup-phase1.sh',
535 slavedest = "../cleanup.sh",
536 mode = 0755,
537 doStepIf = IsCleanupRequested))
538
539 factory.addStep(ShellCommand(
540 name = "cleanold",
541 description = "Cleaning previous builds",
542 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
543 workdir = ".",
544 haltOnFailure = True,
545 doStepIf = IsCleanupRequested,
546 timeout = 2400))
547
548 factory.addStep(ShellCommand(
549 name = "cleanup",
550 description = "Cleaning work area",
551 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
552 workdir = ".",
553 haltOnFailure = True,
554 doStepIf = IsCleanupRequested,
555 timeout = 2400))
556
557 # user-requested clean targets
558 for tuple in CleanTargetMap:
559 factory.addStep(ShellCommand(
560 name = tuple[1],
561 description = 'User-requested "make %s"' % tuple[1],
562 command = ["make", tuple[1], "V=s"],
563 env = MakeEnv(),
564 doStepIf = IsMakeCleanRequested(tuple[0])
565 ))
566
567 # Workaround bug when switching from a checked out tag back to a branch
568 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
569 factory.addStep(ShellCommand(
570 name = "gitcheckout",
571 description = "Ensure that Git HEAD is sane",
572 command = "if [ -d .git ]; then git checkout master; else exit 0; fi",
573 haltOnFailure = True))
574
575 # check out the source
576 # Git() runs:
577 # if repo doesn't exist: 'git clone repourl'
578 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Both only work with mode='full'
579 # 'git fetch -t repourl branch; git reset --hard revision'
580 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
581 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
582 factory.addStep(Git(
583 name = "gitclean",
584 repourl = repo_url,
585 branch = repo_branch,
586 mode = 'full',
587 method = 'clean',
588 haltOnFailure = True,
589 doStepIf = IsGitCleanRequested,
590 ))
591
592 factory.addStep(Git(
593 name = "gitfresh",
594 repourl = repo_url,
595 branch = repo_branch,
596 mode = 'full',
597 method = 'fresh',
598 haltOnFailure = True,
599 doStepIf = IsGitFreshRequested,
600 ))
601
602 # update remote refs
603 factory.addStep(ShellCommand(
604 name = "fetchrefs",
605 description = "Fetching Git remote refs",
606 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
607 haltOnFailure = True
608 ))
609
610 # switch to tag
611 factory.addStep(ShellCommand(
612 name = "switchtag",
613 description = "Checking out Git tag",
614 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
615 haltOnFailure = True,
616 doStepIf = IsTaggingRequested
617 ))
618
619 # Verify that Git HEAD points to a tag or branch
620 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
621 factory.addStep(ShellCommand(
622 name = "gitverify",
623 description = "Ensure that Git HEAD is pointing to a branch or tag",
624 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
625 haltOnFailure = True))
626
627 factory.addStep(ShellCommand(
628 name = "rmtmp",
629 description = "Remove tmp folder",
630 command=["rm", "-rf", "tmp/"]))
631
632 # feed
633 # factory.addStep(ShellCommand(
634 # name = "feedsconf",
635 # description = "Copy the feeds.conf",
636 # command='''cp ~/feeds.conf ./feeds.conf''' ))
637
638 # feed
639 factory.addStep(ShellCommand(
640 name = "rmfeedlinks",
641 description = "Remove feed symlinks",
642 command=["rm", "-rf", "package/feeds/"]))
643
644 factory.addStep(StringDownload(
645 name = "ccachecc",
646 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
647 slavedest = "../ccache_cc.sh",
648 mode = 0755,
649 ))
650
651 factory.addStep(StringDownload(
652 name = "ccachecxx",
653 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
654 slavedest = "../ccache_cxx.sh",
655 mode = 0755,
656 ))
657
658 # Git SSH
659 if git_ssh:
660 factory.addStep(StringDownload(
661 name = "dlgitclonekey",
662 s = git_ssh_key,
663 slavedest = "../git-clone.key",
664 mode = 0600,
665 ))
666
667 factory.addStep(ShellCommand(
668 name = "patchfeedsconf",
669 description = "Patching feeds.conf",
670 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
671 haltOnFailure = True
672 ))
673
674 # feed
675 factory.addStep(ShellCommand(
676 name = "updatefeeds",
677 description = "Updating feeds",
678 command=["./scripts/feeds", "update"],
679 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
680 haltOnFailure = True
681 ))
682
683 # Git SSH
684 if git_ssh:
685 factory.addStep(ShellCommand(
686 name = "rmfeedsconf",
687 description = "Removing feeds.conf",
688 command=["rm", "feeds.conf"],
689 haltOnFailure = True
690 ))
691
692 # feed
693 factory.addStep(ShellCommand(
694 name = "installfeeds",
695 description = "Installing feeds",
696 command=["./scripts/feeds", "install", "-a"],
697 env = MakeEnv(tryccache=True),
698 haltOnFailure = True
699 ))
700
701 # seed config
702 if config_seed:
703 factory.addStep(StringDownload(
704 name = "dlconfigseed",
705 s = config_seed + '\n',
706 slavedest = ".config",
707 mode = 0644
708 ))
709
710 # configure
711 factory.addStep(ShellCommand(
712 name = "newconfig",
713 description = "Seeding .config",
714 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
715 ))
716
717 factory.addStep(ShellCommand(
718 name = "delbin",
719 description = "Removing output directory",
720 command = ["rm", "-rf", "bin/"]
721 ))
722
723 factory.addStep(ShellCommand(
724 name = "defconfig",
725 description = "Populating .config",
726 command = ["make", "defconfig"],
727 env = MakeEnv()
728 ))
729
730 # check arch
731 factory.addStep(ShellCommand(
732 name = "checkarch",
733 description = "Checking architecture",
734 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
735 logEnviron = False,
736 want_stdout = False,
737 want_stderr = False,
738 haltOnFailure = True
739 ))
740
741 # find libc suffix
742 factory.addStep(SetPropertyFromCommand(
743 name = "libc",
744 property = "libc",
745 description = "Finding libc suffix",
746 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
747
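# Resulting property values, per the sed expression above:
#   CONFIG_LIBC="musl"  -> ""        (no suffix)
#   CONFIG_LIBC="glibc" -> "-glibc"  (used in the bin/targets/.../ paths below)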
748 # install build key
749 if usign_key is not None:
750 factory.addStep(StringDownload(
751 name = "dlkeybuildpub",
752 s = UsignSec2Pub(usign_key, usign_comment),
753 slavedest = "key-build.pub",
754 mode = 0600,
755 ))
756
757 factory.addStep(StringDownload(
758 name = "dlkeybuild",
759 s = "# fake private key",
760 slavedest = "key-build",
761 mode = 0600,
762 ))
763
764 factory.addStep(StringDownload(
765 name = "dlkeybuilducert",
766 s = "# fake certificate",
767 slavedest = "key-build.ucert",
768 mode = 0600,
769 ))
770
771 # prepare dl
772 factory.addStep(ShellCommand(
773 name = "dldir",
774 description = "Preparing dl/",
775 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
776 logEnviron = False,
777 want_stdout = False
778 ))
779
780 # prepare tar
781 factory.addStep(ShellCommand(
782 name = "dltar",
783 description = "Building and installing GNU tar",
784 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
785 env = MakeEnv(tryccache=True),
786 haltOnFailure = True
787 ))
788
789 # populate dl
790 factory.addStep(ShellCommand(
791 name = "dlrun",
792 description = "Populating dl/",
793 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
794 env = MakeEnv(),
795 logEnviron = False,
796 locks = [dlLock.access('exclusive')],
797 ))
798
799 factory.addStep(ShellCommand(
800 name = "cleanbase",
801 description = "Cleaning base-files",
802 command=["make", "package/base-files/clean", "V=s"]
803 ))
804
805 # build
806 factory.addStep(ShellCommand(
807 name = "tools",
808 description = "Building and installing tools",
809 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
810 env = MakeEnv(tryccache=True),
811 haltOnFailure = True
812 ))
813
814 factory.addStep(ShellCommand(
815 name = "toolchain",
816 description = "Building and installing toolchain",
817 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
818 env = MakeEnv(),
819 haltOnFailure = True
820 ))
821
822 factory.addStep(ShellCommand(
823 name = "kmods",
824 description = "Building kmods",
825 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
826 env = MakeEnv(),
827 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
828 haltOnFailure = True
829 ))
830
831 # find kernel version
832 factory.addStep(SetPropertyFromCommand(
833 name = "kernelversion",
834 property = "kernelversion",
835 description = "Finding the effective Kernel version",
836 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
837 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
838 ))
839
840 factory.addStep(ShellCommand(
841 name = "pkgclean",
842 description = "Cleaning up package build",
843 command=["make", "package/cleanup", "V=s"]
844 ))
845
846 factory.addStep(ShellCommand(
847 name = "pkgbuild",
848 description = "Building packages",
849 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
850 env = MakeEnv(),
851 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
852 haltOnFailure = True
853 ))
854
855 # factory.addStep(IfBuiltinShellCommand(
856 factory.addStep(ShellCommand(
857 name = "pkginstall",
858 description = "Installing packages",
859 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
860 env = MakeEnv(),
861 haltOnFailure = True
862 ))
863
864 factory.addStep(ShellCommand(
865 name = "pkgindex",
866 description = "Indexing packages",
867 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
868 env = MakeEnv(),
869 haltOnFailure = True
870 ))
871
872 if enable_kmod_archive:
873 # embed kmod repository. Must happen before 'images'
874
875 # find rootfs staging directory
876 factory.addStep(SetPropertyFromCommand(
877 name = "stageroot",
878 property = "stageroot",
879 description = "Finding the rootfs staging directory",
880 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
881 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
882 ))
883
884 factory.addStep(ShellCommand(
885 name = "filesdir",
886 description = "Creating file overlay directory",
887 command=["mkdir", "-p", "files/etc/opkg"],
888 haltOnFailure = True
889 ))
890
891 factory.addStep(ShellCommand(
892 name = "kmodconfig",
893 description = "Embedding kmod repository configuration",
894 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
895 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
896 haltOnFailure = True
897 ))
898
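# Effect of the sed above on a (hypothetical) distfeeds.conf line:
#   src/gz openwrt_core http://example.org/targets/ath79/generic/packages
# gains a second line
#   src/gz openwrt_kmods http://example.org/targets/ath79/generic/kmods/<kernelversion>
# so images ship an opkg feed pointing at the kmod archive uploaded later.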
899 #factory.addStep(IfBuiltinShellCommand(
900 factory.addStep(ShellCommand(
901 name = "images",
902 description = "Building and installing images",
903 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
904 env = MakeEnv(),
905 haltOnFailure = True
906 ))
907
908 factory.addStep(ShellCommand(
909 name = "diffconfig",
910 description = "Generating config.seed",
911 command=["make", "-j1", "diffconfig", "V=s"],
912 env = MakeEnv(),
913 haltOnFailure = True
914 ))
915
916 factory.addStep(ShellCommand(
917 name = "checksums",
918 description = "Calculating checksums",
919 command=["make", "-j1", "checksum", "V=s"],
920 env = MakeEnv(),
921 haltOnFailure = True
922 ))
923
924 if enable_kmod_archive:
925 factory.addStep(ShellCommand(
926 name = "kmoddir",
927 description = "Creating kmod directory",
928 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
929 haltOnFailure = True
930 ))
931
932 factory.addStep(ShellCommand(
933 name = "kmodprepare",
934 description = "Preparing kmod archive",
935 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
936 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
937 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
938 haltOnFailure = True
939 ))
940
941 factory.addStep(ShellCommand(
942 name = "kmodindex",
943 description = "Indexing kmod archive",
944 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
945 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
946 env = MakeEnv(),
947 haltOnFailure = True
948 ))
949
950 # sign
951 if ini.has_option("gpg", "key") or usign_key is not None:
952 factory.addStep(MasterShellCommand(
953 name = "signprepare",
954 description = "Preparing temporary signing directory",
955 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
956 haltOnFailure = True
957 ))
958
959 factory.addStep(ShellCommand(
960 name = "signpack",
961 description = "Packing files to sign",
962 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
963 haltOnFailure = True
964 ))
965
966 factory.addStep(FileUpload(
967 slavesrc = "sign.tar.gz",
968 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
969 haltOnFailure = True
970 ))
971
972 factory.addStep(MasterShellCommand(
973 name = "signfiles",
974 description = "Signing files",
975 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
976 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
977 haltOnFailure = True
978 ))
979
980 factory.addStep(FileDownload(
981 name = "dlsigntargz",
982 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
983 slavedest = "sign.tar.gz",
984 haltOnFailure = True
985 ))
986
987 factory.addStep(ShellCommand(
988 name = "signunpack",
989 description = "Unpacking signed files",
990 command = ["tar", "-xzf", "sign.tar.gz"],
991 haltOnFailure = True
992 ))
993
994 # upload
995 factory.addStep(ShellCommand(
996 name = "dirprepare",
997 description = "Preparing upload directory structure",
998 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
999 haltOnFailure = True
1000 ))
1001
1002 factory.addStep(ShellCommand(
1003 name = "linkprepare",
1004 description = "Preparing repository symlink",
1005 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1006 doStepIf = IsNoMasterBuild,
1007 haltOnFailure = True
1008 ))
1009
1010 if enable_kmod_archive:
1011 factory.addStep(ShellCommand(
1012 name = "kmoddirprepare",
1013 description = "Preparing kmod archive upload directory",
1014 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1015 haltOnFailure = True
1016 ))
1017
1018 factory.addStep(ShellCommand(
1019 name = "dirupload",
1020 description = "Uploading directory structure",
1021 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1022 env={'RSYNC_PASSWORD': rsync_bin_key},
1023 haltOnFailure = True,
1024 logEnviron = False,
1025 ))
1026
1027 # download remote sha256sums to 'target-sha256sums'
1028 factory.addStep(ShellCommand(
1029 name = "target-sha256sums",
1030 description = "Fetching remote sha256sums for target",
1031 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1032 env={'RSYNC_PASSWORD': rsync_bin_key},
1033 logEnviron = False,
1034 haltOnFailure = False,
1035 flunkOnFailure = False,
1036 warnOnFailure = False,
1037 ))
1038
1039 # build list of files to upload
1040 factory.addStep(FileDownload(
1041 name = "dlsha2rsyncpl",
1042 mastersrc = scripts_dir + '/sha2rsync.pl',
1043 slavedest = "../sha2rsync.pl",
1044 mode = 0755,
1045 ))
1046
1047 factory.addStep(ShellCommand(
1048 name = "buildlist",
1049 description = "Building list of files to upload",
1050 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1051 haltOnFailure = True,
1052 ))
1053
1054 factory.addStep(FileDownload(
1055 name = "dlrsync.sh",
1056 mastersrc = scripts_dir + '/rsync.sh',
1057 slavedest = "../rsync.sh",
1058 mode = 0755
1059 ))
1060
1061 # upload new files and update existing ones
1062 factory.addStep(ShellCommand(
1063 name = "targetupload",
1064 description = "Uploading target files",
1065 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1066 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1067 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1068 env={'RSYNC_PASSWORD': rsync_bin_key},
1069 haltOnFailure = True,
1070 logEnviron = False,
1071 ))
1072
1073 # delete files which don't exist locally
1074 factory.addStep(ShellCommand(
1075 name = "targetprune",
1076 description = "Pruning target files",
1077 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1078 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1079 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1080 env={'RSYNC_PASSWORD': rsync_bin_key},
1081 haltOnFailure = True,
1082 logEnviron = False,
1083 ))
1084
1085 if enable_kmod_archive:
1086 factory.addStep(ShellCommand(
1087 name = "kmodupload",
1088 description = "Uploading kmod archive",
1089 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1090 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1091 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1092 env={'RSYNC_PASSWORD': rsync_bin_key},
1093 haltOnFailure = True,
1094 logEnviron = False,
1095 ))
1096
1097 if rsync_src_url is not None:
1098 factory.addStep(ShellCommand(
1099 name = "sourcelist",
1100 description = "Finding source archives to upload",
1101 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1102 haltOnFailure = True
1103 ))
1104
1105 factory.addStep(ShellCommand(
1106 name = "sourceupload",
1107 description = "Uploading source archives",
1108 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1109 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1110 env={'RSYNC_PASSWORD': rsync_src_key},
1111 haltOnFailure = True,
1112 logEnviron = False,
1113 ))
1114
1115 if False:
1116 factory.addStep(ShellCommand(
1117 name = "packageupload",
1118 description = "Uploading package files",
1119 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1120 env={'RSYNC_PASSWORD': rsync_bin_key},
1121 haltOnFailure = False,
1122 logEnviron = False,
1123 ))
1124
1125 # logs
1126 if False:
1127 factory.addStep(ShellCommand(
1128 name = "upload",
1129 description = "Uploading logs",
1130 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1131 env={'RSYNC_PASSWORD': rsync_bin_key},
1132 haltOnFailure = False,
1133 alwaysRun = True,
1134 logEnviron = False,
1135 ))
1136
1137 factory.addStep(ShellCommand(
1138 name = "df",
1139 description = "Reporting disk usage",
1140 command=["df", "-h", "."],
1141 env={'LC_ALL': 'C'},
1142 haltOnFailure = False,
1143 alwaysRun = True
1144 ))
1145
1146 factory.addStep(ShellCommand(
1147 name = "ccachestat",
1148 description = "Reporting ccache stats",
1149 command=["ccache", "-s"],
1150 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1151 want_stderr = False,
1152 haltOnFailure = False,
1153 flunkOnFailure = False,
1154 warnOnFailure = False,
1155 alwaysRun = True,
1156 ))
1157
1158 from buildbot.config import BuilderConfig
1159
1160 c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1161
1162
1163 ####### STATUS TARGETS
1164
1165 # 'status' is a list of Status Targets. The results of each build will be
1166 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1167 # including web pages, email senders, and IRC bots.
1168
1169 c['status'] = []
1170
1171 from buildbot.status import html
1172 from buildbot.status.web import authz, auth
1173
1174 if ini.has_option("phase1", "status_bind"):
1175 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1176 authz_cfg=authz.Authz(
1177 # restrict the following actions to authenticated users; see the manual
1178 # for more options
1179 auth=auth.BasicAuth([(ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))]),
1180 gracefulShutdown = 'auth',
1181 forceBuild = 'auth', # use this to test your slave once it is set up
1182 forceAllBuilds = 'auth',
1183 pingBuilder = False,
1184 stopBuild = 'auth',
1185 stopAllBuilds = 'auth',
1186 cancelPendingBuild = 'auth',
1187 )
1188 c['status'].append(html.WebStatus(http_port=ini.get("phase1", "status_bind"), authz=authz_cfg))
1189 else:
1190 c['status'].append(html.WebStatus(http_port=ini.get("phase1", "status_bind")))
1191
1192
1193 from buildbot.status import words
1194
1195 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1196 irc_host = ini.get("irc", "host")
1197 irc_port = 6667
1198 irc_chan = ini.get("irc", "channel")
1199 irc_nick = ini.get("irc", "nickname")
1200 irc_pass = None
1201
1202 if ini.has_option("irc", "port"):
1203 irc_port = ini.getint("irc", "port")
1204
1205 if ini.has_option("irc", "password"):
1206 irc_pass = ini.get("irc", "password")
1207
1208 irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
1209 channels = [{ "channel": irc_chan }],
1210 notify_events = {
1211 'exception': 1,
1212 'successToFailure': 1,
1213 'failureToSuccess': 1
1214 }
1215 )
1216
1217 c['status'].append(irc)
1218
1219 ####### DB URL
1220
1221 c['db'] = {
1222 # This specifies what database buildbot uses to store its state. You can leave
1223 # this at its default for all but the largest installations.
1224 'db_url' : "sqlite:///state.sqlite",
1225 }