phase1: move config.seed into config.ini
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import ConfigParser
9
10 from buildbot import locks
11
12 # This is the phase1 buildmaster config file. It must be installed as
13 # 'master.cfg' in the buildmaster's base directory.
14
15 ini = ConfigParser.ConfigParser()
16 ini.read("./config.ini")
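
# For reference, a minimal config.ini sketch covering the main sections read
# below (all values here are illustrative placeholders, not defaults):
#
#   [general]
#   title = Example Buildbot
#   title_url = http://example.org/
#   buildbot_url = http://buildbot.example.org/
#   workdir = /data/buildbot
#   config_seed = CONFIG_EXAMPLE_OPTION=y
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = user@upload.example.org::bin
#   binary_password = secret
#
# Further optional sections/options handled below: [gpg], [usign], [status],
# [irc], per-slave "[slave ...]" sections, and general options such as port,
# expire, other_builds, cc_version, git_ssh and git_ssh_key.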
17
18 # This is the dictionary that the buildmaster pays attention to. We also use
19 # a shorter alias to save typing.
20 c = BuildmasterConfig = {}
21
22 ####### PROJECT IDENTITY
23
24 # the 'title' string will appear at the top of this buildbot
25 # installation's html.WebStatus home page (linked to the
26 # 'titleURL') and is embedded in the title of the waterfall HTML page.
27
28 c['title'] = ini.get("general", "title")
29 c['titleURL'] = ini.get("general", "title_url")
30
31 # the 'buildbotURL' string should point to the location where the buildbot's
32 # internal web server (usually the html.WebStatus page) is visible. This
33 # typically uses the port number set in the Waterfall 'status' entry, but
34 # with an externally-visible host name which the buildbot cannot figure out
35 # without some help.
36
37 c['buildbotURL'] = ini.get("general", "buildbot_url")
38
39 ####### BUILDSLAVES
40
41 # The 'slaves' list defines the set of recognized buildslaves. Each element is
42 # a BuildSlave object, specifying a unique slave name and password. The same
43 # slave name and password must be configured on the slave.
44 from buildbot.buildslave import BuildSlave
45
46 slave_port = 9989
47
48 if ini.has_option("general", "port"):
49 slave_port = ini.getint("general", "port")
50
51 c['slaves'] = []
52 NetLocks = dict()
53
54 for section in ini.sections():
55 if section.startswith("slave "):
56 if ini.has_option(section, "name") and ini.has_option(section, "password"):
57 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
58 name = ini.get(section, "name")
59 password = ini.get(section, "password")
60 max_builds = 1
61 if ini.has_option(section, "builds"):
62 max_builds = ini.getint(section, "builds")
63 sl_props['max_builds'] = max_builds
64 if ini.has_option(section, "cleanup"):
65 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
66 if ini.has_option(section, "dl_lock"):
67 lockname = ini.get(section, "dl_lock")
68 sl_props['dl_lock'] = lockname
69 if lockname not in NetLocks:
70 NetLocks[lockname] = locks.MasterLock(lockname)
71 if ini.has_option(section, "ul_lock"):
72 lockname = ini.get(section, "ul_lock")
73 sl_props['ul_lock'] = lockname
74 if lockname not in NetLocks:
75 NetLocks[lockname] = locks.MasterLock(lockname)
76 if ini.has_option(section, "shared_wd"):
77 shared_wd = ini.getboolean(section, "shared_wd")
78 sl_props['shared_wd'] = shared_wd
79 if shared_wd and (max_builds != 1):
80 raise ValueError('max_builds must be 1 with shared workdir!')
81 c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
82
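# An example slave section as consumed by the loop above (name and password
# are mandatory, the rest optional; values are placeholders):
#
#   [slave 1]
#   name = example-slave-01
#   password = secret
#   builds = 3
#   cleanup = false
#   dl_lock = dl-mirror-1
#   ul_lock = ul-mirror-1
#   shared_wd = false
#
# Note that shared_wd = true is only accepted together with builds = 1.
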
83 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
84 # This must match the value configured into the buildslaves (with their
85 # --master option)
86 c['slavePortnum'] = slave_port
87
88 # coalesce builds
89 c['mergeRequests'] = True
90
91 # Reduce amount of backlog data
92 c['buildHorizon'] = 30
93 c['logHorizon'] = 20
94
95 ####### CHANGESOURCES
96
97 work_dir = os.path.abspath(ini.get("general", "workdir") if ini.has_option("general", "workdir") else ".")
98 scripts_dir = os.path.abspath("../scripts")
99 tree_expire = 0
100 other_builds = 0
101 cc_version = None
102
103 cc_command = "gcc"
104 cxx_command = "g++"
105
106 config_seed = ""
107
108 git_ssh = False
109 git_ssh_key = None
110
111 if ini.has_option("general", "expire"):
112 tree_expire = ini.getint("general", "expire")
113
114 if ini.has_option("general", "other_builds"):
115 other_builds = ini.getint("general", "other_builds")
116
117 if ini.has_option("general", "cc_version"):
118 cc_version = ini.get("general", "cc_version").split()
119 if len(cc_version) == 1:
120 cc_version = ["eq", cc_version[0]]
121
122 if ini.has_option("general", "git_ssh"):
123 git_ssh = ini.getboolean("general", "git_ssh")
124
125 if ini.has_option("general", "git_ssh_key"):
126 git_ssh_key = ini.get("general", "git_ssh_key")
127 else:
128 git_ssh = False
129
130 if ini.has_option("general", "config_seed"):
131 config_seed = ini.get("general", "config_seed")
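
# Note: ConfigParser joins indented continuation lines with newlines, so a
# multi-line seed can be kept directly in config.ini, e.g. (illustrative
# symbols only):
#
#   [general]
#   config_seed = CONFIG_EXAMPLE_A=y
#       CONFIG_EXAMPLE_B=y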
132
133 repo_url = ini.get("repo", "url")
134 repo_branch = "master"
135
136 if ini.has_option("repo", "branch"):
137 repo_branch = ini.get("repo", "branch")
138
139 rsync_bin_url = ini.get("rsync", "binary_url")
140 rsync_bin_key = ini.get("rsync", "binary_password")
141
142 rsync_src_url = None
143 rsync_src_key = None
144
145 if ini.has_option("rsync", "source_url"):
146 rsync_src_url = ini.get("rsync", "source_url")
147 rsync_src_key = ini.get("rsync", "source_password")
148
149 rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]
150
151 gpg_key = None
152 gpg_passphrase = None
153 gpg_comment = repo_branch.replace("-", " ").title() + " key"
154
155 if ini.has_option("gpg", "key"):
156 gpg_key = ini.get("gpg", "key")
157
158 if ini.has_option("gpg", "passphrase"):
159 gpg_passphrase = ini.get("gpg", "passphrase")
160
161 if ini.has_option("gpg", "comment"):
162 gpg_comment = ini.get("gpg", "comment")
163
164 usign_key = None
165 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
166
167 if ini.has_option("usign", "key"):
168 usign_key = ini.get("usign", "key")
169
170 if ini.has_option("usign", "comment"):
171 usign_comment = ini.get("usign", "comment")
172
173 enable_kmod_archive = True
174
175
176 # find targets
177 targets = [ ]
178
179 if not os.path.isdir(work_dir+'/source.git'):
180 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
181 else:
182 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
183
184 findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
185 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
186
187 while True:
188 line = findtargets.stdout.readline()
189 if not line:
190 break
191 ta = line.strip().split(' ')
192 targets.append(ta[0])
193
194
195 # the 'change_source' setting tells the buildmaster how it should find out
196 # about source code changes. Here we poll the source repository configured in config.ini ([repo] url).
197
198 from buildbot.changes.gitpoller import GitPoller
199 c['change_source'] = []
200 c['change_source'].append(GitPoller(
201 repo_url,
202 workdir=work_dir+'/work.git', branch=repo_branch,
203 pollinterval=300))
204
205 ####### SCHEDULERS
206
207 # Configure the Schedulers, which decide how to react to incoming changes. In this
208 # case, just kick off a build on each target builder
209
210 from buildbot.schedulers.basic import SingleBranchScheduler
211 from buildbot.schedulers.forcesched import ForceScheduler
212 from buildbot.changes import filter
213 c['schedulers'] = []
214 c['schedulers'].append(SingleBranchScheduler(
215 name="all",
216 change_filter=filter.ChangeFilter(branch=repo_branch),
217 treeStableTimer=60,
218 builderNames=targets))
219
220 c['schedulers'].append(ForceScheduler(
221 name="force",
222 builderNames=targets))
223
224 ####### BUILDERS
225
226 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
227 # what steps, and which slaves can execute them. Note that any particular build will
228 # only take place on one slave.
229
230 from buildbot.process.factory import BuildFactory
231 from buildbot.steps.source.git import Git
232 from buildbot.steps.shell import ShellCommand
233 from buildbot.steps.shell import SetPropertyFromCommand
234 from buildbot.steps.transfer import FileUpload
235 from buildbot.steps.transfer import FileDownload
236 from buildbot.steps.transfer import StringDownload
237 from buildbot.steps.master import MasterShellCommand
238 from buildbot.process.properties import Interpolate
239 from buildbot.process import properties
240
241
242 CleanTargetMap = [
243 [ "tools", "tools/clean" ],
244 [ "chain", "toolchain/clean" ],
245 [ "linux", "target/linux/clean" ],
246 [ "dir", "dirclean" ],
247 [ "dist", "distclean" ]
248 ]
249
250 def IsMakeCleanRequested(pattern):
251 def CheckCleanProperty(step):
252 val = step.getProperty("clean")
253 if val and re.match(pattern, val):
254 return True
255 else:
256 return False
257
258 return CheckCleanProperty
259
260 def IsCleanupRequested(step):
261 shared_wd = step.getProperty("shared_wd")
262 if shared_wd:
263 return False
264 do_cleanup = step.getProperty("do_cleanup")
265 if do_cleanup:
266 return True
267 else:
268 return False
269
270 def IsExpireRequested(step):
271 shared_wd = step.getProperty("shared_wd")
272 if shared_wd:
273 return False
274 else:
275 return not IsCleanupRequested(step)
276
277 def IsGitFreshRequested(step):
278 do_cleanup = step.getProperty("do_cleanup")
279 if do_cleanup:
280 return True
281 else:
282 return False
283
284 def IsGitCleanRequested(step):
285 return not IsGitFreshRequested(step)
286
287 def IsTaggingRequested(step):
288 val = step.getProperty("tag")
289 if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
290 return True
291 else:
292 return False
293
294 def IsNoTaggingRequested(step):
295 return not IsTaggingRequested(step)
296
297 def IsNoMasterBuild(step):
298 return repo_branch != "master"
299
300 def GetBaseVersion():
301 if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
302 return repo_branch.split('-')[1]
303 else:
304 return "master"
305
306 @properties.renderer
307 def GetVersionPrefix(props):
308 basever = GetBaseVersion()
309 if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
310 return "%s/" % props["tag"]
311 elif basever != "master":
312 return "%s-SNAPSHOT/" % basever
313 else:
314 return ""
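
# Examples of the resulting upload prefix: on a branch named like
# "example-18.06" this yields "18.06-SNAPSHOT/" for untagged builds and
# "18.06.1/" when a forced build sets tag=18.06.1; on master it is empty.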
315
316 @properties.renderer
317 def GetNumJobs(props):
318 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
319 return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
320 else:
321 return "1"
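
# Example: a slave reporting nproc=8 with max_builds=2 and other_builds=1
# ends up with -j2 (8 / (2 + 1), Python 2 integer division).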
322
323 @properties.renderer
324 def GetCC(props):
325 if props.hasProperty("cc_command"):
326 return props["cc_command"]
327 else:
328 return "gcc"
329
330 @properties.renderer
331 def GetCXX(props):
332 if props.hasProperty("cxx_command"):
333 return props["cxx_command"]
334 else:
335 return "g++"
336
337 @properties.renderer
338 def GetCwd(props):
339 if props.hasProperty("builddir"):
340 return props["builddir"]
341 elif props.hasProperty("workdir"):
342 return props["workdir"]
343 else:
344 return "/"
345
346 @properties.renderer
347 def GetCCache(props):
348 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
349 return props["ccache_command"]
350 else:
351 return ""
352
353 def GetNextBuild(builder, requests):
354 for r in requests:
355 if r.properties and r.properties.hasProperty("tag"):
356 return r
357 return requests[0]
358
359 def MakeEnv(overrides=None, tryccache=False):
360 env = {
361 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
362 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
363 }
364 if tryccache:
365 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
366 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
367 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
368 else:
369 env['CC'] = env['CCC']
370 env['CXX'] = env['CCXX']
371 env['CCACHE'] = ''
372 if overrides is not None:
373 env.update(overrides)
374 return env
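
# Note on MakeEnv(): CCC/CCXX always hold the real compiler commands, while
# with tryccache=True CC/CXX point at the ../ccache_cc.sh and ../ccache_cxx.sh
# wrappers installed further down, which exec "${CCACHE} ${CCC}" resp.
# "${CCACHE} ${CCXX}", so compilation is routed through ccache whenever one
# was detected on the slave.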
375
376 @properties.renderer
377 def NetLockDl(props):
378 lock = None
379 if props.hasProperty("dl_lock"):
380 lock = NetLocks[props["dl_lock"]]
381 if lock is not None:
382 return [lock.access('exclusive')]
383 else:
384 return []
385
386 @properties.renderer
387 def NetLockUl(props):
388 lock = None
389 if props.hasProperty("ul_lock"):
390 lock = NetLocks[props["ul_lock"]]
391 if lock is not None:
392 return [lock.access('exclusive')]
393 else:
394 return []
395
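# The usign/signify secret key blob decodes to sig_alg(2) + kdf_alg(2) +
# kdf_rounds(4) + salt(16) + checksum(8) + keynum(8) + seckey(64) bytes; the
# matching public key is sig_alg + keynum + the last 32 bytes of seckey (the
# Ed25519 public half). UsignSec2Pub() extracts exactly those ranges, which
# assumes a key stored without a passphrase (kdf rounds = 0).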
396 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
397 try:
398 seckey = base64.b64decode(seckey)
399 except:
400 return None
401
402 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
403 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
404
405
406 c['builders'] = []
407
408 dlLock = locks.SlaveLock("slave_dl")
409
410 checkBuiltin = re.sub('[\t\n ]+', ' ', """
411 checkBuiltin() {
412 local symbol op path file;
413 for file in $CHANGED_FILES; do
414 case "$file" in
415 package/*/*) : ;;
416 *) return 0 ;;
417 esac;
418 done;
419 while read symbol op path; do
420 case "$symbol" in package-*)
421 symbol="${symbol##*(}";
422 symbol="${symbol%)}";
423 for file in $CHANGED_FILES; do
424 case "$file" in "package/$path/"*)
425 grep -qsx "$symbol=y" .config && return 0
426 ;; esac;
427 done;
428 esac;
429 done < tmp/.packagedeps;
430 return 1;
431 }
432 """).strip()
433
434
435 class IfBuiltinShellCommand(ShellCommand):
436 def _quote(self, str):
437 if re.search("[^a-zA-Z0-9/_.-]", str):
438 return "'%s'" %(re.sub("'", "'\"'\"'", str))
439 return str
440
441 def setCommand(self, command):
442 if not isinstance(command, (str, unicode)):
443 command = ' '.join(map(self._quote, command))
444 self.command = [
445 '/bin/sh', '-c',
446 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
447 ]
448
449 def setupEnvironment(self, cmd):
450 slaveEnv = self.slaveEnvironment
451 if slaveEnv is None:
452 slaveEnv = { }
453 changedFiles = { }
454 for request in self.build.requests:
455 for source in request.sources:
456 for change in source.changes:
457 for file in change.files:
458 changedFiles[file] = True
459 fullSlaveEnv = slaveEnv.copy()
460 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
461 cmd.args['env'] = fullSlaveEnv
462
463 slaveNames = [ ]
464
465 for slave in c['slaves']:
466 slaveNames.append(slave.slavename)
467
468 for target in targets:
469 ts = target.split('/')
470
471 factory = BuildFactory()
472
473 # find number of cores
474 factory.addStep(SetPropertyFromCommand(
475 name = "nproc",
476 property = "nproc",
477 description = "Finding number of CPUs",
478 command = ["nproc"]))
479
480 # find gcc and g++ compilers
481 if cc_version is not None:
482 factory.addStep(FileDownload(
483 name = "dlfindbinpl",
484 mastersrc = scripts_dir + '/findbin.pl',
485 slavedest = "../findbin.pl",
486 mode = 0755))
487
488 factory.addStep(SetPropertyFromCommand(
489 name = "gcc",
490 property = "cc_command",
491 description = "Finding gcc command",
492 command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
493 haltOnFailure = True))
494
495 factory.addStep(SetPropertyFromCommand(
496 name = "g++",
497 property = "cxx_command",
498 description = "Finding g++ command",
499 command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
500 haltOnFailure = True))
501
502 # see if ccache is available
503 factory.addStep(SetPropertyFromCommand(
504 property = "ccache_command",
505 command = ["which", "ccache"],
506 description = "Testing for ccache command",
507 haltOnFailure = False,
508 flunkOnFailure = False,
509 warnOnFailure = False,
510 ))
511
512 # expire tree if needed
513 if tree_expire > 0:
514 factory.addStep(FileDownload(
515 name = "dlexpiresh",
516 doStepIf = IsExpireRequested,
517 mastersrc = scripts_dir + '/expire.sh',
518 slavedest = "../expire.sh",
519 mode = 0755))
520
521 factory.addStep(ShellCommand(
522 name = "expire",
523 description = "Checking for build tree expiry",
524 command = ["./expire.sh", str(tree_expire)],
525 workdir = ".",
526 haltOnFailure = True,
527 doStepIf = IsExpireRequested,
528 timeout = 2400))
529
530 # cleanup.sh if needed
531 factory.addStep(FileDownload(
532 name = "dlcleanupsh",
533 mastersrc = scripts_dir + '/cleanup-phase1.sh',
534 slavedest = "../cleanup.sh",
535 mode = 0755,
536 doStepIf = IsCleanupRequested))
537
538 factory.addStep(ShellCommand(
539 name = "cleanold",
540 description = "Cleaning previous builds",
541 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
542 workdir = ".",
543 haltOnFailure = True,
544 doStepIf = IsCleanupRequested,
545 timeout = 2400))
546
547 factory.addStep(ShellCommand(
548 name = "cleanup",
549 description = "Cleaning work area",
550 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
551 workdir = ".",
552 haltOnFailure = True,
553 doStepIf = IsCleanupRequested,
554 timeout = 2400))
555
556 # user-requested clean targets
557 for tuple in CleanTargetMap:
558 factory.addStep(ShellCommand(
559 name = tuple[1],
560 description = 'User-requested "make %s"' % tuple[1],
561 command = ["make", tuple[1], "V=s"],
562 env = MakeEnv(),
563 doStepIf = IsMakeCleanRequested(tuple[0])
564 ))
565
566 # Work around a bug when switching from a checked-out tag back to a branch
567 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
568 factory.addStep(ShellCommand(
569 name = "gitcheckout",
570 description = "Ensure that Git HEAD is sane",
571 command = "if [ -d .git ]; then git checkout master; else exit 0; fi",
572 haltOnFailure = True))
573
574 # check out the source
575 # Git() runs:
576 # if repo doesn't exist: 'git clone repourl'
577 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Both only work with mode='full'
578 # 'git fetch -t repourl branch; git reset --hard revision'
579 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
580 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
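# An untested sketch of that single-step variant (buildbot >= 0.8.10 only):
#
#	factory.addStep(Git(
#		name = "git",
#		repourl = repo_url,
#		branch = repo_branch,
#		mode = 'full',
#		method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s'),
#		haltOnFailure = True,
#	))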
581 factory.addStep(Git(
582 name = "gitclean",
583 repourl = repo_url,
584 branch = repo_branch,
585 mode = 'full',
586 method = 'clean',
587 haltOnFailure = True,
588 doStepIf = IsGitCleanRequested,
589 ))
590
591 factory.addStep(Git(
592 name = "gitfresh",
593 repourl = repo_url,
594 branch = repo_branch,
595 mode = 'full',
596 method = 'fresh',
597 haltOnFailure = True,
598 doStepIf = IsGitFreshRequested,
599 ))
600
601 # update remote refs
602 factory.addStep(ShellCommand(
603 name = "fetchrefs",
604 description = "Fetching Git remote refs",
605 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
606 haltOnFailure = True
607 ))
608
609 # switch to tag
610 factory.addStep(ShellCommand(
611 name = "switchtag",
612 description = "Checking out Git tag",
613 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
614 haltOnFailure = True,
615 doStepIf = IsTaggingRequested
616 ))
617
618 # Verify that Git HEAD points to a tag or branch
619 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
620 factory.addStep(ShellCommand(
621 name = "gitverify",
622 description = "Ensure that Git HEAD is pointing to a branch or tag",
623 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
624 haltOnFailure = True))
625
626 factory.addStep(ShellCommand(
627 name = "rmtmp",
628 description = "Remove tmp folder",
629 command=["rm", "-rf", "tmp/"]))
630
631 # feed
632 # factory.addStep(ShellCommand(
633 # name = "feedsconf",
634 # description = "Copy the feeds.conf",
635 # command='''cp ~/feeds.conf ./feeds.conf''' ))
636
637 # feed
638 factory.addStep(ShellCommand(
639 name = "rmfeedlinks",
640 description = "Remove feed symlinks",
641 command=["rm", "-rf", "package/feeds/"]))
642
643 factory.addStep(StringDownload(
644 name = "ccachecc",
645 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
646 slavedest = "../ccache_cc.sh",
647 mode = 0755,
648 ))
649
650 factory.addStep(StringDownload(
651 name = "ccachecxx",
652 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
653 slavedest = "../ccache_cxx.sh",
654 mode = 0755,
655 ))
656
657 # Git SSH
658 if git_ssh:
659 factory.addStep(StringDownload(
660 name = "dlgitclonekey",
661 s = git_ssh_key,
662 slavedest = "../git-clone.key",
663 mode = 0600,
664 ))
665
666 factory.addStep(ShellCommand(
667 name = "patchfeedsconf",
668 description = "Patching feeds.conf",
669 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
670 haltOnFailure = True
671 ))
672
673 # feed
674 factory.addStep(ShellCommand(
675 name = "updatefeeds",
676 description = "Updating feeds",
677 command=["./scripts/feeds", "update"],
678 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
679 haltOnFailure = True
680 ))
681
682 # Git SSH
683 if git_ssh:
684 factory.addStep(ShellCommand(
685 name = "rmfeedsconf",
686 description = "Removing feeds.conf",
687 command=["rm", "feeds.conf"],
688 haltOnFailure = True
689 ))
690
691 # feed
692 factory.addStep(ShellCommand(
693 name = "installfeeds",
694 description = "Installing feeds",
695 command=["./scripts/feeds", "install", "-a"],
696 env = MakeEnv(tryccache=True),
697 haltOnFailure = True
698 ))
699
700 # seed config
701 if config_seed is not None:
702 factory.addStep(StringDownload(
703 name = "dlconfigseed",
704 s = config_seed + '\n',
705 slavedest = ".config",
706 mode = 0644
707 ))
708
709 # configure
710 factory.addStep(ShellCommand(
711 name = "newconfig",
712 description = "Seeding .config",
713 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
714 ))
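
# For a hypothetical target "ar71xx/generic" with a usign key configured, the
# step above appends:
#
#   CONFIG_TARGET_ar71xx=y
#   CONFIG_TARGET_ar71xx_generic=y
#   CONFIG_SIGNED_PACKAGES=y
#
# and the later "make defconfig" expands this into a full .config.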
715
716 factory.addStep(ShellCommand(
717 name = "delbin",
718 description = "Removing output directory",
719 command = ["rm", "-rf", "bin/"]
720 ))
721
722 factory.addStep(ShellCommand(
723 name = "defconfig",
724 description = "Populating .config",
725 command = ["make", "defconfig"],
726 env = MakeEnv()
727 ))
728
729 # check arch
730 factory.addStep(ShellCommand(
731 name = "checkarch",
732 description = "Checking architecture",
733 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
734 logEnviron = False,
735 want_stdout = False,
736 want_stderr = False,
737 haltOnFailure = True
738 ))
739
740 # find libc suffix
741 factory.addStep(SetPropertyFromCommand(
742 name = "libc",
743 property = "libc",
744 description = "Finding libc suffix",
745 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
746
747 # install build key
748 if usign_key is not None:
749 factory.addStep(StringDownload(
750 name = "dlkeybuildpub",
751 s = UsignSec2Pub(usign_key, usign_comment),
752 slavedest = "key-build.pub",
753 mode = 0600,
754 ))
755
756 factory.addStep(StringDownload(
757 name = "dlkeybuild",
758 s = "# fake private key",
759 slavedest = "key-build",
760 mode = 0600,
761 ))
762
763 factory.addStep(StringDownload(
764 name = "dlkeybuilducert",
765 s = "# fake certificate",
766 slavedest = "key-build.ucert",
767 mode = 0600,
768 ))
769
770 # prepare dl
771 factory.addStep(ShellCommand(
772 name = "dldir",
773 description = "Preparing dl/",
774 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
775 logEnviron = False,
776 want_stdout = False
777 ))
778
779 # prepare tar
780 factory.addStep(ShellCommand(
781 name = "dltar",
782 description = "Building and installing GNU tar",
783 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
784 env = MakeEnv(tryccache=True),
785 haltOnFailure = True
786 ))
787
788 # populate dl
789 factory.addStep(ShellCommand(
790 name = "dlrun",
791 description = "Populating dl/",
792 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
793 env = MakeEnv(),
794 logEnviron = False,
795 locks = [dlLock.access('exclusive')],
796 ))
797
798 factory.addStep(ShellCommand(
799 name = "cleanbase",
800 description = "Cleaning base-files",
801 command=["make", "package/base-files/clean", "V=s"]
802 ))
803
804 # build
805 factory.addStep(ShellCommand(
806 name = "tools",
807 description = "Building and installing tools",
808 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
809 env = MakeEnv(tryccache=True),
810 haltOnFailure = True
811 ))
812
813 factory.addStep(ShellCommand(
814 name = "toolchain",
815 description = "Building and installing toolchain",
816 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
817 env = MakeEnv(),
818 haltOnFailure = True
819 ))
820
821 factory.addStep(ShellCommand(
822 name = "kmods",
823 description = "Building kmods",
824 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
825 env = MakeEnv(),
826 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
827 haltOnFailure = True
828 ))
829
830 # find kernel version
831 factory.addStep(SetPropertyFromCommand(
832 name = "kernelversion",
833 property = "kernelversion",
834 description = "Finding the effective Kernel version",
835 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
836 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
837 ))
838
839 factory.addStep(ShellCommand(
840 name = "pkgclean",
841 description = "Cleaning up package build",
842 command=["make", "package/cleanup", "V=s"]
843 ))
844
845 factory.addStep(ShellCommand(
846 name = "pkgbuild",
847 description = "Building packages",
848 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
849 env = MakeEnv(),
850 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
851 haltOnFailure = True
852 ))
853
854 # factory.addStep(IfBuiltinShellCommand(
855 factory.addStep(ShellCommand(
856 name = "pkginstall",
857 description = "Installing packages",
858 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
859 env = MakeEnv(),
860 haltOnFailure = True
861 ))
862
863 factory.addStep(ShellCommand(
864 name = "pkgindex",
865 description = "Indexing packages",
866 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
867 env = MakeEnv(),
868 haltOnFailure = True
869 ))
870
871 if enable_kmod_archive:
872 # embed kmod repository. Must happen before 'images'
873
874 # find rootfs staging directory
875 factory.addStep(SetPropertyFromCommand(
876 name = "stageroot",
877 property = "stageroot",
878 description = "Finding the rootfs staging directory",
879 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
880 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
881 ))
882
883 factory.addStep(ShellCommand(
884 name = "filesdir",
885 description = "Creating file overlay directory",
886 command=["mkdir", "-p", "files/etc/opkg"],
887 haltOnFailure = True
888 ))
889
890 factory.addStep(ShellCommand(
891 name = "kmodconfig",
892 description = "Embedding kmod repository configuration",
893 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
894 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
895 haltOnFailure = True
896 ))
897
898 #factory.addStep(IfBuiltinShellCommand(
899 factory.addStep(ShellCommand(
900 name = "images",
901 description = "Building and installing images",
902 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
903 env = MakeEnv(),
904 haltOnFailure = True
905 ))
906
907 factory.addStep(ShellCommand(
908 name = "diffconfig",
909 description = "Generating config.seed",
910 command=["make", "-j1", "diffconfig", "V=s"],
911 env = MakeEnv(),
912 haltOnFailure = True
913 ))
914
915 factory.addStep(ShellCommand(
916 name = "checksums",
917 description = "Calculating checksums",
918 command=["make", "-j1", "checksum", "V=s"],
919 env = MakeEnv(),
920 haltOnFailure = True
921 ))
922
923 if enable_kmod_archive:
924 factory.addStep(ShellCommand(
925 name = "kmoddir",
926 description = "Creating kmod directory",
927 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
928 haltOnFailure = True
929 ))
930
931 factory.addStep(ShellCommand(
932 name = "kmodprepare",
933 description = "Preparing kmod archive",
934 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
935 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
936 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
937 haltOnFailure = True
938 ))
939
940 factory.addStep(ShellCommand(
941 name = "kmodindex",
942 description = "Indexing kmod archive",
943 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
944 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
945 env = MakeEnv(),
946 haltOnFailure = True
947 ))
948
949 # sign
950 if gpg_key is not None or usign_key is not None:
951 factory.addStep(MasterShellCommand(
952 name = "signprepare",
953 description = "Preparing temporary signing directory",
954 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
955 haltOnFailure = True
956 ))
957
958 factory.addStep(ShellCommand(
959 name = "signpack",
960 description = "Packing files to sign",
961 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
962 haltOnFailure = True
963 ))
964
965 factory.addStep(FileUpload(
966 slavesrc = "sign.tar.gz",
967 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
968 haltOnFailure = True
969 ))
970
971 factory.addStep(MasterShellCommand(
972 name = "signfiles",
973 description = "Signing files",
974 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
975 env = {
976 'GPGKEY': gpg_key,
977 'GPGPASS': gpg_passphrase,
978 'GPGCOMMENT': gpg_comment,
979 'USIGNKEY': usign_key,
980 'USIGNCOMMENT': usign_comment
981 },
982 haltOnFailure = True
983 ))
984
985 factory.addStep(FileDownload(
986 name = "dlsigntargz",
987 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
988 slavedest = "sign.tar.gz",
989 haltOnFailure = True
990 ))
991
992 factory.addStep(ShellCommand(
993 name = "signunpack",
994 description = "Unpacking signed files",
995 command = ["tar", "-xzf", "sign.tar.gz"],
996 haltOnFailure = True
997 ))
998
999 # upload
1000 factory.addStep(ShellCommand(
1001 name = "dirprepare",
1002 description = "Preparing upload directory structure",
1003 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1004 haltOnFailure = True
1005 ))
1006
1007 factory.addStep(ShellCommand(
1008 name = "linkprepare",
1009 description = "Preparing repository symlink",
1010 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1011 doStepIf = IsNoMasterBuild,
1012 haltOnFailure = True
1013 ))
1014
1015 if enable_kmod_archive:
1016 factory.addStep(ShellCommand(
1017 name = "kmoddirprepare",
1018 description = "Preparing kmod archive upload directory",
1019 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1020 haltOnFailure = True
1021 ))
1022
1023 factory.addStep(ShellCommand(
1024 name = "dirupload",
1025 description = "Uploading directory structure",
1026 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1027 env={'RSYNC_PASSWORD': rsync_bin_key},
1028 haltOnFailure = True,
1029 logEnviron = False,
1030 ))
1031
1032 # download remote sha256sums to 'target-sha256sums'
1033 factory.addStep(ShellCommand(
1034 name = "target-sha256sums",
1035 description = "Fetching remote sha256sums for target",
1036 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1037 env={'RSYNC_PASSWORD': rsync_bin_key},
1038 logEnviron = False,
1039 haltOnFailure = False,
1040 flunkOnFailure = False,
1041 warnOnFailure = False,
1042 ))
1043
1044 # build list of files to upload
1045 factory.addStep(FileDownload(
1046 name = "dlsha2rsyncpl",
1047 mastersrc = scripts_dir + '/sha2rsync.pl',
1048 slavedest = "../sha2rsync.pl",
1049 mode = 0755,
1050 ))
1051
1052 factory.addStep(ShellCommand(
1053 name = "buildlist",
1054 description = "Building list of files to upload",
1055 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1056 haltOnFailure = True,
1057 ))
1058
1059 factory.addStep(FileDownload(
1060 name = "dlrsync.sh",
1061 mastersrc = scripts_dir + '/rsync.sh',
1062 slavedest = "../rsync.sh",
1063 mode = 0755
1064 ))
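
# Upload flow: sha2rsync.pl derives, from the fetched target-sha256sums and
# the locally generated sha256sums, the list of new or changed files
# ("rsynclist"); targetupload then only transfers those via --files-from,
# while the separate targetprune pass (--delete --existing --ignore-existing)
# removes remote files that no longer exist locally.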
1065
1066 # upload new files and update existing ones
1067 factory.addStep(ShellCommand(
1068 name = "targetupload",
1069 description = "Uploading target files",
1070 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1071 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1072 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1073 env={'RSYNC_PASSWORD': rsync_bin_key},
1074 haltOnFailure = True,
1075 logEnviron = False,
1076 ))
1077
1078 # delete files which don't exist locally
1079 factory.addStep(ShellCommand(
1080 name = "targetprune",
1081 description = "Pruning target files",
1082 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1083 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1084 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1085 env={'RSYNC_PASSWORD': rsync_bin_key},
1086 haltOnFailure = True,
1087 logEnviron = False,
1088 ))
1089
1090 if enable_kmod_archive:
1091 factory.addStep(ShellCommand(
1092 name = "kmodupload",
1093 description = "Uploading kmod archive",
1094 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1095 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1096 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1097 env={'RSYNC_PASSWORD': rsync_bin_key},
1098 haltOnFailure = True,
1099 logEnviron = False,
1100 ))
1101
1102 if rsync_src_url is not None:
1103 factory.addStep(ShellCommand(
1104 name = "sourcelist",
1105 description = "Finding source archives to upload",
1106 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1107 haltOnFailure = True
1108 ))
1109
1110 factory.addStep(ShellCommand(
1111 name = "sourceupload",
1112 description = "Uploading source archives",
1113 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1114 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1115 env={'RSYNC_PASSWORD': rsync_src_key},
1116 haltOnFailure = True,
1117 logEnviron = False,
1118 ))
1119
1120 if False:
1121 factory.addStep(ShellCommand(
1122 name = "packageupload",
1123 description = "Uploading package files",
1124 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1125 env={'RSYNC_PASSWORD': rsync_bin_key},
1126 haltOnFailure = False,
1127 logEnviron = False,
1128 ))
1129
1130 # logs
1131 if False:
1132 factory.addStep(ShellCommand(
1133 name = "upload",
1134 description = "Uploading logs",
1135 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1136 env={'RSYNC_PASSWORD': rsync_bin_key},
1137 haltOnFailure = False,
1138 alwaysRun = True,
1139 logEnviron = False,
1140 ))
1141
1142 factory.addStep(ShellCommand(
1143 name = "df",
1144 description = "Reporting disk usage",
1145 command=["df", "-h", "."],
1146 env={'LC_ALL': 'C'},
1147 haltOnFailure = False,
1148 alwaysRun = True
1149 ))
1150
1151 factory.addStep(ShellCommand(
1152 name = "ccachestat",
1153 description = "Reporting ccache stats",
1154 command=["ccache", "-s"],
1155 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1156 want_stderr = False,
1157 haltOnFailure = False,
1158 flunkOnFailure = False,
1159 warnOnFailure = False,
1160 alwaysRun = True,
1161 ))
1162
1163 from buildbot.config import BuilderConfig
1164
1165 c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1166
1167
1168 ####### STATUS TARGETS
1169
1170 # 'status' is a list of Status Targets. The results of each build will be
1171 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1172 # including web pages, email senders, and IRC bots.
1173
1174 c['status'] = []
1175
1176 from buildbot.status import html
1177 from buildbot.status.web import authz, auth
1178
1179 if ini.has_option("status", "bind"):
1180 if ini.has_option("status", "user") and ini.has_option("status", "password"):
1181 authz_cfg=authz.Authz(
1182 # change any of these to True to enable; see the manual for more
1183 # options
1184 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
1185 gracefulShutdown = 'auth',
1186 forceBuild = 'auth', # use this to test your slave once it is set up
1187 forceAllBuilds = 'auth',
1188 pingBuilder = False,
1189 stopBuild = 'auth',
1190 stopAllBuilds = 'auth',
1191 cancelPendingBuild = 'auth',
1192 )
1193 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
1194 else:
1195 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
1196
1197
1198 from buildbot.status import words
1199
1200 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1201 irc_host = ini.get("irc", "host")
1202 irc_port = 6667
1203 irc_chan = ini.get("irc", "channel")
1204 irc_nick = ini.get("irc", "nickname")
1205 irc_pass = None
1206
1207 if ini.has_option("irc", "port"):
1208 irc_port = ini.getint("irc", "port")
1209
1210 if ini.has_option("irc", "password"):
1211 irc_pass = ini.get("irc", "password")
1212
1213 irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
1214 channels = [{ "channel": irc_chan }],
1215 notify_events = {
1216 'exception': 1,
1217 'successToFailure': 1,
1218 'failureToSuccess': 1
1219 }
1220 )
1221
1222 c['status'].append(irc)
1223
1224 ####### DB URL
1225
1226 c['db'] = {
1227 # This specifies what database buildbot uses to store its state. You can leave
1228 # this at its default for all but the largest installations.
1229 'db_url' : "sqlite:///state.sqlite",
1230 }