phase1: add ability to clone feeds via SSH
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import subprocess
7 import ConfigParser
8
9 from buildbot import locks
10
11 # This is the phase1 buildmaster configuration. It must be installed as
12 # 'master.cfg' in the buildmaster's base directory.
13
14 ini = ConfigParser.ConfigParser()
15 ini.read("./config.ini")
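# A minimal sketch of the config.ini this file expects. The option names are the
# ones read throughout this config; all values below are placeholders, not taken
# from any real deployment:
#
#   [general]
#   title = Example buildbot
#   title_url = http://buildbot.example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = .
#   port = 9989
#   git_ssh = yes
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = upload@mirror.example.org::bin
#   binary_password = secret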
16
17 # This is the dictionary that the buildmaster pays attention to. We also use
18 # a shorter alias to save typing.
19 c = BuildmasterConfig = {}
20
21 ####### PROJECT IDENTITY
22
23 # the 'title' string will appear at the top of this buildbot
24 # installation's html.WebStatus home page (linked to the
25 # 'titleURL') and is embedded in the title of the waterfall HTML page.
26
27 c['title'] = ini.get("general", "title")
28 c['titleURL'] = ini.get("general", "title_url")
29
30 # the 'buildbotURL' string should point to the location where the buildbot's
31 # internal web server (usually the html.WebStatus page) is visible. This
32 # typically uses the port number set in the Waterfall 'status' entry, but
33 # with an externally-visible host name which the buildbot cannot figure out
34 # without some help.
35
36 c['buildbotURL'] = ini.get("general", "buildbot_url")
37
38 ####### BUILDSLAVES
39
40 # The 'slaves' list defines the set of recognized buildslaves. Each element is
41 # a BuildSlave object, specifying a unique slave name and password. The same
42 # slave name and password must be configured on the slave.
43 from buildbot.buildslave import BuildSlave
44
45 slave_port = 9989
46
47 if ini.has_option("general", "port"):
48 slave_port = ini.getint("general", "port")
49
50 c['slaves'] = []
51 NetLocks = dict()
52
53 for section in ini.sections():
54 if section.startswith("slave "):
55 if ini.has_option(section, "name") and ini.has_option(section, "password"):
56 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
57 name = ini.get(section, "name")
58 password = ini.get(section, "password")
59 max_builds = 1
60 if ini.has_option(section, "builds"):
61 max_builds = ini.getint(section, "builds")
62 sl_props['max_builds'] = max_builds
63 if ini.has_option(section, "cleanup"):
64 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
65 if ini.has_option(section, "dl_lock"):
66 lockname = ini.get(section, "dl_lock")
67 sl_props['dl_lock'] = lockname
68 if lockname not in NetLocks:
69 NetLocks[lockname] = locks.MasterLock(lockname)
70 if ini.has_option(section, "ul_lock"):
71 lockname = ini.get(section, "ul_lock")
72 sl_props['ul_lock'] = lockname
73 if lockname not in NetLocks:
74 NetLocks[lockname] = locks.MasterLock(lockname)
75 if ini.has_option(section, "shared_wd"):
76 shared_wd = ini.getboolean(section, "shared_wd")
77 sl_props['shared_wd'] = shared_wd
78 if shared_wd and (max_builds != 1):
79 raise ValueError('max_builds must be 1 with shared workdir!')
80 c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
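# A sketch of one "slave" section consumed by the loop above (name, password and
# lock names are placeholders); builds, cleanup, dl_lock, ul_lock and shared_wd
# are the optional knobs parsed here:
#
#   [slave 1]
#   name = builder-01
#   password = secret
#   builds = 2
#   cleanup = no
#   dl_lock = dl_mirror
#   ul_lock = ul_mirror
#   shared_wd = no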
81
82 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
83 # This must match the value configured into the buildslaves (with their
84 # --master option)
85 c['slavePortnum'] = slave_port
86
87 # coalesce builds
88 c['mergeRequests'] = True
89
90 # Reduce amount of backlog data
91 c['buildHorizon'] = 30
92 c['logHorizon'] = 20
93
94 ####### CHANGESOURCES
95
96 home_dir = os.path.abspath(ini.get("general", "homedir"))
97 tree_expire = 0
98 other_builds = 0
99 cc_version = None
100
101 cc_command = "gcc"
102 cxx_command = "g++"
103
104 git_ssh = False
105
106 if ini.has_option("general", "expire"):
107 tree_expire = ini.getint("general", "expire")
108
109 if ini.has_option("general", "other_builds"):
110 other_builds = ini.getint("general", "other_builds")
111
112 if ini.has_option("general", "cc_version"):
113 cc_version = ini.get("general", "cc_version").split()
114 if len(cc_version) == 1:
115 cc_version = ["eq", cc_version[0]]
116
117 if ini.has_option("general", "git_ssh"):
118 git_ssh = ini.getboolean("general", "git_ssh")
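# When git_ssh is enabled under [general], the build steps below additionally
# expect an SSH private key file named "git-clone.key" in the master's base
# directory; it is downloaded to each slave and passed to git via
# GIT_SSH_COMMAND while the feeds are updated. The key contents are of course
# deployment-specific:
#
#   [general]
#   git_ssh = yes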
119
120 repo_url = ini.get("repo", "url")
121 repo_branch = "master"
122
123 if ini.has_option("repo", "branch"):
124 repo_branch = ini.get("repo", "branch")
125
126 rsync_bin_url = ini.get("rsync", "binary_url")
127 rsync_bin_key = ini.get("rsync", "binary_password")
128
129 rsync_src_url = None
130 rsync_src_key = None
131
132 if ini.has_option("rsync", "source_url"):
133 rsync_src_url = ini.get("rsync", "source_url")
134 rsync_src_key = ini.get("rsync", "source_password")
135
136 rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]
137
138 gpg_home = "~/.gnupg"
139 gpg_keyid = None
140 gpg_comment = "Unattended build signature"
141 gpg_passfile = "/dev/null"
142
143 if ini.has_option("gpg", "home"):
144 gpg_home = ini.get("gpg", "home")
145
146 if ini.has_option("gpg", "keyid"):
147 gpg_keyid = ini.get("gpg", "keyid")
148
149 if ini.has_option("gpg", "comment"):
150 gpg_comment = ini.get("gpg", "comment")
151
152 if ini.has_option("gpg", "passfile"):
153 gpg_passfile = ini.get("gpg", "passfile")
154
155 enable_kmod_archive = True
156
157
158 # find targets
159 targets = [ ]
160
161 if not os.path.isdir(home_dir+'/source.git'):
162 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
163 else:
164 subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')
165
166 findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
167 stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
168
169 while True:
170 line = findtargets.stdout.readline()
171 if not line:
172 break
173 ta = line.strip().split(' ')
174 targets.append(ta[0])
175
176
177 # the 'change_source' setting tells the buildmaster how it should find out
178 # about source code changes. Here we poll the repository configured above for new commits on the tracked branch.
179
180 from buildbot.changes.gitpoller import GitPoller
181 c['change_source'] = []
182 c['change_source'].append(GitPoller(
183 repo_url,
184 workdir=home_dir+'/work.git', branch=repo_branch,
185 pollinterval=300))
186
187 ####### SCHEDULERS
188
189 # Configure the Schedulers, which decide how to react to incoming changes. In this
190 # case, kick off a build on every target builder whenever the tracked branch changes
191
192 from buildbot.schedulers.basic import SingleBranchScheduler
193 from buildbot.schedulers.forcesched import ForceScheduler
194 from buildbot.changes import filter
195 c['schedulers'] = []
196 c['schedulers'].append(SingleBranchScheduler(
197 name="all",
198 change_filter=filter.ChangeFilter(branch=repo_branch),
199 treeStableTimer=60,
200 builderNames=targets))
201
202 c['schedulers'].append(ForceScheduler(
203 name="force",
204 builderNames=targets))
205
206 ####### BUILDERS
207
208 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
209 # what steps, and which slaves can execute them. Note that any particular build will
210 # only take place on one slave.
211
212 from buildbot.process.factory import BuildFactory
213 from buildbot.steps.source.git import Git
214 from buildbot.steps.shell import ShellCommand
215 from buildbot.steps.shell import SetPropertyFromCommand
216 from buildbot.steps.transfer import FileUpload
217 from buildbot.steps.transfer import FileDownload
218 from buildbot.steps.transfer import StringDownload
219 from buildbot.steps.master import MasterShellCommand
220 from buildbot.process.properties import Interpolate
221 from buildbot.process import properties
222
223
224 CleanTargetMap = [
225 [ "tools", "tools/clean" ],
226 [ "chain", "toolchain/clean" ],
227 [ "linux", "target/linux/clean" ],
228 [ "dir", "dirclean" ],
229 [ "dist", "distclean" ]
230 ]
231
232 def IsMakeCleanRequested(pattern):
233 def CheckCleanProperty(step):
234 val = step.getProperty("clean")
235 if val and re.match(pattern, val):
236 return True
237 else:
238 return False
239
240 return CheckCleanProperty
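# Example: a build carrying the property clean="dist" (however that property is
# injected) matches the "dist" entry of CleanTargetMap and runs "make distclean";
# clean="dir" runs "make dirclean".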
241
242 def IsCleanupRequested(step):
243 shared_wd = step.getProperty("shared_wd")
244 if shared_wd:
245 return False
246 do_cleanup = step.getProperty("do_cleanup")
247 if do_cleanup:
248 return True
249 else:
250 return False
251
252 def IsExpireRequested(step):
253 shared_wd = step.getProperty("shared_wd")
254 if shared_wd:
255 return False
256 else:
257 return not IsCleanupRequested(step)
258
259 def IsGitFreshRequested(step):
260 do_cleanup = step.getProperty("do_cleanup")
261 if do_cleanup:
262 return True
263 else:
264 return False
265
266 def IsGitCleanRequested(step):
267 return not IsGitFreshRequested(step)
268
269 def IsTaggingRequested(step):
270 val = step.getProperty("tag")
271 if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
272 return True
273 else:
274 return False
275
276 def IsNoTaggingRequested(step):
277 return not IsTaggingRequested(step)
278
279 def IsNoMasterBuild(step):
280 return repo_branch != "master"
281
282 def GetBaseVersion():
283 if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
284 return repo_branch.split('-')[1]
285 else:
286 return "master"
287
288 @properties.renderer
289 def GetVersionPrefix(props):
290 basever = GetBaseVersion()
291 if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
292 return "%s/" % props["tag"]
293 elif basever != "master":
294 return "%s-SNAPSHOT/" % basever
295 else:
296 return ""
297
298 @properties.renderer
299 def GetNumJobs(props):
300 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
301 return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
302 else:
303 return "1"
304
305 @properties.renderer
306 def GetCC(props):
307 if props.hasProperty("cc_command"):
308 return props["cc_command"]
309 else:
310 return "gcc"
311
312 @properties.renderer
313 def GetCXX(props):
314 if props.hasProperty("cxx_command"):
315 return props["cxx_command"]
316 else:
317 return "g++"
318
319 @properties.renderer
320 def GetCwd(props):
321 if props.hasProperty("builddir"):
322 return props["builddir"]
323 elif props.hasProperty("workdir"):
324 return props["workdir"]
325 else:
326 return "/"
327
328 @properties.renderer
329 def GetCCache(props):
330 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
331 return props["ccache_command"]
332 else:
333 return ""
334
335 def GetNextBuild(builder, requests):
336 for r in requests:
337 if r.properties and r.properties.hasProperty("tag"):
338 return r
339 return requests[0]
340
341 def MakeEnv(overrides=None, tryccache=False):
342 env = {
343 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
344 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
345 }
346 if tryccache:
347 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
348 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
349 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
350 else:
351 env['CC'] = env['CCC']
352 env['CXX'] = env['CCXX']
353 env['CCACHE'] = ''
354 if overrides is not None:
355 env.update(overrides)
356 return env
357
358 @properties.renderer
359 def NetLockDl(props):
360 lock = None
361 if props.hasProperty("dl_lock"):
362 lock = NetLocks[props["dl_lock"]]
363 if lock is not None:
364 return [lock.access('exclusive')]
365 else:
366 return []
367
368 @properties.renderer
369 def NetLockUl(props):
370 lock = None
371 if props.hasProperty("ul_lock"):
372 lock = NetLocks[props["ul_lock"]]
373 if lock is not None:
374 return [lock.access('exclusive')]
375 else:
376 return []
377
378 c['builders'] = []
379
380 dlLock = locks.SlaveLock("slave_dl")
381
382 checkBuiltin = re.sub('[\t\n ]+', ' ', """
383 checkBuiltin() {
384 local symbol op path file;
385 for file in $CHANGED_FILES; do
386 case "$file" in
387 package/*/*) : ;;
388 *) return 0 ;;
389 esac;
390 done;
391 while read symbol op path; do
392 case "$symbol" in package-*)
393 symbol="${symbol##*(}";
394 symbol="${symbol%)}";
395 for file in $CHANGED_FILES; do
396 case "$file" in "package/$path/"*)
397 grep -qsx "$symbol=y" .config && return 0
398 ;; esac;
399 done;
400 esac;
401 done < tmp/.packagedeps;
402 return 1;
403 }
404 """).strip()
405
406
407 class IfBuiltinShellCommand(ShellCommand):
408 def _quote(self, str):
409 if re.search("[^a-zA-Z0-9/_.-]", str):
410 return "'%s'" %(re.sub("'", "'\"'\"'", str))
411 return str
412
413 def setCommand(self, command):
414 if not isinstance(command, (str, unicode)):
415 command = ' '.join(map(self._quote, command))
416 self.command = [
417 '/bin/sh', '-c',
418 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
419 ]
420
421 def setupEnvironment(self, cmd):
422 slaveEnv = self.slaveEnvironment
423 if slaveEnv is None:
424 slaveEnv = { }
425 changedFiles = { }
426 for request in self.build.requests:
427 for source in request.sources:
428 for change in source.changes:
429 for file in change.files:
430 changedFiles[file] = True
431 fullSlaveEnv = slaveEnv.copy()
432 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
433 cmd.args['env'] = fullSlaveEnv
434
435 slaveNames = [ ]
436
437 for slave in c['slaves']:
438 slaveNames.append(slave.slavename)
439
440 for target in targets:
441 ts = target.split('/')
442
443 factory = BuildFactory()
444
445 # find number of cores
446 factory.addStep(SetPropertyFromCommand(
447 name = "nproc",
448 property = "nproc",
449 description = "Finding number of CPUs",
450 command = ["nproc"]))
451
452 # find gcc and g++ compilers
453 if cc_version is not None:
454 factory.addStep(FileDownload(
455 name = "dlfindbinpl",
456 mastersrc = "findbin.pl",
457 slavedest = "../findbin.pl",
458 mode = 0755))
459
460 factory.addStep(SetPropertyFromCommand(
461 name = "gcc",
462 property = "cc_command",
463 description = "Finding gcc command",
464 command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
465 haltOnFailure = True))
466
467 factory.addStep(SetPropertyFromCommand(
468 name = "g++",
469 property = "cxx_command",
470 description = "Finding g++ command",
471 command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
472 haltOnFailure = True))
473
474 # see if ccache is available
475 factory.addStep(SetPropertyFromCommand(
476 property = "ccache_command",
477 command = ["which", "ccache"],
478 description = "Testing for ccache command",
479 haltOnFailure = False,
480 flunkOnFailure = False,
481 warnOnFailure = False,
482 ))
483
484 # expire tree if needed
485 if tree_expire > 0:
486 factory.addStep(FileDownload(
487 name = "dlexpiresh",
488 doStepIf = IsExpireRequested,
489 mastersrc = "expire.sh",
490 slavedest = "../expire.sh",
491 mode = 0755))
492
493 factory.addStep(ShellCommand(
494 name = "expire",
495 description = "Checking for build tree expiry",
496 command = ["./expire.sh", str(tree_expire)],
497 workdir = ".",
498 haltOnFailure = True,
499 doStepIf = IsExpireRequested,
500 timeout = 2400))
501
502 # cleanup.sh if needed
503 factory.addStep(FileDownload(
504 name = "dlcleanupsh",
505 mastersrc = "cleanup.sh",
506 slavedest = "../cleanup.sh",
507 mode = 0755,
508 doStepIf = IsCleanupRequested))
509
510 factory.addStep(ShellCommand(
511 name = "cleanold",
512 description = "Cleaning previous builds",
513 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
514 workdir = ".",
515 haltOnFailure = True,
516 doStepIf = IsCleanupRequested,
517 timeout = 2400))
518
519 factory.addStep(ShellCommand(
520 name = "cleanup",
521 description = "Cleaning work area",
522 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
523 workdir = ".",
524 haltOnFailure = True,
525 doStepIf = IsCleanupRequested,
526 timeout = 2400))
527
528 # user-requested clean targets
529 for tuple in CleanTargetMap:
530 factory.addStep(ShellCommand(
531 name = tuple[1],
532 description = 'User-requested "make %s"' % tuple[1],
533 command = ["make", tuple[1], "V=s"],
534 env = MakeEnv(),
535 doStepIf = IsMakeCleanRequested(tuple[0])
536 ))
537
538 # check out the source
539 # Git() runs:
540 # if repo doesn't exist: 'git clone repourl'
541 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Both only work with mode='full'
542 # 'git fetch -t repourl branch; git reset --hard revision'
543 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
544 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
545 factory.addStep(Git(
546 name = "gitclean",
547 repourl = repo_url,
548 branch = repo_branch,
549 mode = 'full',
550 method = 'clean',
551 haltOnFailure = True,
552 doStepIf = IsGitCleanRequested,
553 ))
554
555 factory.addStep(Git(
556 name = "gitfresh",
557 repourl = repo_url,
558 branch = repo_branch,
559 mode = 'full',
560 method = 'fresh',
561 haltOnFailure = True,
562 doStepIf = IsGitFreshRequested,
563 ))
564
565 # update remote refs
566 factory.addStep(ShellCommand(
567 name = "fetchrefs",
568 description = "Fetching Git remote refs",
569 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
570 haltOnFailure = True
571 ))
572
573 # switch to tag
574 factory.addStep(ShellCommand(
575 name = "switchtag",
576 description = "Checking out Git tag",
577 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
578 haltOnFailure = True,
579 doStepIf = IsTaggingRequested
580 ))
581
582 factory.addStep(ShellCommand(
583 name = "rmtmp",
584 description = "Remove tmp folder",
585 command=["rm", "-rf", "tmp/"]))
586
587 # feed
588 # factory.addStep(ShellCommand(
589 # name = "feedsconf",
590 # description = "Copy the feeds.conf",
591 # command='''cp ~/feeds.conf ./feeds.conf''' ))
592
593 # feed
594 factory.addStep(ShellCommand(
595 name = "rmfeedlinks",
596 description = "Remove feed symlinks",
597 command=["rm", "-rf", "package/feeds/"]))
598
599 factory.addStep(StringDownload(
600 name = "ccachecc",
601 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
602 slavedest = "../ccache_cc.sh",
603 mode = 0755,
604 ))
605
606 factory.addStep(StringDownload(
607 name = "ccachecxx",
608 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
609 slavedest = "../ccache_cxx.sh",
610 mode = 0755,
611 ))
612
613 # Git SSH
614 if git_ssh:
615 factory.addStep(FileDownload(
616 name = "dlgitclonekey",
617 mastersrc = "git-clone.key",
618 slavedest = "../git-clone.key",
619 mode = 0600,
620 ))
621
622 factory.addStep(ShellCommand(
623 name = "patchfeedsconf",
624 description = "Patching feeds.conf",
625 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
626 haltOnFailure = True
627 ))
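# Illustrative effect of the sed above (feed name and host are placeholders):
#   feeds.conf.default:  src-git packages https://git.example.org/feed/packages.git
#   feeds.conf:          src-git packages ssh://git@git.example.org/feed/packages.git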
628
629 # feed
630 factory.addStep(ShellCommand(
631 name = "updatefeeds",
632 description = "Updating feeds",
633 command=["./scripts/feeds", "update"],
634 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
635 haltOnFailure = True
636 ))
637
638 # Git SSH
639 if git_ssh:
640 factory.addStep(ShellCommand(
641 name = "rmfeedsconf",
642 description = "Removing feeds.conf",
643 command=["rm", "feeds.conf"],
644 haltOnFailure = True
645 ))
646
647 # feed
648 factory.addStep(ShellCommand(
649 name = "installfeeds",
650 description = "Installing feeds",
651 command=["./scripts/feeds", "install", "-a"],
652 env = MakeEnv(tryccache=True),
653 haltOnFailure = True
654 ))
655
656 # seed config
657 factory.addStep(FileDownload(
658 name = "dlconfigseed",
659 mastersrc = "config.seed",
660 slavedest = ".config",
661 mode = 0644
662 ))
663
664 # configure
665 factory.addStep(ShellCommand(
666 name = "newconfig",
667 description = "Seeding .config",
668 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
669 ))
670
671 factory.addStep(ShellCommand(
672 name = "delbin",
673 description = "Removing output directory",
674 command = ["rm", "-rf", "bin/"]
675 ))
676
677 factory.addStep(ShellCommand(
678 name = "defconfig",
679 description = "Populating .config",
680 command = ["make", "defconfig"],
681 env = MakeEnv()
682 ))
683
684 # check arch
685 factory.addStep(ShellCommand(
686 name = "checkarch",
687 description = "Checking architecture",
688 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
689 logEnviron = False,
690 want_stdout = False,
691 want_stderr = False,
692 haltOnFailure = True
693 ))
694
695 # find libc suffix
696 factory.addStep(SetPropertyFromCommand(
697 name = "libc",
698 property = "libc",
699 description = "Finding libc suffix",
700 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
701
702 # install build key
703 factory.addStep(FileDownload(name="dlkeybuild", mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
704 factory.addStep(FileDownload(name="dlkeybuildpub", mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))
705
706 # prepare dl
707 factory.addStep(ShellCommand(
708 name = "dldir",
709 description = "Preparing dl/",
710 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
711 logEnviron = False,
712 want_stdout = False
713 ))
714
715 # prepare tar
716 factory.addStep(ShellCommand(
717 name = "dltar",
718 description = "Building and installing GNU tar",
719 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
720 env = MakeEnv(tryccache=True),
721 haltOnFailure = True
722 ))
723
724 # populate dl
725 factory.addStep(ShellCommand(
726 name = "dlrun",
727 description = "Populating dl/",
728 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
729 env = MakeEnv(),
730 logEnviron = False,
731 locks = [dlLock.access('exclusive')],
732 ))
733
734 factory.addStep(ShellCommand(
735 name = "cleanbase",
736 description = "Cleaning base-files",
737 command=["make", "package/base-files/clean", "V=s"]
738 ))
739
740 # build
741 factory.addStep(ShellCommand(
742 name = "tools",
743 description = "Building and installing tools",
744 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
745 env = MakeEnv(tryccache=True),
746 haltOnFailure = True
747 ))
748
749 factory.addStep(ShellCommand(
750 name = "toolchain",
751 description = "Building and installing toolchain",
752 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
753 env = MakeEnv(),
754 haltOnFailure = True
755 ))
756
757 factory.addStep(ShellCommand(
758 name = "kmods",
759 description = "Building kmods",
760 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
761 env = MakeEnv(),
762 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
763 haltOnFailure = True
764 ))
765
766 # find kernel version
767 factory.addStep(SetPropertyFromCommand(
768 name = "kernelversion",
769 property = "kernelversion",
770 description = "Finding the effective Kernel version",
771 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
772 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
773 ))
774
775 factory.addStep(ShellCommand(
776 name = "pkgclean",
777 description = "Cleaning up package build",
778 command=["make", "package/cleanup", "V=s"]
779 ))
780
781 factory.addStep(ShellCommand(
782 name = "pkgbuild",
783 description = "Building packages",
784 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
785 env = MakeEnv(),
786 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
787 haltOnFailure = True
788 ))
789
790 # factory.addStep(IfBuiltinShellCommand(
791 factory.addStep(ShellCommand(
792 name = "pkginstall",
793 description = "Installing packages",
794 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
795 env = MakeEnv(),
796 haltOnFailure = True
797 ))
798
799 factory.addStep(ShellCommand(
800 name = "pkgindex",
801 description = "Indexing packages",
802 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s"],
803 env = MakeEnv(),
804 haltOnFailure = True
805 ))
806
807 if enable_kmod_archive:
808 # embed kmod repository. Must happen before 'images'
809
810 # find rootfs staging directory
811 factory.addStep(SetPropertyFromCommand(
812 name = "stageroot",
813 property = "stageroot",
814 description = "Finding the rootfs staging directory",
815 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
816 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
817 ))
818
819 factory.addStep(ShellCommand(
820 name = "filesdir",
821 description = "Creating file overlay directory",
822 command=["mkdir", "-p", "files/etc/opkg"],
823 haltOnFailure = True
824 ))
825
826 factory.addStep(ShellCommand(
827 name = "kmodconfig",
828 description = "Embedding kmod repository configuration",
829 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
830 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
831 haltOnFailure = True
832 ))
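# Illustrative effect of the sed above (URL is a placeholder): a distfeeds.conf line
#   src/gz reponame_core http://downloads.example.org/targets/x/y/packages
# gains a companion line
#   src/gz reponame_kmods http://downloads.example.org/targets/x/y/kmods/<kernelversion>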
833
834 #factory.addStep(IfBuiltinShellCommand(
835 factory.addStep(ShellCommand(
836 name = "images",
837 description = "Building and installing images",
838 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
839 env = MakeEnv(),
840 haltOnFailure = True
841 ))
842
843 factory.addStep(ShellCommand(
844 name = "diffconfig",
845 description = "Generating config.seed",
846 command=["make", "-j1", "diffconfig", "V=s"],
847 env = MakeEnv(),
848 haltOnFailure = True
849 ))
850
851 factory.addStep(ShellCommand(
852 name = "checksums",
853 description = "Calculating checksums",
854 command=["make", "-j1", "checksum", "V=s"],
855 env = MakeEnv(),
856 haltOnFailure = True
857 ))
858
859 if enable_kmod_archive:
860 factory.addStep(ShellCommand(
861 name = "kmoddir",
862 description = "Creating kmod directory",
863 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
864 haltOnFailure = True
865 ))
866
867 factory.addStep(ShellCommand(
868 name = "kmodprepare",
869 description = "Preparing kmod archive",
870 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
871 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
872 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
873 haltOnFailure = True
874 ))
875
876 factory.addStep(ShellCommand(
877 name = "kmodindex",
878 description = "Indexing kmod archive",
879 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s",
880 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
881 env = MakeEnv(),
882 haltOnFailure = True
883 ))
884
885 # sign
886 if gpg_keyid is not None:
887 factory.addStep(MasterShellCommand(
888 name = "signprepare",
889 description = "Preparing temporary signing directory",
890 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
891 haltOnFailure = True
892 ))
893
894 factory.addStep(ShellCommand(
895 name = "signpack",
896 description = "Packing files to sign",
897 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
898 haltOnFailure = True
899 ))
900
901 factory.addStep(FileUpload(
902 slavesrc = "sign.tar.gz",
903 masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
904 haltOnFailure = True
905 ))
906
907 factory.addStep(MasterShellCommand(
908 name = "signfiles",
909 description = "Signing files",
910 command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
911 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
912 haltOnFailure = True
913 ))
914
915 factory.addStep(FileDownload(
916 name = "dlsigntargz",
917 mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
918 slavedest = "sign.tar.gz",
919 haltOnFailure = True
920 ))
921
922 factory.addStep(ShellCommand(
923 name = "signunpack",
924 description = "Unpacking signed files",
925 command = ["tar", "-xzf", "sign.tar.gz"],
926 haltOnFailure = True
927 ))
928
929 # upload
930 factory.addStep(ShellCommand(
931 name = "dirprepare",
932 description = "Preparing upload directory structure",
933 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
934 haltOnFailure = True
935 ))
936
937 factory.addStep(ShellCommand(
938 name = "linkprepare",
939 description = "Preparing repository symlink",
940 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
941 doStepIf = IsNoMasterBuild,
942 haltOnFailure = True
943 ))
944
945 if enable_kmod_archive:
946 factory.addStep(ShellCommand(
947 name = "kmoddirprepare",
948 description = "Preparing kmod archive upload directory",
949 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
950 haltOnFailure = True
951 ))
952
953 factory.addStep(ShellCommand(
954 name = "dirupload",
955 description = "Uploading directory structure",
956 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
957 env={'RSYNC_PASSWORD': rsync_bin_key},
958 haltOnFailure = True,
959 logEnviron = False,
960 ))
961
962 # download remote sha256sums to 'target-sha256sums'
963 factory.addStep(ShellCommand(
964 name = "target-sha256sums",
965 description = "Fetching remote sha256sums for target",
966 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
967 env={'RSYNC_PASSWORD': rsync_bin_key},
968 logEnviron = False,
969 haltOnFailure = False,
970 flunkOnFailure = False,
971 warnOnFailure = False,
972 ))
973
974 # build list of files to upload
975 factory.addStep(FileDownload(
976 name = "dlsha2rsyncpl",
977 mastersrc = "sha2rsync.pl",
978 slavedest = "../sha2rsync.pl",
979 mode = 0755,
980 ))
981
982 factory.addStep(ShellCommand(
983 name = "buildlist",
984 description = "Building list of files to upload",
985 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
986 haltOnFailure = True,
987 ))
988
989 factory.addStep(FileDownload(
990 name = "dlrsync.sh",
991 mastersrc = "rsync.sh",
992 slavedest = "../rsync.sh",
993 mode = 0755
994 ))
995
996 # upload new files and update existing ones
997 factory.addStep(ShellCommand(
998 name = "targetupload",
999 description = "Uploading target files",
1000 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1001 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1002 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1003 env={'RSYNC_PASSWORD': rsync_bin_key},
1004 haltOnFailure = True,
1005 logEnviron = False,
1006 ))
1007
1008 # delete files which don't exist locally
1009 factory.addStep(ShellCommand(
1010 name = "targetprune",
1011 description = "Pruning target files",
1012 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1013 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1014 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1015 env={'RSYNC_PASSWORD': rsync_bin_key},
1016 haltOnFailure = True,
1017 logEnviron = False,
1018 ))
1019
1020 if enable_kmod_archive:
1021 factory.addStep(ShellCommand(
1022 name = "kmodupload",
1023 description = "Uploading kmod archive",
1024 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1025 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1026 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1027 env={'RSYNC_PASSWORD': rsync_bin_key},
1028 haltOnFailure = True,
1029 logEnviron = False,
1030 ))
1031
1032 if rsync_src_url is not None:
1033 factory.addStep(ShellCommand(
1034 name = "sourcelist",
1035 description = "Finding source archives to upload",
1036 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1037 haltOnFailure = True
1038 ))
1039
1040 factory.addStep(ShellCommand(
1041 name = "sourceupload",
1042 description = "Uploading source archives",
1043 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1044 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1045 env={'RSYNC_PASSWORD': rsync_src_key},
1046 haltOnFailure = True,
1047 logEnviron = False,
1048 ))
1049
1050 if False:
1051 factory.addStep(ShellCommand(
1052 name = "packageupload",
1053 description = "Uploading package files",
1054 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1055 env={'RSYNC_PASSWORD': rsync_bin_key},
1056 haltOnFailure = False,
1057 logEnviron = False,
1058 ))
1059
1060 # logs
1061 if False:
1062 factory.addStep(ShellCommand(
1063 name = "upload",
1064 description = "Uploading logs",
1065 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1066 env={'RSYNC_PASSWORD': rsync_bin_key},
1067 haltOnFailure = False,
1068 alwaysRun = True,
1069 logEnviron = False,
1070 ))
1071
1072 factory.addStep(ShellCommand(
1073 name = "df",
1074 description = "Reporting disk usage",
1075 command=["df", "-h", "."],
1076 env={'LC_ALL': 'C'},
1077 haltOnFailure = False,
1078 alwaysRun = True
1079 ))
1080
1081 factory.addStep(ShellCommand(
1082 name = "ccachestat",
1083 description = "Reporting ccache stats",
1084 command=["ccache", "-s"],
1085 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1086 want_stderr = False,
1087 haltOnFailure = False,
1088 flunkOnFailure = False,
1089 warnOnFailure = False,
1090 alwaysRun = True,
1091 ))
1092
1093 from buildbot.config import BuilderConfig
1094
1095 c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1096
1097
1098 ####### STATUS TARGETS
1099
1100 # 'status' is a list of Status Targets. The results of each build will be
1101 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1102 # including web pages, email senders, and IRC bots.
1103
1104 c['status'] = []
1105
1106 from buildbot.status import html
1107 from buildbot.status.web import authz, auth
1108
1109 if ini.has_option("status", "bind"):
1110 if ini.has_option("status", "user") and ini.has_option("status", "password"):
1111 authz_cfg=authz.Authz(
1112 # change any of these to True to enable; see the manual for more
1113 # options
1114 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
1115 gracefulShutdown = 'auth',
1116 forceBuild = 'auth', # use this to test your slave once it is set up
1117 forceAllBuilds = 'auth',
1118 pingBuilder = False,
1119 stopBuild = 'auth',
1120 stopAllBuilds = 'auth',
1121 cancelPendingBuild = 'auth',
1122 )
1123 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
1124 else:
1125 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
1126
1127
1128 from buildbot.status import words
1129
1130 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1131 irc_host = ini.get("irc", "host")
1132 irc_port = 6667
1133 irc_chan = ini.get("irc", "channel")
1134 irc_nick = ini.get("irc", "nickname")
1135 irc_pass = None
1136
1137 if ini.has_option("irc", "port"):
1138 irc_port = ini.getint("irc", "port")
1139
1140 if ini.has_option("irc", "password"):
1141 irc_pass = ini.get("irc", "password")
1142
1143 irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
1144 channels = [{ "channel": irc_chan }],
1145 notify_events = {
1146 'exception': 1,
1147 'successToFailure': 1,
1148 'failureToSuccess': 1
1149 }
1150 )
1151
1152 c['status'].append(irc)
1153
1154 ####### DB URL
1155
1156 c['db'] = {
1157 # This specifies what database buildbot uses to store its state. You can leave
1158 # this at its default for all but the largest installations.
1159 'db_url' : "sqlite:///state.sqlite",
1160 }