phase1: use sha2rsync.pl for 'targetupload'
phase1/master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

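# A minimal sketch of the expected config.ini layout, assembled from the
# options read throughout this file; all values below are placeholders, not
# taken from a real installation:
#
#   [general]
#   title = Example buildbot
#   title_url = http://example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = /home/buildbot
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = upload@rsync.example.org::bin
#   binary_password = secret
#
#   [slave 1]
#   name = example-slave-1
#   password = secret
#
# Further optional sections/options ([gpg], [status], [irc], expire,
# other_builds, cc_version, builds, cleanup, dl_lock, ul_lock, shared_wd,
# source_url, source_password) are probed with has_option() below.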
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("general", "buildbot_url")

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9989

if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

c['slaves'] = []
NetLocks = dict()

for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
				sl_props['max_builds'] = max_builds
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))
tree_expire = 0
other_builds = 0
cc_version = None

cc_command = "gcc"
cxx_command = "g++"

if ini.has_option("general", "expire"):
	tree_expire = ini.getint("general", "expire")

if ini.has_option("general", "other_builds"):
	other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "cc_version"):
	cc_version = ini.get("general", "cc_version").split()
	if len(cc_version) == 1:
		cc_version = ["eq", cc_version[0]]

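# Note: a one-word cc_version value such as "4.8" is normalized to
# ["eq", "4.8"], while a two-word value is kept as [operator, version] and
# handed to findbin.pl below (the version numbers here are only examples).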
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
	gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
	gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
	gpg_passfile = ini.get("gpg", "passfile")

enable_kmod_archive = True


# find targets
targets = [ ]

if not os.path.isdir(home_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')

findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
	stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
	line = findtargets.stdout.readline()
	if not line:
		break
	ta = line.strip().split(' ')
	targets.append(ta[0])

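# dumpinfo.pl is expected to emit one "<target>/<subtarget> ..." record per
# line; only the first whitespace-separated field is kept, so each builder
# name ends up looking like "ar71xx/generic" (an illustrative name, split
# into ts[0]/ts[1] further down).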

# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the source repository configured above.

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
c['change_source'].append(GitPoller(
	repo_url,
	workdir=home_dir+'/work.git', branch=repo_branch,
	pollinterval=300))

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, a change on the watched branch kicks off a build on every target builder.

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(branch=repo_branch),
	treeStableTimer=60,
	builderNames=targets))

c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=targets))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source.git import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import Interpolate
from buildbot.process import properties


CleanTargetMap = [
	[ "tools", "tools/clean" ],
	[ "chain", "toolchain/clean" ],
	[ "linux", "target/linux/clean" ],
	[ "dir", "dirclean" ],
	[ "dist", "distclean" ]
]

def IsMakeCleanRequested(pattern):
	def CheckCleanProperty(step):
		val = step.getProperty("clean")
		if val and re.match(pattern, val):
			return True
		else:
			return False

	return CheckCleanProperty

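# The "clean" build property (typically set via the force scheduler) is
# matched against the patterns in CleanTargetMap above; for example, forcing
# a build with clean=dist would run "make distclean" before the checkout
# (hypothetical property value, shown only to illustrate the mapping).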
def IsCleanupRequested(step):
	shared_wd = step.getProperty("shared_wd")
	if shared_wd:
		return False
	do_cleanup = step.getProperty("do_cleanup")
	if do_cleanup:
		return True
	else:
		return False

def IsExpireRequested(step):
	shared_wd = step.getProperty("shared_wd")
	if shared_wd:
		return False
	else:
		return not IsCleanupRequested(step)

def IsGitFreshRequested(step):
	do_cleanup = step.getProperty("do_cleanup")
	if do_cleanup:
		return True
	else:
		return False

def IsGitCleanRequested(step):
	return not IsGitFreshRequested(step)

def IsTaggingRequested(step):
	val = step.getProperty("tag")
	if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
		return True
	else:
		return False

def IsNoTaggingRequested(step):
	return not IsTaggingRequested(step)

def IsNoMasterBuild(step):
	return repo_branch != "master"

def GetBaseVersion():
	if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
		return repo_branch.split('-')[1]
	else:
		return "master"

@properties.renderer
def GetVersionPrefix(props):
	basever = GetBaseVersion()
	if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
		return "%s/" % props["tag"]
	elif basever != "master":
		return "%s-SNAPSHOT/" % basever
	else:
		return ""

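# Resulting upload prefix, assuming a release branch named like
# "<name>-17.01" (branch and tag names below are examples only):
#   tag=17.01.2 requested              -> "17.01.2/"
#   no tag, repo_branch=<name>-17.01   -> "17.01-SNAPSHOT/"
#   no tag, repo_branch=master         -> ""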
@properties.renderer
def GetNumJobs(props):
	if props.hasProperty("max_builds") and props.hasProperty("nproc"):
		return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
	else:
		return "1"

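# Example with illustrative numbers: nproc=8 and max_builds=2 (with
# other_builds=0) yields "4", i.e. the -j value is the detected core count
# divided by the number of builds that may run concurrently on the slave.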
@properties.renderer
def GetCC(props):
	if props.hasProperty("cc_command"):
		return props["cc_command"]
	else:
		return "gcc"

@properties.renderer
def GetCXX(props):
	if props.hasProperty("cxx_command"):
		return props["cxx_command"]
	else:
		return "g++"

@properties.renderer
def GetCwd(props):
	if props.hasProperty("builddir"):
		return props["builddir"]
	elif props.hasProperty("workdir"):
		return props["workdir"]
	else:
		return "/"

@properties.renderer
def GetCCache(props):
	if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
		return props["ccache_command"] + " "
	else:
		return ""

def GetNextBuild(builder, requests):
	for r in requests:
		if r.properties and r.properties.hasProperty("tag"):
			return r
	return requests[0]

def MakeEnv(overrides=None, tryccache=False):
	if tryccache:
		envcc = Interpolate("%(kw:ccache)s%(kw:cc)s", ccache=GetCCache, cc=GetCC)
		envcxx = Interpolate("%(kw:ccache)s%(kw:cxx)s", ccache=GetCCache, cxx=GetCXX)
	else:
		envcc = Interpolate("%(kw:cc)s", cc=GetCC)
		envcxx = Interpolate("%(kw:cxx)s", cxx=GetCXX)
	env = {
		'CC': envcc,
		'CXX': envcxx,
	}
	if overrides is not None:
		env.update(overrides)
	return env

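# Usage sketch: MakeEnv(tryccache=True) renders CC/CXX through the renderers
# above, e.g. CC="ccache gcc" when a ccache binary was detected and plain
# CC="gcc" otherwise; MakeEnv(overrides={'FOO': 'bar'}) layers extra
# variables on top (FOO is just a placeholder).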
@properties.renderer
def NetLockDl(props):
	lock = None
	if props.hasProperty("dl_lock"):
		lock = NetLocks[props["dl_lock"]]
	if lock is not None:
		return [lock.access('exclusive')]
	else:
		return []

@properties.renderer
def NetLockUl(props):
	lock = None
	if props.hasProperty("ul_lock"):
		lock = NetLocks[props["ul_lock"]]
	if lock is not None:
		return [lock.access('exclusive')]
	else:
		return []

c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

checkBuiltin = re.sub('[\t\n ]+', ' ', """
checkBuiltin() {
	local symbol op path file;
	for file in $CHANGED_FILES; do
		case "$file" in
			package/*/*) : ;;
			*) return 0 ;;
		esac;
	done;
	while read symbol op path; do
		case "$symbol" in package-*)
			symbol="${symbol##*(}";
			symbol="${symbol%)}";
			for file in $CHANGED_FILES; do
				case "$file" in "package/$path/"*)
					grep -qsx "$symbol=y" .config && return 0
				;; esac;
			done;
		esac;
	done < tmp/.packagedeps;
	return 1;
}
""").strip()


class IfBuiltinShellCommand(ShellCommand):
	def _quote(self, str):
		if re.search("[^a-zA-Z0-9/_.-]", str):
			return "'%s'" %(re.sub("'", "'\"'\"'", str))
		return str

	def setCommand(self, command):
		if not isinstance(command, (str, unicode)):
			command = ' '.join(map(self._quote, command))
		self.command = [
			'/bin/sh', '-c',
			'%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
		]

	def setupEnvironment(self, cmd):
		slaveEnv = self.slaveEnvironment
		if slaveEnv is None:
			slaveEnv = { }
		changedFiles = { }
		for request in self.build.requests:
			for source in request.sources:
				for change in source.changes:
					for file in change.files:
						changedFiles[file] = True
		fullSlaveEnv = slaveEnv.copy()
		fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
		cmd.args['env'] = fullSlaveEnv

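# checkBuiltin (above) is shell code prepended to a step command: it reads
# tmp/.packagedeps and $CHANGED_FILES and makes the wrapped command a no-op
# when the only changes touch packages that are not enabled (=y) in the
# current .config. IfBuiltinShellCommand injects that wrapper and exports
# CHANGED_FILES; note it is currently only referenced from the commented-out
# steps further down.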
slaveNames = [ ]

for slave in c['slaves']:
	slaveNames.append(slave.slavename)

for target in targets:
	ts = target.split('/')

	factory = BuildFactory()

	# find number of cores
	factory.addStep(SetPropertyFromCommand(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# find gcc and g++ compilers
	if cc_version is not None:
		factory.addStep(FileDownload(
			name = "dlfindbinpl",
			mastersrc = "findbin.pl",
			slavedest = "../findbin.pl",
			mode = 0755))

		factory.addStep(SetPropertyFromCommand(
			name = "gcc",
			property = "cc_command",
			description = "Finding gcc command",
			command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
			haltOnFailure = True))

		factory.addStep(SetPropertyFromCommand(
			name = "g++",
			property = "cxx_command",
			description = "Finding g++ command",
			command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
			haltOnFailure = True))

	# see if ccache is available
	factory.addStep(SetPropertyFromCommand(
		property = "ccache_command",
		command = ["which", "ccache"],
		description = "Testing for ccache command",
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# expire tree if needed
	if tree_expire > 0:
		factory.addStep(FileDownload(
			name = "dlexpiresh",
			doStepIf = IsExpireRequested,
			mastersrc = "expire.sh",
			slavedest = "../expire.sh",
			mode = 0755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			doStepIf = IsExpireRequested,
			timeout = 2400))

	# cleanup.sh if needed
	factory.addStep(FileDownload(
		name = "dlcleanupsh",
		mastersrc = "cleanup.sh",
		slavedest = "../cleanup.sh",
		mode = 0755,
		doStepIf = IsCleanupRequested))

	factory.addStep(ShellCommand(
		name = "cleanold",
		description = "Cleaning previous builds",
		command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsCleanupRequested,
		timeout = 2400))

	factory.addStep(ShellCommand(
		name = "cleanup",
		description = "Cleaning work area",
		command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsCleanupRequested,
		timeout = 2400))

	# user-requested clean targets
	for tuple in CleanTargetMap:
		factory.addStep(ShellCommand(
			name = tuple[1],
			description = 'User-requested "make %s"' % tuple[1],
			command = ["make", tuple[1], "V=s"],
			env = MakeEnv(),
			doStepIf = IsMakeCleanRequested(tuple[0])
		))

	# check out the source
	# Git() runs:
	# if repo doesn't exist: 'git clone repourl'
	# method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
	# 'git fetch -t repourl branch; git reset --hard revision'
	# Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
	# if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
	factory.addStep(Git(
		name = "gitclean",
		repourl = repo_url,
		branch = repo_branch,
		mode = 'full',
		method = 'clean',
		haltOnFailure = True,
		doStepIf = IsGitCleanRequested,
	))

	factory.addStep(Git(
		name = "gitfresh",
		repourl = repo_url,
		branch = repo_branch,
		mode = 'full',
		method = 'fresh',
		haltOnFailure = True,
		doStepIf = IsGitFreshRequested,
	))

	# update remote refs
	factory.addStep(ShellCommand(
		name = "fetchrefs",
		description = "Fetching Git remote refs",
		command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
		haltOnFailure = True
	))

	# switch to tag
	factory.addStep(ShellCommand(
		name = "switchtag",
		description = "Checking out Git tag",
		command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
		haltOnFailure = True,
		doStepIf = IsTaggingRequested
	))

	factory.addStep(ShellCommand(
		name = "rmtmp",
		description = "Remove tmp folder",
		command=["rm", "-rf", "tmp/"]))

	# feed
	# factory.addStep(ShellCommand(
	#	name = "feedsconf",
	#	description = "Copy the feeds.conf",
	#	command='''cp ~/feeds.conf ./feeds.conf''' ))

	# feed
	factory.addStep(ShellCommand(
		name = "rmfeedlinks",
		description = "Remove feed symlinks",
		command=["rm", "-rf", "package/feeds/"]))

	# feed
	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		command=["./scripts/feeds", "update"],
		env = MakeEnv(),
	))

	# feed
	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		command=["./scripts/feeds", "install", "-a"],
		env = MakeEnv()))

	# seed config
	factory.addStep(FileDownload(
		name = "dlconfigseed",
		mastersrc = "config.seed",
		slavedest = ".config",
		mode = 0644
	))

	# configure
	factory.addStep(ShellCommand(
		name = "newconfig",
		description = "Seeding .config",
		command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
	))

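	# For a hypothetical target "ar71xx/generic" the step above appends
	# CONFIG_TARGET_ar71xx=y and CONFIG_TARGET_ar71xx_generic=y to the seeded
	# .config, which "make defconfig" below then expands.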
	factory.addStep(ShellCommand(
		name = "delbin",
		description = "Removing output directory",
		command = ["rm", "-rf", "bin/"]
	))

	factory.addStep(ShellCommand(
		name = "defconfig",
		description = "Populating .config",
		command = ["make", "defconfig"],
		env = MakeEnv()
	))

	# check arch
	factory.addStep(ShellCommand(
		name = "checkarch",
		description = "Checking architecture",
		command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
		logEnviron = False,
		want_stdout = False,
		want_stderr = False,
		haltOnFailure = True
	))

	# find libc suffix
	factory.addStep(SetPropertyFromCommand(
		name = "libc",
		property = "libc",
		description = "Finding libc suffix",
		command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

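	# The sed expression above maps CONFIG_LIBC to a directory suffix, e.g.
	# CONFIG_LIBC="uclibc" becomes libc="-uclibc", while musl is special-cased
	# to an empty suffix so those directory names stay unsuffixed.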
	# install build key
	factory.addStep(FileDownload(name="dlkeybuild", mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
	factory.addStep(FileDownload(name="dlkeybuildpub", mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))

	# prepare dl
	factory.addStep(ShellCommand(
		name = "dldir",
		description = "Preparing dl/",
		command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
		logEnviron = False,
		want_stdout = False
	))

	# prepare tar
	factory.addStep(ShellCommand(
		name = "dltar",
		description = "Building and installing GNU tar",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	# populate dl
	factory.addStep(ShellCommand(
		name = "dlrun",
		description = "Populating dl/",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
		env = MakeEnv(),
		logEnviron = False,
		locks = [dlLock.access('exclusive')],
	))

	factory.addStep(ShellCommand(
		name = "cleanbase",
		description = "Cleaning base-files",
		command=["make", "package/base-files/clean", "V=s"]
	))

	# build
	factory.addStep(ShellCommand(
		name = "tools",
		description = "Building and installing tools",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "toolchain",
		description = "Building and installing toolchain",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "kmods",
		description = "Building kmods",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# find kernel version
	factory.addStep(SetPropertyFromCommand(
		name = "kernelversion",
		property = "kernelversion",
		description = "Finding the effective Kernel version",
		command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
		env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
	))

	factory.addStep(ShellCommand(
		name = "pkgclean",
		description = "Cleaning up package build",
		command=["make", "package/cleanup", "V=s"]
	))

	factory.addStep(ShellCommand(
		name = "pkgbuild",
		description = "Building packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "pkginstall",
		description = "Installing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "pkgindex",
		description = "Indexing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	if enable_kmod_archive:
		# embed kmod repository. Must happen before 'images'

		# find rootfs staging directory
		factory.addStep(SetPropertyFromCommand(
			name = "stageroot",
			property = "stageroot",
			description = "Finding the rootfs staging directory",
			command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
			env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
		))

		factory.addStep(ShellCommand(
			name = "filesdir",
			description = "Creating file overlay directory",
			command=["mkdir", "-p", "files/etc/opkg"],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodconfig",
			description = "Embedding kmod repository configuration",
			command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
				"%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
			haltOnFailure = True
		))

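		# Illustrative effect of the sed above: every "src/gz <name>_core
		# <url>/packages" line in the staged distfeeds.conf gains a matching
		# "src/gz <name>_kmods <url>/kmods/<kernelversion>" line in the file
		# overlay, so built images carry an opkg feed pointing at the kmod
		# archive (<name> and <url> stand for whatever the staged feed uses).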
	#factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "images",
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "diffconfig",
		description = "Generating config.seed",
		command=["make", "-j1", "diffconfig", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddir",
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodindex",
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s",
				Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			env = MakeEnv(),
			haltOnFailure = True
		))

	# sign
	if gpg_keyid is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(home_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
			env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
			slavedest = "sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

	# upload
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-4", "-az", "tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

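	# Incremental upload: the next steps fetch the server-side sha256sums,
	# let sha2rsync.pl compare them against the freshly built sha256sums and
	# write the names of new or changed files to 'rsynclist', which is passed
	# to rsync via --files-from so only those files get transferred.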
	# download remote sha256sums to 'target-sha256sums'
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-4", "-vz", Interpolate("%(kw:rsyncbinurl)s/targets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1]), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = "sha2rsync.pl",
		slavedest = "../sha2rsync.pl",
		mode = 0755,
	))

	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = "rsync.sh",
		slavedest = "../rsync.sh",
		mode = 0755
	))

	# upload new files and update existing ones
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "-4", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]),
			"-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "-4", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]),
			"-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "-4", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]),
				"-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
				Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
		))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "-4", "--size-only", "--delay-updates",
				Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
		))

	if False:
		factory.addStep(ShellCommand(
			name = "packageupload",
			description = "Uploading package files",
			command=["../rsync.sh", "-4", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a", "bin/packages/", "%s/packages/" %(rsync_bin_url)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			logEnviron = False,
		))

	# logs
	if False:
		factory.addStep(ShellCommand(
			name = "upload",
			description = "Uploading logs",
			command=["../rsync.sh", "-4", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az", "logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			alwaysRun = True,
			logEnviron = False,
		))

	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["./staging_dir/host/bin", "${PATH}"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	else:
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))


from buildbot.status import words

if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
	irc_host = ini.get("irc", "host")
	irc_port = 6667
	irc_chan = ini.get("irc", "channel")
	irc_nick = ini.get("irc", "nickname")
	irc_pass = None

	if ini.has_option("irc", "port"):
		irc_port = ini.getint("irc", "port")

	if ini.has_option("irc", "password"):
		irc_pass = ini.get("irc", "password")

	irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
			channels = [{ "channel": irc_chan }],
			notify_events = {
				'exception': 1,
				'successToFailure': 1,
				'failureToSuccess': 1
			}
	)

	c['status'].append(irc)

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}