phase1: add JSON merge step
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from buildbot import locks
11 from buildbot.changes import filter
12 from buildbot.changes.gitpoller import GitPoller
13 from buildbot.config import BuilderConfig
14 from buildbot.plugins import reporters
15 from buildbot.plugins import schedulers
16 from buildbot.plugins import steps
17 from buildbot.plugins import util
18 from buildbot.process import properties
19 from buildbot.process.factory import BuildFactory
20 from buildbot.process.properties import Interpolate
21 from buildbot.process.properties import Property
22 from buildbot.schedulers.basic import SingleBranchScheduler
23 from buildbot.schedulers.forcesched import BaseParameter
24 from buildbot.schedulers.forcesched import ForceScheduler
25 from buildbot.schedulers.forcesched import ValidationError
26 from buildbot.steps.master import MasterShellCommand
27 from buildbot.steps.shell import SetPropertyFromCommand
28 from buildbot.steps.shell import ShellCommand
29 from buildbot.steps.source.git import Git
30 from buildbot.steps.transfer import FileDownload
31 from buildbot.steps.transfer import FileUpload
32 from buildbot.steps.transfer import StringDownload
33 from buildbot.worker import Worker
34
35
36 # This is a sample buildmaster config file. It must be installed as
37 # 'master.cfg' in your buildmaster's base directory.
38
# Read the master configuration; BUILDMASTER_CONFIG may point at an
# alternative ini file (defaults to ./config.ini in the master's basedir).
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
45
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
# Both values come from the [general] section of config.ini.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")
62
####### BUILDSLAVES

# The 'workers' list defines the set of recognized buildslaves. Each element is
# a Worker object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.

# TCP port the master listens on for worker connections; overridable via the
# "port" option of the [phase1] section.
slave_port = 9989

if ini.has_option("phase1", "port"):
	# NOTE(review): ini.get() yields a string here, not an int; buildbot
	# accepts string port specifications, but getint() may be intended —
	# confirm against the deployed config.
	slave_port = ini.get("phase1", "port")

c['workers'] = []
# Shared map of lock name -> MasterLock for the dl_lock/ul_lock options below.
NetLocks = dict()
76
# Register the phase-1 build slaves declared in config.ini. Any "slave ..."
# section carrying name/password and either no "phase" option or phase == 1
# becomes a Worker. The per-slave properties collected here (cleanup, network
# locks, shared workdir) steer the build steps defined further down.
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
			sl_props['max_builds'] = max_builds
			# A worker running a single build at a time can safely share
			# one work directory between all builders.
			if max_builds == 1:
				sl_props['shared_wd'] = True
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				# FIX: this previously read the "dl_lock" option (copy-paste
				# error), so a distinct upload lock was never honoured.
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				# Sharing one workdir between concurrent builds would corrupt
				# the tree; refuse such configurations outright.
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
108
# 'slavePortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildslaves (with their
# --master option)
c['protocols'] = {'pb': {'port': slave_port}}

# coalesce builds: collapse queued requests for the same builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data retained by the master
c['buildHorizon'] = 30
c['logHorizon'] = 20
120
####### CHANGESOURCES

# Master-side working area (holds the source.git clone used for target
# discovery below) and the directory with the helper scripts shipped
# alongside phase1.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
# Optional [phase1] tunables; defaulted here and overridden from the ini below.
tree_expire = 0      # seconds after which a build tree is rebuilt (0 = never)
other_builds = 0     # extra concurrent builds accounted for when computing -j
cc_version = None    # required compiler version, as [op, version]

cc_command = "gcc"
cxx_command = "g++"

config_seed = ""     # verbatim text seeded into each builder's .config

git_ssh = False
git_ssh_key = None

if ini.has_option("phase1", "expire"):
	tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "other_builds"):
	other_builds = ini.getint("phase1", "other_builds")

if ini.has_option("phase1", "cc_version"):
	# Either "<version>" (shorthand for an exact match) or "<op> <version>".
	cc_version = ini.get("phase1", "cc_version").split()
	if len(cc_version) == 1:
		cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
else:
	# SSH access without a key is useless; fall back to plain git transport.
	git_ssh = False

if ini.has_option("phase1", "config_seed"):
	config_seed = ini.get("phase1", "config_seed")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

# rsync destination for built binaries; --contimeout is only valid when
# talking to an rsync daemon (host::module or rsync:// URLs).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
	rsync_bin_defopts += ["--contimeout=20"]

# Optional rsync destination for source tarballs.
rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

	if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
		rsync_src_defopts += ["--contimeout=20"]

# usign package-signing secret key (base64) and the comment written into the
# derived public key file (see UsignSec2Pub below).
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")

enable_kmod_archive = True
193
194
# Discover the buildable targets by cloning (or updating) the source tree and
# asking dumpinfo.pl; each output line is "<target> ...", only the first
# field is kept.
targets = []

source_git = work_dir + '/source.git'
if os.path.isdir(source_git):
	subprocess.call(["git", "pull"], cwd = source_git)
else:
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, source_git])

findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
	stdout = subprocess.PIPE, cwd = source_git)

for line in findtargets.stdout:
	targets.append(line.decode().strip().split(' ')[0])
212
213
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
# Poll the source repository every 5 minutes for new commits on the
# configured branch.
c['change_source'].append(GitPoller(
	repo_url,
	workdir=work_dir+'/work.git', branch=repo_branch,
	pollinterval=300))
222
223 ####### SCHEDULERS
224
225 # Configure the Schedulers, which decide how to react to incoming changes. In this
226 # case, just kick off a 'basebuild' build
227
class TagChoiceParameter(BaseParameter):
	"""Force-scheduler dropdown listing the release tags of this branch.

	The choice list is computed lazily: each time the force-build form is
	rendered, the remote tags are queried and filtered down to those that
	belong to the branch's version series (e.g. v19.07.* for a "-19.07"
	branch). The empty string (build branch HEAD) is always offered first.
	"""
	spec_attributes = ["strict", "choices"]
	type = "list"
	strict = True

	def __init__(self, name, label=None, **kw):
		super().__init__(name, label, **kw)
		# Copy of the most recently offered choices; parse_from_arg()
		# validates the submitted value against this list.
		self._choice_list = []

	@property
	def choices(self):
		taglist = []
		# Branch names look like "<name>-<major>.<minor>"; only such
		# branches have an associated tag series.
		basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

		if basever:
			findtags = subprocess.Popen(
				['git', 'ls-remote', '--tags', repo_url],
				stdout = subprocess.PIPE)

			while True:
				line = findtags.stdout.readline()

				if not line:
					break

				# Accept release tags (vX.Y.Z) and release candidates
				# (vX.Y.Z-rcN); the anchored regex skips peeled ^{} refs.
				tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

				if tagver and tagver[1].find(basever[1]) == 0:
					taglist.append(tagver[1])

		# Newest first; appending "-z" to final releases makes them sort
		# after their release candidates within the same version.
		taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
		taglist.insert(0, '')

		self._choice_list = taglist

		return self._choice_list

	def parse_from_arg(self, s):
		# Reject anything that was not in the list shown to the user.
		if self.strict and s not in self._choice_list:
			raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
		return s
269
c['schedulers'] = []
# Automatic scheduler: any commit on the configured branch triggers every
# target builder once the tree has been stable for a minute.
c['schedulers'].append(SingleBranchScheduler(
	name = "all",
	change_filter = filter.ChangeFilter(branch=repo_branch),
	treeStableTimer = 60,
	builderNames = targets))

# Manual scheduler: the "00_force_build" pseudo-builder fans a forced build
# out to all targets, or to one selected target, optionally at a release tag.
c['schedulers'].append(ForceScheduler(
	name = "force",
	buttonName = "Force builds",
	label = "Force build details",
	builderNames = [ "00_force_build" ],

	# Fixed, empty codebase parameters: the repository/branch are dictated
	# by config.ini and must not be overridable from the web form.
	codebases = [
		util.CodebaseParameter(
			"",
			label = "Repository",
			branch = util.FixedParameter(name = "branch", default = ""),
			revision = util.FixedParameter(name = "revision", default = ""),
			repository = util.FixedParameter(name = "repository", default = ""),
			project = util.FixedParameter(name = "project", default = "")
		)
	],

	reason = util.StringParameter(
		name = "reason",
		label = "Reason",
		default = "Trigger build",
		required = True,
		size = 80
	),

	# Both choices are nested under the "options" property; the build steps
	# unpack it via TagPropertyValue / IsTargetSelected below.
	properties = [
		util.NestedParameter(
			name="options",
			label="Build Options",
			layout="vertical",
			fields=[
				util.ChoiceStringParameter(
					name = "target",
					label = "Build target",
					default = "all",
					choices = [ "all" ] + targets
				),
				TagChoiceParameter(
					name = "tag",
					label = "Build tag",
					default = ""
				)
			]
		)
	]
))
323
####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one slave.

# Mapping of user-facing "clean" property keywords to the make target that
# implements them, roughly ordered from least to most destructive.
CleanTargetMap = [
	[ "tools", "tools/clean" ],
	[ "chain", "toolchain/clean" ],
	[ "linux", "target/linux/clean" ],
	[ "dir", "dirclean" ],
	[ "dist", "distclean" ]
]
337
def IsMakeCleanRequested(pattern):
	"""Return a doStepIf predicate that is true when the build's "clean"
	property matches *pattern* (a CleanTargetMap keyword)."""
	def _check(step):
		requested = step.getProperty("clean")
		return bool(requested and re.match(pattern, requested))
	return _check
347
def IsSharedWorkdir(step):
	# True when this worker was configured with a shared build directory.
	return True if step.getProperty("shared_wd") else False
350
def IsCleanupRequested(step):
	"""True when a full cleanup should run; never on shared workdirs,
	which other builders may be using concurrently."""
	if IsSharedWorkdir(step):
		return False
	return bool(step.getProperty("do_cleanup"))
359
def IsExpireRequested(step):
	"""True when build-tree expiry should be checked: only for non-shared
	workdirs that are not already doing a full cleanup."""
	return not (IsSharedWorkdir(step) or IsCleanupRequested(step))
365
def IsGitFreshRequested(step):
	# A cleanup build also gets a "fresh" (fully scrubbed) git checkout.
	return bool(step.getProperty("do_cleanup"))
372
def IsGitCleanRequested(step):
	# Regular builds check out with the plain "clean" method; cleanup builds
	# use the more thorough "fresh" method instead (see IsGitFreshRequested).
	return not IsGitFreshRequested(step)
375
def IsTaggingRequested(step):
	"""True when the "tag" property holds a well-formed release tag
	(x.y.z with an optional -rcN suffix)."""
	tag = step.getProperty("tag")
	return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))
382
def IsNoTaggingRequested(step):
	# Inverse of IsTaggingRequested: true for ordinary (untagged) builds.
	return not IsTaggingRequested(step)
385
def IsNoMasterBuild(step):
	# True on release branches; gates steps that must not run on master.
	return repo_branch != "master"
388
def GetBaseVersion():
	"""Return the numeric series of repo_branch ("x.y" from "name-x.y"),
	or "master" when the branch does not follow that naming scheme."""
	if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
		return repo_branch.split('-')[1]
	return "master"
394
@properties.renderer
def GetVersionPrefix(props):
	"""Upload directory prefix: "<tag>/" for tagged release builds,
	"<x.y>-SNAPSHOT/" on release branches, "" on master."""
	if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
		return "%s/" % props["tag"]
	basever = GetBaseVersion()
	if basever != "master":
		return "%s-SNAPSHOT/" % basever
	return ""
404
@properties.renderer
def GetNumJobs(props):
	"""Parallel make jobs as a string: nproc divided across the builds
	this worker may run concurrently (plus configured other_builds)."""
	if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
		return "1"
	return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
411
@properties.renderer
def GetCC(props):
	# C compiler detected by the findbin.pl step, falling back to plain gcc.
	return props["cc_command"] if props.hasProperty("cc_command") else "gcc"
418
@properties.renderer
def GetCXX(props):
	# C++ compiler detected by the findbin.pl step, falling back to plain g++.
	return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
425
@properties.renderer
def GetCwd(props):
	# Best-effort absolute build location: builddir, then workdir, then "/".
	for key in ("builddir", "workdir"):
		if props.hasProperty(key):
			return props[key]
	return "/"
434
@properties.renderer
def GetCCache(props):
	# Path to the ccache binary when the probe step found one, else "".
	if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
		return props["ccache_command"]
	return ""
441
442 def GetNextBuild(builder, requests):
443 for r in requests:
444 if r.properties and r.properties.hasProperty("tag"):
445 return r
446 return requests[0]
447
def MakeEnv(overrides=None, tryccache=False):
	"""Build the environment dict for make invocations.

	CCC/CCXX always carry the detected host compilers. With tryccache the
	effective CC/CXX point at the ccache wrapper scripts and CCACHE at the
	detected ccache binary; otherwise CC/CXX mirror CCC/CCXX and CCACHE is
	empty. *overrides* are applied last and win over everything else.
	"""
	env = {
		'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
		'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
	}
	if tryccache:
		env.update({
			'CC': Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd),
			'CXX': Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd),
			'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
		})
	else:
		env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
	if overrides is not None:
		env.update(overrides)
	return env
464
@properties.renderer
def NetLockDl(props):
	"""Exclusive network download lock for this worker, when configured."""
	if props.hasProperty("dl_lock"):
		return [NetLocks[props["dl_lock"]].access('exclusive')]
	return []
474
@properties.renderer
def NetLockUl(props):
	"""Exclusive network upload lock for this worker, when configured."""
	if props.hasProperty("ul_lock"):
		return [NetLocks[props["ul_lock"]].access('exclusive')]
	return []
484
@util.renderer
def TagPropertyValue(props):
	# Extract the forced-build "tag" choice from the nested options dict.
	if props.hasProperty("options"):
		selection = props.getProperty("options")
		if type(selection) is dict:
			return selection.get("tag")
	return None
492
def IsTargetSelected(target):
	"""Return a doStepIf predicate that is true unless the forced build
	selected a specific target other than *target*."""
	def _check(step):
		try:
			options = step.getProperty("options")
			if type(options) is dict:
				chosen = options.get("target", "all")
				return chosen in ("all", target)
		except KeyError:
			pass
		return True
	return _check
507
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key file from a base64-encoded secret key.

	The decoded secret-key blob contains the public key material at fixed
	offsets: 2 bytes of key type, the 8-byte key id at offset 32 and the
	public key proper from offset 72. Returns the public key file content
	(comment line with "secret key" rewritten to "public key", plus base64
	payload), or None when the input cannot be base64-decoded.
	"""
	try:
		seckey = base64.b64decode(seckey)
	except (ValueError, TypeError):
		# ValueError covers binascii.Error (bad padding etc.); TypeError a
		# missing/None key. The previous bare except hid real errors.
		return None

	# FIX: b64encode() returns bytes on Python 3; without .decode() the
	# generated file contained a literal "b'...'" repr instead of the key.
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode("ascii"))
516
517
c['builders'] = []

# Serialise dl/ population: one download step at a time per worker.
dlLock = locks.WorkerLock("slave_dl")

# Shell helper injected in front of commands run via IfBuiltinShellCommand:
# exits 0 (i.e. "run the wrapped command") when any file in $CHANGED_FILES
# belongs to a package compiled into the image (=y in .config), or when a
# non-package file changed at all. Collapsed to one line so it can be
# embedded safely into a 'sh -c' argument.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
	checkBuiltin() {
		local symbol op path file;
		for file in $CHANGED_FILES; do
			case "$file" in
				package/*/*) : ;;
				*) return 0 ;;
			esac;
		done;
		while read symbol op path; do
			case "$symbol" in package-*)
				symbol="${symbol##*(}";
				symbol="${symbol%)}";
				for file in $CHANGED_FILES; do
					case "$file" in "package/$path/"*)
						grep -qsx "$symbol=y" .config && return 0
					;; esac;
				done;
			esac;
		done < tmp/.packagedeps;
		return 1;
	}
""").strip()
545
546
class IfBuiltinShellCommand(ShellCommand):
	"""ShellCommand that only runs when a builtin package may be affected.

	Wraps the given command in the checkBuiltin shell snippet (defined
	above), which inspects $CHANGED_FILES against tmp/.packagedeps and
	skips the command when none of the changed packages is built into
	the image.
	"""

	def _quote(self, arg):
		# Shell-quote one argument: wrap in single quotes when it contains
		# anything outside the known-safe character set.
		if re.search("[^a-zA-Z0-9/_.-]", arg):
			return "'%s'" %(re.sub("'", "'\"'\"'", arg))
		return arg

	def setCommand(self, command):
		# FIX: the original tested isinstance(command, (str, unicode));
		# `unicode` does not exist on Python 3 and raised NameError for
		# list-form commands. A plain str check is equivalent there.
		if not isinstance(command, str):
			command = ' '.join(map(self._quote, command))
		self.command = [
			'/bin/sh', '-c',
			'%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
		]

	def setupEnvironment(self, cmd):
		# Export the files touched by the triggering changes so that
		# checkBuiltin can match them against package paths.
		slaveEnv = self.slaveEnvironment
		if slaveEnv is None:
			slaveEnv = { }
		changedFiles = { }
		for request in self.build.requests:
			for source in request.sources:
				for change in source.changes:
					for file in change.files:
						changedFiles[file] = True
		fullSlaveEnv = slaveEnv.copy()
		fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
		cmd.args['env'] = fullSlaveEnv
574
# Every phase-1 worker may run any builder, including the forced-build stub.
slaveNames = [slave.workername for slave in c['workers']]

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = slaveNames,
	factory = force_factory))
586
587 for target in targets:
588 ts = target.split('/')
589
590 factory = BuildFactory()
591
592 # setup shared work directory if required
593 factory.addStep(ShellCommand(
594 name = "sharedwd",
595 description = "Setting up shared work directory",
596 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
597 workdir = ".",
598 haltOnFailure = True,
599 doStepIf = IsSharedWorkdir))
600
601 # find number of cores
602 factory.addStep(SetPropertyFromCommand(
603 name = "nproc",
604 property = "nproc",
605 description = "Finding number of CPUs",
606 command = ["nproc"]))
607
608 # find gcc and g++ compilers
609 factory.addStep(FileDownload(
610 name = "dlfindbinpl",
611 mastersrc = scripts_dir + '/findbin.pl',
612 workerdest = "../findbin.pl",
613 mode = 0o755))
614
615 factory.addStep(SetPropertyFromCommand(
616 name = "gcc",
617 property = "cc_command",
618 description = "Finding gcc command",
619 command = [
620 "../findbin.pl", "gcc",
621 cc_version[0] if cc_version is not None else '',
622 cc_version[1] if cc_version is not None else ''
623 ],
624 haltOnFailure = True))
625
626 factory.addStep(SetPropertyFromCommand(
627 name = "g++",
628 property = "cxx_command",
629 description = "Finding g++ command",
630 command = [
631 "../findbin.pl", "g++",
632 cc_version[0] if cc_version is not None else '',
633 cc_version[1] if cc_version is not None else ''
634 ],
635 haltOnFailure = True))
636
637 # see if ccache is available
638 factory.addStep(SetPropertyFromCommand(
639 property = "ccache_command",
640 command = ["which", "ccache"],
641 description = "Testing for ccache command",
642 haltOnFailure = False,
643 flunkOnFailure = False,
644 warnOnFailure = False,
645 ))
646
647 # expire tree if needed
648 if tree_expire > 0:
649 factory.addStep(FileDownload(
650 name = "dlexpiresh",
651 doStepIf = IsExpireRequested,
652 mastersrc = scripts_dir + '/expire.sh',
653 workerdest = "../expire.sh",
654 mode = 0o755))
655
656 factory.addStep(ShellCommand(
657 name = "expire",
658 description = "Checking for build tree expiry",
659 command = ["./expire.sh", str(tree_expire)],
660 workdir = ".",
661 haltOnFailure = True,
662 doStepIf = IsExpireRequested,
663 timeout = 2400))
664
665 # cleanup.sh if needed
666 factory.addStep(FileDownload(
667 name = "dlcleanupsh",
668 mastersrc = scripts_dir + '/cleanup.sh',
669 workerdest = "../cleanup.sh",
670 mode = 0o755,
671 doStepIf = IsCleanupRequested))
672
673 factory.addStep(ShellCommand(
674 name = "cleanold",
675 description = "Cleaning previous builds",
676 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
677 workdir = ".",
678 haltOnFailure = True,
679 doStepIf = IsCleanupRequested,
680 timeout = 2400))
681
682 factory.addStep(ShellCommand(
683 name = "cleanup",
684 description = "Cleaning work area",
685 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
686 workdir = ".",
687 haltOnFailure = True,
688 doStepIf = IsCleanupRequested,
689 timeout = 2400))
690
691 # user-requested clean targets
692 for tuple in CleanTargetMap:
693 factory.addStep(ShellCommand(
694 name = tuple[1],
695 description = 'User-requested "make %s"' % tuple[1],
696 command = ["make", tuple[1], "V=s"],
697 env = MakeEnv(),
698 doStepIf = IsMakeCleanRequested(tuple[0])
699 ))
700
701 # Workaround bug when switching from a checked out tag back to a branch
702 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
703 factory.addStep(ShellCommand(
704 name = "gitcheckout",
705 description = "Ensure that Git HEAD is sane",
706 command = "if [ -d .git ]; then git checkout -f %s; git branch --set-upstream-to origin/%s; else exit 0; fi" %(repo_branch, repo_branch),
707 haltOnFailure = True))
708
709 # check out the source
710 # Git() runs:
711 # if repo doesn't exist: 'git clone repourl'
712 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
713 # 'git fetch -t repourl branch; git reset --hard revision'
714 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
715 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
716 factory.addStep(Git(
717 name = "gitclean",
718 repourl = repo_url,
719 branch = repo_branch,
720 mode = 'full',
721 method = 'clean',
722 haltOnFailure = True,
723 doStepIf = IsGitCleanRequested,
724 ))
725
726 factory.addStep(Git(
727 name = "gitfresh",
728 repourl = repo_url,
729 branch = repo_branch,
730 mode = 'full',
731 method = 'fresh',
732 haltOnFailure = True,
733 doStepIf = IsGitFreshRequested,
734 ))
735
736 # update remote refs
737 factory.addStep(ShellCommand(
738 name = "fetchrefs",
739 description = "Fetching Git remote refs",
740 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
741 haltOnFailure = True
742 ))
743
744 # switch to tag
745 factory.addStep(ShellCommand(
746 name = "switchtag",
747 description = "Checking out Git tag",
748 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
749 haltOnFailure = True,
750 doStepIf = IsTaggingRequested
751 ))
752
753 # Verify that Git HEAD points to a tag or branch
754 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
755 factory.addStep(ShellCommand(
756 name = "gitverify",
757 description = "Ensure that Git HEAD is pointing to a branch or tag",
758 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
759 haltOnFailure = True))
760
761 factory.addStep(ShellCommand(
762 name = "rmtmp",
763 description = "Remove tmp folder",
764 command=["rm", "-rf", "tmp/"]))
765
766 # feed
767 # factory.addStep(ShellCommand(
768 # name = "feedsconf",
769 # description = "Copy the feeds.conf",
770 # command='''cp ~/feeds.conf ./feeds.conf''' ))
771
772 # feed
773 factory.addStep(ShellCommand(
774 name = "rmfeedlinks",
775 description = "Remove feed symlinks",
776 command=["rm", "-rf", "package/feeds/"]))
777
778 factory.addStep(StringDownload(
779 name = "ccachecc",
780 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
781 workerdest = "../ccache_cc.sh",
782 mode = 0o755,
783 ))
784
785 factory.addStep(StringDownload(
786 name = "ccachecxx",
787 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
788 workerdest = "../ccache_cxx.sh",
789 mode = 0o755,
790 ))
791
792 # Git SSH
793 if git_ssh:
794 factory.addStep(StringDownload(
795 name = "dlgitclonekey",
796 s = git_ssh_key,
797 workerdest = "../git-clone.key",
798 mode = 0o600,
799 ))
800
801 factory.addStep(ShellCommand(
802 name = "patchfeedsconf",
803 description = "Patching feeds.conf",
804 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
805 haltOnFailure = True
806 ))
807
808 # feed
809 factory.addStep(ShellCommand(
810 name = "updatefeeds",
811 description = "Updating feeds",
812 command=["./scripts/feeds", "update"],
813 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
814 haltOnFailure = True
815 ))
816
817 # Git SSH
818 if git_ssh:
819 factory.addStep(ShellCommand(
820 name = "rmfeedsconf",
821 description = "Removing feeds.conf",
822 command=["rm", "feeds.conf"],
823 haltOnFailure = True
824 ))
825
826 # feed
827 factory.addStep(ShellCommand(
828 name = "installfeeds",
829 description = "Installing feeds",
830 command=["./scripts/feeds", "install", "-a"],
831 env = MakeEnv(tryccache=True),
832 haltOnFailure = True
833 ))
834
835 # seed config
836 if config_seed is not None:
837 factory.addStep(StringDownload(
838 name = "dlconfigseed",
839 s = config_seed + '\n',
840 workerdest = ".config",
841 mode = 0o644
842 ))
843
844 # configure
845 factory.addStep(ShellCommand(
846 name = "newconfig",
847 description = "Seeding .config",
848 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
849 ))
850
851 factory.addStep(ShellCommand(
852 name = "delbin",
853 description = "Removing output directory",
854 command = ["rm", "-rf", "bin/"]
855 ))
856
857 factory.addStep(ShellCommand(
858 name = "defconfig",
859 description = "Populating .config",
860 command = ["make", "defconfig"],
861 env = MakeEnv()
862 ))
863
864 # check arch
865 factory.addStep(ShellCommand(
866 name = "checkarch",
867 description = "Checking architecture",
868 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
869 logEnviron = False,
870 want_stdout = False,
871 want_stderr = False,
872 haltOnFailure = True
873 ))
874
875 # find libc suffix
876 factory.addStep(SetPropertyFromCommand(
877 name = "libc",
878 property = "libc",
879 description = "Finding libc suffix",
880 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
881
882 # install build key
883 if usign_key is not None:
884 factory.addStep(StringDownload(
885 name = "dlkeybuildpub",
886 s = UsignSec2Pub(usign_key, usign_comment),
887 workerdest = "key-build.pub",
888 mode = 0o600,
889 ))
890
891 factory.addStep(StringDownload(
892 name = "dlkeybuild",
893 s = "# fake private key",
894 workerdest = "key-build",
895 mode = 0o600,
896 ))
897
898 factory.addStep(StringDownload(
899 name = "dlkeybuilducert",
900 s = "# fake certificate",
901 workerdest = "key-build.ucert",
902 mode = 0o600,
903 ))
904
905 # prepare dl
906 factory.addStep(ShellCommand(
907 name = "dldir",
908 description = "Preparing dl/",
909 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
910 logEnviron = False,
911 want_stdout = False
912 ))
913
914 # prepare tar
915 factory.addStep(ShellCommand(
916 name = "dltar",
917 description = "Building and installing GNU tar",
918 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
919 env = MakeEnv(tryccache=True),
920 haltOnFailure = True
921 ))
922
923 # populate dl
924 factory.addStep(ShellCommand(
925 name = "dlrun",
926 description = "Populating dl/",
927 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
928 env = MakeEnv(),
929 logEnviron = False,
930 locks = [dlLock.access('exclusive')],
931 ))
932
933 factory.addStep(ShellCommand(
934 name = "cleanbase",
935 description = "Cleaning base-files",
936 command=["make", "package/base-files/clean", "V=s"]
937 ))
938
939 # build
940 factory.addStep(ShellCommand(
941 name = "tools",
942 description = "Building and installing tools",
943 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
944 env = MakeEnv(tryccache=True),
945 haltOnFailure = True
946 ))
947
948 factory.addStep(ShellCommand(
949 name = "toolchain",
950 description = "Building and installing toolchain",
951 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
952 env = MakeEnv(),
953 haltOnFailure = True
954 ))
955
956 factory.addStep(ShellCommand(
957 name = "kmods",
958 description = "Building kmods",
959 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
960 env = MakeEnv(),
961 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
962 haltOnFailure = True
963 ))
964
965 # find kernel version
966 factory.addStep(SetPropertyFromCommand(
967 name = "kernelversion",
968 property = "kernelversion",
969 description = "Finding the effective Kernel version",
970 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
971 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
972 ))
973
974 factory.addStep(ShellCommand(
975 name = "pkgclean",
976 description = "Cleaning up package build",
977 command=["make", "package/cleanup", "V=s"]
978 ))
979
980 factory.addStep(ShellCommand(
981 name = "pkgbuild",
982 description = "Building packages",
983 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
984 env = MakeEnv(),
985 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
986 haltOnFailure = True
987 ))
988
989 # factory.addStep(IfBuiltinShellCommand(
990 factory.addStep(ShellCommand(
991 name = "pkginstall",
992 description = "Installing packages",
993 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
994 env = MakeEnv(),
995 haltOnFailure = True
996 ))
997
998 factory.addStep(ShellCommand(
999 name = "pkgindex",
1000 description = "Indexing packages",
1001 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1002 env = MakeEnv(),
1003 haltOnFailure = True
1004 ))
1005
1006 if enable_kmod_archive:
1007 # embed kmod repository. Must happen before 'images'
1008
1009 # find rootfs staging directory
1010 factory.addStep(SetPropertyFromCommand(
1011 name = "stageroot",
1012 property = "stageroot",
1013 description = "Finding the rootfs staging directory",
1014 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1015 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1016 ))
1017
1018 factory.addStep(ShellCommand(
1019 name = "filesdir",
1020 description = "Creating file overlay directory",
1021 command=["mkdir", "-p", "files/etc/opkg"],
1022 haltOnFailure = True
1023 ))
1024
1025 factory.addStep(ShellCommand(
1026 name = "kmodconfig",
1027 description = "Embedding kmod repository configuration",
1028 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1029 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1030 haltOnFailure = True
1031 ))
1032
	#factory.addStep(IfBuiltinShellCommand(
	# Build and install the firmware images for this target.
	factory.addStep(ShellCommand(
		name = "images",
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	# Best-effort ("|| true"): older source trees may lack the buildinfo target.
	factory.addStep(ShellCommand(
		name = "buildinfo",
		description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
		command = "make -j1 buildinfo V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	# Best-effort ("|| true"): generate profiles.json next to the images.
	factory.addStep(ShellCommand(
		name = "json_overview_image_info",
		description = "Generate profiles.json in target folder",
		command = "make -j1 json_overview_image_info V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	# Produce the sha256sums file; run single-threaded for a stable result.
	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))
1065
	if enable_kmod_archive:
		# Collect the kernel module packages into a per-kernel-version archive
		# directory under bin/ so they can be uploaded alongside the images.
		factory.addStep(ShellCommand(
			name = "kmoddir",
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		# Copy only kmod-*.ipk from the regular package output into the archive.
		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
			         Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
			         Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		# Index the kmod archive; as with "pkgindex", signing is left to the
		# master (CONFIG_SIGNED_PACKAGES is cleared).
		factory.addStep(ShellCommand(
			name = "kmodindex",
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
			         Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			env = MakeEnv(),
			haltOnFailure = True
		))
1091
	# sign
	# Signing round-trip: pack the sha256sums and Packages index files into a
	# tarball on the worker, upload it to the master, sign it there with
	# signall.sh (which reads key material via CONFIG_INI), then download the
	# signed tarball and unpack it in place over the originals.
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		# Only index/checksum files are signed, at most two levels deep
		# (target dir and its kmods/ subtree).
		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		# Tarball name includes target/subtarget so concurrent builders on the
		# same master do not clobber each other.
		factory.addStep(FileUpload(
			workersrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			workerdest = "sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
1135
	# upload
	# Stage the remote directory skeleton under tmp/upload/ first; it is
	# rsynced to the download server before any files are transferred.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# Point <prefix>packages at the shared ../packages-<basever> tree; gated by
	# IsNoMasterBuild, so per the predicate name this runs only for non-master
	# (versioned) builds.
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))
1159
	# Push the staged directory skeleton (and symlink) to the download server.
	# logEnviron is disabled wherever RSYNC_PASSWORD is set, to keep the
	# credential out of the build logs.
	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# download remote sha256sums to 'target-sha256sums'
	# Failure is tolerated (flunk/warn disabled): a missing remote file simply
	# means there is nothing to compare against and everything gets uploaded.
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload
	# Helper script is fetched from the master and placed one level above the
	# build directory so it survives tree operations.
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	# Produces 'rsynclist' from the remote and local sha256sums — presumably
	# the set of new/changed files; see sha2rsync.pl for the exact rules.
	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))
1195
	# rsync wrapper script fetched from the master; used for all uploads below.
	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))

	# upload new files and update existing ones
	# Restricted to the files listed in 'rsynclist'; /kmods/ is excluded here
	# because the kmod archive is uploaded separately. The per-target
	# --partial-dir keeps interrupted transfers from colliding across builders.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	# "--existing --ignore-existing --delete" transfers nothing and only
	# removes remote files absent from the local tree.
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))
1226
	if enable_kmod_archive:
		# Mirror the per-kernel-version kmod archive; --delete keeps the remote
		# directory an exact copy of the local one.
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
			         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
		))

	if rsync_src_url is not None:
		# List downloaded source archives in dl/: regular, non-empty,
		# non-hidden files newer than .config (i.e. touched by this build).
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only suffices for source tarballs; the partial-dir also
		# includes the worker name since many builders share the source mirror.
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
			        [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
		))
1256
	# Disabled: per-target package upload (kept for reference).
	if False:
		factory.addStep(ShellCommand(
			name = "packageupload",
			description = "Uploading package files",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			logEnviron = False,
		))

	# logs
	# Disabled: build log upload (kept for reference).
	if False:
		factory.addStep(ShellCommand(
			name = "upload",
			description = "Uploading logs",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			alwaysRun = True,
			logEnviron = False,
		))
1278
	# Diagnostics: report free disk space even when earlier steps failed.
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
	))

	# Diagnostics: ccache statistics; PATH is extended with the in-tree
	# staging_dir/host/bin — presumably where a tree-local ccache lives.
	# Never affects the build result (flunk/warn/halt all disabled).
	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))

	# Register one builder per target, plus a Triggerable scheduler and a
	# matching trigger step on the force-build factory so individual targets
	# can be started on demand.
	c['builders'].append(BuilderConfig(name=target, workernames=slaveNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1310
1311
1312 ####### STATUS TARGETS
1313
1314 # 'status' is a list of Status Targets. The results of each build will be
1315 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1316 # including web pages, email senders, and IRC bots.
1317
# Web UI: enabled only when phase1.status_bind is configured. When a
# status_user/status_password pair is also present, protect the control
# endpoints behind basic auth with a single "admins" role.
if ini.has_option("phase1", "status_bind"):
	www_config = {
		'port': ini.get("phase1", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
		status_user = ini.get("phase1", "status_user")
		www_config['auth'] = util.UserPasswordAuth([
			(status_user, ini.get("phase1", "status_password"))
		])
		www_config['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[status_user]) ]
		)

	c['www'] = www_config
1336
1337
# IRC notifications: active only when host, nickname and channel are all
# configured. Port and password are optional (defaults: 6667 / no password).
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
	irc_reporter = reporters.IRC(
		ini.get("irc", "host"),
		ini.get("irc", "nickname"),
		port = ini.getint("irc", "port", fallback=6667),
		password = ini.get("irc", "password", fallback=None),
		channels = [ ini.get("irc", "channel") ],
		# Only report state changes worth acting on, not every build.
		notify_events = [ 'exception', 'problem', 'recovery' ]
	)

	c['services'].append(irc_reporter)
1359
####### DB URL

# State database used by buildbot. The sqlite default is sufficient for all
# but the largest installations.
c['db'] = dict(db_url = "sqlite:///state.sqlite")

# Opt out of sending anonymous usage statistics to the buildbot project.
c['buildbotNetUsageData'] = None