phase1: treat "kmod_archive" as bool option
phase1/master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from buildbot import locks
11 from buildbot.changes import filter
12 from buildbot.changes.gitpoller import GitPoller
13 from buildbot.config import BuilderConfig
14 from buildbot.plugins import reporters
15 from buildbot.plugins import schedulers
16 from buildbot.plugins import steps
17 from buildbot.plugins import util
18 from buildbot.process import properties
19 from buildbot.process.factory import BuildFactory
20 from buildbot.process.properties import Interpolate
21 from buildbot.process.properties import Property
22 from buildbot.schedulers.basic import SingleBranchScheduler
23 from buildbot.schedulers.forcesched import BaseParameter
24 from buildbot.schedulers.forcesched import ForceScheduler
25 from buildbot.schedulers.forcesched import ValidationError
26 from buildbot.steps.master import MasterShellCommand
27 from buildbot.steps.shell import SetPropertyFromCommand
28 from buildbot.steps.shell import ShellCommand
29 from buildbot.steps.source.git import Git
30 from buildbot.steps.transfer import FileDownload
31 from buildbot.steps.transfer import FileUpload
32 from buildbot.steps.transfer import StringDownload
33 from buildbot.worker import Worker
34
35
36 # This is a sample buildmaster config file. It must be installed as
37 # 'master.cfg' in your buildmaster's base directory.
38
39 ini = configparser.ConfigParser()
40 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
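# Illustrative config.ini skeleton (assumed layout, all values are placeholders);
# the section and option names are the ones read throughout this file:
#
#   [general]
#   title = Example Buildbot
#   title_url = https://example.org/
#   workdir = /data/buildbot
#
#   [phase1]
#   buildbot_url = https://example.org:8010/
#   status_bind = tcp:8010
#   config_seed = CONFIG_DEVEL=y
#   kmod_archive = yes
#
#   [repo]
#   url = https://example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = upload@example.org::bin
#   binary_password = secret
#
#   [usign]
#   key = <base64-encoded usign secret key>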
41
42 # This is the dictionary that the buildmaster pays attention to. We also use
43 # a shorter alias to save typing.
44 c = BuildmasterConfig = {}
45
46 ####### PROJECT IDENTITY
47
48 # the 'title' string will appear at the top of this buildbot
49 # installation's html.WebStatus home page (linked to the
50 # 'titleURL') and is embedded in the title of the waterfall HTML page.
51
52 c['title'] = ini.get("general", "title")
53 c['titleURL'] = ini.get("general", "title_url")
54
55 # the 'buildbotURL' string should point to the location where the buildbot's
56 # internal web server (usually the html.WebStatus page) is visible. This
57 # typically uses the port number set in the Waterfall 'status' entry, but
58 # with an externally-visible host name which the buildbot cannot figure out
59 # without some help.
60
61 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
62
63 ####### WORKERS
64
65 # The 'workers' list defines the set of recognized build workers. Each element is
66 # a Worker object with a unique name and password; the same credentials must be
67 # configured on the worker side (entries come from the "slave " sections of config.ini).
68
69 slave_port = 9989
70
71 if ini.has_option("phase1", "port"):
72 slave_port = ini.get("phase1", "port")
73
74 c['workers'] = []
75 NetLocks = dict()
76
77 for section in ini.sections():
78 if section.startswith("slave "):
79 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
80 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
81 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
82 name = ini.get(section, "name")
83 password = ini.get(section, "password")
84 max_builds = 1
85 if ini.has_option(section, "builds"):
86 max_builds = ini.getint(section, "builds")
87 sl_props['max_builds'] = max_builds
88 if max_builds == 1:
89 sl_props['shared_wd'] = True
90 if ini.has_option(section, "cleanup"):
91 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
92 if ini.has_option(section, "dl_lock"):
93 lockname = ini.get(section, "dl_lock")
94 sl_props['dl_lock'] = lockname
95 if lockname not in NetLocks:
96 NetLocks[lockname] = locks.MasterLock(lockname)
97 if ini.has_option(section, "ul_lock"):
98 lockname = ini.get(section, "ul_lock")
99 sl_props['ul_lock'] = lockname
100 if lockname not in NetLocks:
101 NetLocks[lockname] = locks.MasterLock(lockname)
102 if ini.has_option(section, "shared_wd"):
103 shared_wd = ini.getboolean(section, "shared_wd")
104 sl_props['shared_wd'] = shared_wd
105 if shared_wd and (max_builds != 1):
106 raise ValueError('max_builds must be 1 with shared workdir!')
107 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
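# Example worker entry (hypothetical name/password). "builds" feeds max_builds,
# "cleanup" feeds do_cleanup, "shared_wd" is only permitted together with
# builds = 1, and dl_lock/ul_lock name master-side locks that serialize
# network transfers across workers:
#
#   [slave example-worker-01]
#   name = example-worker-01
#   password = secret
#   phase = 1
#   builds = 3
#   cleanup = no
#   shared_wd = no
#   dl_lock = dl-lock-a
#   ul_lock = ul-lock-a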
108
109 # 'protocols' defines the TCP port to listen on for connections from workers.
110 # This must match the value configured into the workers (with their
111 # --master option).
112 c['protocols'] = {'pb': {'port': slave_port}}
113
114 # coalesce builds
115 c['collapseRequests'] = True
116
117 # Reduce amount of backlog data
118 c['buildHorizon'] = 30
119 c['logHorizon'] = 20
120
121 ####### CHANGESOURCES
122
123 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
124 scripts_dir = os.path.abspath("../scripts")
125 tree_expire = 0
126 other_builds = 0
127 cc_version = None
128
129 cc_command = "gcc"
130 cxx_command = "g++"
131
132 config_seed = ""
133
134 git_ssh = False
135 git_ssh_key = None
136
137 if ini.has_option("phase1", "expire"):
138 tree_expire = ini.getint("phase1", "expire")
139
140 if ini.has_option("phase1", "other_builds"):
141 other_builds = ini.getint("phase1", "other_builds")
142
143 if ini.has_option("phase1", "cc_version"):
144 cc_version = ini.get("phase1", "cc_version").split()
145 if len(cc_version) == 1:
146 cc_version = ["eq", cc_version[0]]
147
148 if ini.has_option("general", "git_ssh"):
149 git_ssh = ini.getboolean("general", "git_ssh")
150
151 if ini.has_option("general", "git_ssh_key"):
152 git_ssh_key = ini.get("general", "git_ssh_key")
153 else:
154 git_ssh = False
155
156 if ini.has_option("phase1", "config_seed"):
157 config_seed = ini.get("phase1", "config_seed")
158
159 repo_url = ini.get("repo", "url")
160 repo_branch = "master"
161
162 if ini.has_option("repo", "branch"):
163 repo_branch = ini.get("repo", "branch")
164
165 rsync_bin_url = ini.get("rsync", "binary_url")
166 rsync_bin_key = ini.get("rsync", "binary_password")
167 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
168
169 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
170 rsync_bin_defopts += ["--contimeout=20"]
171
172 rsync_src_url = None
173 rsync_src_key = None
174 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
175
176 if ini.has_option("rsync", "source_url"):
177 rsync_src_url = ini.get("rsync", "source_url")
178 rsync_src_key = ini.get("rsync", "source_password")
179
180 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
181 rsync_src_defopts += ["--contimeout=20"]
182
183 usign_key = None
184 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
185
186 if ini.has_option("usign", "key"):
187 usign_key = ini.get("usign", "key")
188
189 if ini.has_option("usign", "comment"):
190 usign_comment = ini.get("usign", "comment")
191
192 enable_kmod_archive = False
193
194 if ini.has_option("phase1", "kmod_archive"):
195 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
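# getboolean() accepts the usual ConfigParser spellings, so any of
# "yes"/"no", "true"/"false", "on"/"off" or "1"/"0" works, e.g.:
#
#   [phase1]
#   kmod_archive = yes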
196
197
198 # find targets
199 targets = [ ]
200
201 if not os.path.isdir(work_dir+'/source.git'):
202 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
203 else:
204 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
205
206 findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
207 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
208
209 while True:
210 line = findtargets.stdout.readline()
211 if not line:
212 break
213 ta = line.decode().strip().split(' ')
214 targets.append(ta[0])
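# Each line printed by dumpinfo.pl is expected to start with a
# "<target>/<subtarget>" token (anything after the first space is ignored),
# so `targets` ends up as something like ["ath79/generic", "x86/64", ...];
# the actual values depend on the source tree checked out above.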
215
216
217 # the 'change_source' setting tells the buildmaster how it should find out
218 # about source code changes. Here we poll the source repository configured above.
219
220 c['change_source'] = []
221 c['change_source'].append(GitPoller(
222 repo_url,
223 workdir=work_dir+'/work.git', branch=repo_branch,
224 pollinterval=300))
225
226 ####### SCHEDULERS
227
228 # Configure the Schedulers, which decide how to react to incoming changes. In this
229 # case, kick off the per-target builders defined below.
230
231 class TagChoiceParameter(BaseParameter):
232 spec_attributes = ["strict", "choices"]
233 type = "list"
234 strict = True
235
236 def __init__(self, name, label=None, **kw):
237 super().__init__(name, label, **kw)
238 self._choice_list = []
239
240 @property
241 def choices(self):
242 taglist = []
243 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
244
245 if basever:
246 findtags = subprocess.Popen(
247 ['git', 'ls-remote', '--tags', repo_url],
248 stdout = subprocess.PIPE)
249
250 while True:
251 line = findtags.stdout.readline()
252
253 if not line:
254 break
255
256 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
257
258 if tagver and tagver[1].find(basever[1]) == 0:
259 taglist.append(tagver[1])
260
261 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
262 taglist.insert(0, '')
263
264 self._choice_list = taglist
265
266 return self._choice_list
267
268 def parse_from_arg(self, s):
269 if self.strict and s not in self._choice_list:
270 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
271 return s
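# Rough behaviour of TagChoiceParameter, assuming a release branch named
# like "openwrt-21.02": the trailing "21.02" is extracted, every remote tag
# matching v21.02.* (release candidates included) is offered newest-first,
# and an empty entry is prepended so "no tag" stays the default. On "master"
# the version suffix does not match and the choice list remains empty.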
272
273 c['schedulers'] = []
274 c['schedulers'].append(SingleBranchScheduler(
275 name = "all",
276 change_filter = filter.ChangeFilter(branch=repo_branch),
277 treeStableTimer = 60,
278 builderNames = targets))
279
280 c['schedulers'].append(ForceScheduler(
281 name = "force",
282 buttonName = "Force builds",
283 label = "Force build details",
284 builderNames = [ "00_force_build" ],
285
286 codebases = [
287 util.CodebaseParameter(
288 "",
289 label = "Repository",
290 branch = util.FixedParameter(name = "branch", default = ""),
291 revision = util.FixedParameter(name = "revision", default = ""),
292 repository = util.FixedParameter(name = "repository", default = ""),
293 project = util.FixedParameter(name = "project", default = "")
294 )
295 ],
296
297 reason = util.StringParameter(
298 name = "reason",
299 label = "Reason",
300 default = "Trigger build",
301 required = True,
302 size = 80
303 ),
304
305 properties = [
306 util.NestedParameter(
307 name="options",
308 label="Build Options",
309 layout="vertical",
310 fields=[
311 util.ChoiceStringParameter(
312 name = "target",
313 label = "Build target",
314 default = "all",
315 choices = [ "all" ] + targets
316 ),
317 TagChoiceParameter(
318 name = "tag",
319 label = "Build tag",
320 default = ""
321 )
322 ]
323 )
324 ]
325 ))
326
327 ####### BUILDERS
328
329 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
330 # what steps, and which workers can execute them. Note that any particular build will
331 # only take place on one worker.
332
333 CleanTargetMap = [
334 [ "tools", "tools/clean" ],
335 [ "chain", "toolchain/clean" ],
336 [ "linux", "target/linux/clean" ],
337 [ "dir", "dirclean" ],
338 [ "dist", "distclean" ]
339 ]
340
341 def IsMakeCleanRequested(pattern):
342 def CheckCleanProperty(step):
343 val = step.getProperty("clean")
344 if val and re.match(pattern, val):
345 return True
346 else:
347 return False
348
349 return CheckCleanProperty
350
351 def IsSharedWorkdir(step):
352 return bool(step.getProperty("shared_wd"))
353
354 def IsCleanupRequested(step):
355 if IsSharedWorkdir(step):
356 return False
357 do_cleanup = step.getProperty("do_cleanup")
358 if do_cleanup:
359 return True
360 else:
361 return False
362
363 def IsExpireRequested(step):
364 if IsSharedWorkdir(step):
365 return False
366 else:
367 return not IsCleanupRequested(step)
368
369 def IsGitFreshRequested(step):
370 do_cleanup = step.getProperty("do_cleanup")
371 if do_cleanup:
372 return True
373 else:
374 return False
375
376 def IsGitCleanRequested(step):
377 return not IsGitFreshRequested(step)
378
379 def IsTaggingRequested(step):
380 val = step.getProperty("tag")
381 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
382 return True
383 else:
384 return False
385
386 def IsNoTaggingRequested(step):
387 return not IsTaggingRequested(step)
388
389 def IsNoMasterBuild(step):
390 return repo_branch != "master"
391
392 def GetBaseVersion():
393 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
394 return repo_branch.split('-')[1]
395 else:
396 return "master"
397
398 @properties.renderer
399 def GetVersionPrefix(props):
400 basever = GetBaseVersion()
401 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
402 return "%s/" % props["tag"]
403 elif basever != "master":
404 return "%s-SNAPSHOT/" % basever
405 else:
406 return ""
407
408 @properties.renderer
409 def GetNumJobs(props):
410 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
411 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
412 else:
413 return "1"
414
415 @properties.renderer
416 def GetCC(props):
417 if props.hasProperty("cc_command"):
418 return props["cc_command"]
419 else:
420 return "gcc"
421
422 @properties.renderer
423 def GetCXX(props):
424 if props.hasProperty("cxx_command"):
425 return props["cxx_command"]
426 else:
427 return "g++"
428
429 @properties.renderer
430 def GetCwd(props):
431 if props.hasProperty("builddir"):
432 return props["builddir"]
433 elif props.hasProperty("workdir"):
434 return props["workdir"]
435 else:
436 return "/"
437
438 @properties.renderer
439 def GetCCache(props):
440 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
441 return props["ccache_command"]
442 else:
443 return ""
444
445 def GetNextBuild(builder, requests):
446 for r in requests:
447 if r.properties and r.properties.hasProperty("tag"):
448 return r
449 return requests[0]
450
451 def MakeEnv(overrides=None, tryccache=False):
452 env = {
453 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
454 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
455 }
456 if tryccache:
457 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
458 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
459 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
460 else:
461 env['CC'] = env['CCC']
462 env['CXX'] = env['CCXX']
463 env['CCACHE'] = ''
464 if overrides is not None:
465 env.update(overrides)
466 return env
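# Sketch of what MakeEnv() yields (the values are Interpolate renderers, shown
# symbolically): CCC/CCXX always carry the detected compiler commands, while
# tryccache=True redirects CC/CXX through the ccache wrapper scripts staged
# into the build directory further below:
#
#   {'CCC': <cc_command>, 'CCXX': <cxx_command>,
#    'CC': <builddir>/ccache_cc.sh, 'CXX': <builddir>/ccache_cxx.sh,
#    'CCACHE': <ccache_command or ''>}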
467
468 @properties.renderer
469 def NetLockDl(props):
470 lock = None
471 if props.hasProperty("dl_lock"):
472 lock = NetLocks[props["dl_lock"]]
473 if lock is not None:
474 return [lock.access('exclusive')]
475 else:
476 return []
477
478 @properties.renderer
479 def NetLockUl(props):
480 lock = None
481 if props.hasProperty("ul_lock"):
482 lock = NetLocks[props["ul_lock"]]
483 if lock is not None:
484 return [lock.access('exclusive')]
485 else:
486 return []
487
488 @util.renderer
489 def TagPropertyValue(props):
490 if props.hasProperty("options"):
491 options = props.getProperty("options")
492 if type(options) is dict:
493 return options.get("tag")
494 return None
495
496 def IsTargetSelected(target):
497 def CheckTargetProperty(step):
498 try:
499 options = step.getProperty("options")
500 if type(options) is dict:
501 selected_target = options.get("target", "all")
502 if selected_target != "all" and selected_target != target:
503 return False
504 except KeyError:
505 pass
506
507 return True
508
509 return CheckTargetProperty
510
511 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
512 try:
513 seckey = base64.b64decode(seckey)
514 except:
515 return None
516
517 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
518 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
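# Note: UsignSec2Pub derives the public key purely by re-slicing the decoded
# secret key (the byte ranges above presumably select the algorithm tag, key id
# and public key material of the usign key format) and rewriting the
# "untrusted comment:" line; a key that fails base64 decoding yields None.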
519
520
521 c['builders'] = []
522
523 dlLock = locks.WorkerLock("slave_dl")
524
525 checkBuiltin = re.sub('[\t\n ]+', ' ', """
526 checkBuiltin() {
527 local symbol op path file;
528 for file in $CHANGED_FILES; do
529 case "$file" in
530 package/*/*) : ;;
531 *) return 0 ;;
532 esac;
533 done;
534 while read symbol op path; do
535 case "$symbol" in package-*)
536 symbol="${symbol##*(}";
537 symbol="${symbol%)}";
538 for file in $CHANGED_FILES; do
539 case "$file" in "package/$path/"*)
540 grep -qsx "$symbol=y" .config && return 0
541 ;; esac;
542 done;
543 esac;
544 done < tmp/.packagedeps;
545 return 1;
546 }
547 """).strip()
548
549
550 class IfBuiltinShellCommand(ShellCommand):
551 def _quote(self, str):
552 if re.search("[^a-zA-Z0-9/_.-]", str):
553 return "'%s'" %(re.sub("'", "'\"'\"'", str))
554 return str
555
556 def setCommand(self, command):
557 if not isinstance(command, str):
558 command = ' '.join(map(self._quote, command))
559 self.command = [
560 '/bin/sh', '-c',
561 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
562 ]
563
564 def setupEnvironment(self, cmd):
565 slaveEnv = self.slaveEnvironment
566 if slaveEnv is None:
567 slaveEnv = { }
568 changedFiles = { }
569 for request in self.build.requests:
570 for source in request.sources:
571 for change in source.changes:
572 for file in change.files:
573 changedFiles[file] = True
574 fullSlaveEnv = slaveEnv.copy()
575 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
576 cmd.args['env'] = fullSlaveEnv
577
578 slaveNames = [ ]
579
580 for slave in c['workers']:
581 slaveNames.append(slave.workername)
582
583 force_factory = BuildFactory()
584
585 c['builders'].append(BuilderConfig(
586 name = "00_force_build",
587 workernames = slaveNames,
588 factory = force_factory))
589
590 for target in targets:
591 ts = target.split('/')
592
593 factory = BuildFactory()
594
595 # setup shared work directory if required
596 factory.addStep(ShellCommand(
597 name = "sharedwd",
598 description = "Setting up shared work directory",
599 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
600 workdir = ".",
601 haltOnFailure = True,
602 doStepIf = IsSharedWorkdir))
603
604 # find number of cores
605 factory.addStep(SetPropertyFromCommand(
606 name = "nproc",
607 property = "nproc",
608 description = "Finding number of CPUs",
609 command = ["nproc"]))
610
611 # find gcc and g++ compilers
612 factory.addStep(FileDownload(
613 name = "dlfindbinpl",
614 mastersrc = scripts_dir + '/findbin.pl',
615 workerdest = "../findbin.pl",
616 mode = 0o755))
617
618 factory.addStep(SetPropertyFromCommand(
619 name = "gcc",
620 property = "cc_command",
621 description = "Finding gcc command",
622 command = [
623 "../findbin.pl", "gcc",
624 cc_version[0] if cc_version is not None else '',
625 cc_version[1] if cc_version is not None else ''
626 ],
627 haltOnFailure = True))
628
629 factory.addStep(SetPropertyFromCommand(
630 name = "g++",
631 property = "cxx_command",
632 description = "Finding g++ command",
633 command = [
634 "../findbin.pl", "g++",
635 cc_version[0] if cc_version is not None else '',
636 cc_version[1] if cc_version is not None else ''
637 ],
638 haltOnFailure = True))
639
640 # see if ccache is available
641 factory.addStep(SetPropertyFromCommand(
642 property = "ccache_command",
643 command = ["which", "ccache"],
644 description = "Testing for ccache command",
645 haltOnFailure = False,
646 flunkOnFailure = False,
647 warnOnFailure = False,
648 ))
649
650 # expire tree if needed
651 if tree_expire > 0:
652 factory.addStep(FileDownload(
653 name = "dlexpiresh",
654 doStepIf = IsExpireRequested,
655 mastersrc = scripts_dir + '/expire.sh',
656 workerdest = "../expire.sh",
657 mode = 0o755))
658
659 factory.addStep(ShellCommand(
660 name = "expire",
661 description = "Checking for build tree expiry",
662 command = ["./expire.sh", str(tree_expire)],
663 workdir = ".",
664 haltOnFailure = True,
665 doStepIf = IsExpireRequested,
666 timeout = 2400))
667
668 # cleanup.sh if needed
669 factory.addStep(FileDownload(
670 name = "dlcleanupsh",
671 mastersrc = scripts_dir + '/cleanup.sh',
672 workerdest = "../cleanup.sh",
673 mode = 0o755,
674 doStepIf = IsCleanupRequested))
675
676 factory.addStep(ShellCommand(
677 name = "cleanold",
678 description = "Cleaning previous builds",
679 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
680 workdir = ".",
681 haltOnFailure = True,
682 doStepIf = IsCleanupRequested,
683 timeout = 2400))
684
685 factory.addStep(ShellCommand(
686 name = "cleanup",
687 description = "Cleaning work area",
688 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
689 workdir = ".",
690 haltOnFailure = True,
691 doStepIf = IsCleanupRequested,
692 timeout = 2400))
693
694 # user-requested clean targets
695 for tuple in CleanTargetMap:
696 factory.addStep(ShellCommand(
697 name = tuple[1],
698 description = 'User-requested "make %s"' % tuple[1],
699 command = ["make", tuple[1], "V=s"],
700 env = MakeEnv(),
701 doStepIf = IsMakeCleanRequested(tuple[0])
702 ))
703
704 # Workaround bug when switching from a checked out tag back to a branch
705 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
706 factory.addStep(ShellCommand(
707 name = "gitcheckout",
708 description = "Ensure that Git HEAD is sane",
709 command = "if [ -d .git ]; then git checkout -f %s; git branch --set-upstream-to origin/%s; else exit 0; fi" %(repo_branch, repo_branch),
710 haltOnFailure = True))
711
712 # check out the source
713 # Git() runs:
714 # if repo doesn't exist: 'git clone repourl'
715 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
716 # 'git fetch -t repourl branch; git reset --hard revision'
717 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
718 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
719 factory.addStep(Git(
720 name = "gitclean",
721 repourl = repo_url,
722 branch = repo_branch,
723 mode = 'full',
724 method = 'clean',
725 haltOnFailure = True,
726 doStepIf = IsGitCleanRequested,
727 ))
728
729 factory.addStep(Git(
730 name = "gitfresh",
731 repourl = repo_url,
732 branch = repo_branch,
733 mode = 'full',
734 method = 'fresh',
735 haltOnFailure = True,
736 doStepIf = IsGitFreshRequested,
737 ))
738
739 # update remote refs
740 factory.addStep(ShellCommand(
741 name = "fetchrefs",
742 description = "Fetching Git remote refs",
743 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
744 haltOnFailure = True
745 ))
746
747 # switch to tag
748 factory.addStep(ShellCommand(
749 name = "switchtag",
750 description = "Checking out Git tag",
751 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
752 haltOnFailure = True,
753 doStepIf = IsTaggingRequested
754 ))
755
756 # Verify that Git HEAD points to a tag or branch
757 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
758 factory.addStep(ShellCommand(
759 name = "gitverify",
760 description = "Ensure that Git HEAD is pointing to a branch or tag",
761 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
762 haltOnFailure = True))
763
764 factory.addStep(ShellCommand(
765 name = "rmtmp",
766 description = "Remove tmp folder",
767 command=["rm", "-rf", "tmp/"]))
768
769 # feed
770 # factory.addStep(ShellCommand(
771 # name = "feedsconf",
772 # description = "Copy the feeds.conf",
773 # command='''cp ~/feeds.conf ./feeds.conf''' ))
774
775 # feed
776 factory.addStep(ShellCommand(
777 name = "rmfeedlinks",
778 description = "Remove feed symlinks",
779 command=["rm", "-rf", "package/feeds/"]))
780
781 factory.addStep(StringDownload(
782 name = "ccachecc",
783 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
784 workerdest = "../ccache_cc.sh",
785 mode = 0o755,
786 ))
787
788 factory.addStep(StringDownload(
789 name = "ccachecxx",
790 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
791 workerdest = "../ccache_cxx.sh",
792 mode = 0o755,
793 ))
794
795 # Git SSH
796 if git_ssh:
797 factory.addStep(StringDownload(
798 name = "dlgitclonekey",
799 s = git_ssh_key,
800 workerdest = "../git-clone.key",
801 mode = 0o600,
802 ))
803
804 factory.addStep(ShellCommand(
805 name = "patchfeedsconf",
806 description = "Patching feeds.conf",
807 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
808 haltOnFailure = True
809 ))
810
811 # feed
812 factory.addStep(ShellCommand(
813 name = "updatefeeds",
814 description = "Updating feeds",
815 command=["./scripts/feeds", "update"],
816 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
817 haltOnFailure = True
818 ))
819
820 # Git SSH
821 if git_ssh:
822 factory.addStep(ShellCommand(
823 name = "rmfeedsconf",
824 description = "Removing feeds.conf",
825 command=["rm", "feeds.conf"],
826 haltOnFailure = True
827 ))
828
829 # feed
830 factory.addStep(ShellCommand(
831 name = "installfeeds",
832 description = "Installing feeds",
833 command=["./scripts/feeds", "install", "-a"],
834 env = MakeEnv(tryccache=True),
835 haltOnFailure = True
836 ))
837
838 # seed config
839 if config_seed is not None:
840 factory.addStep(StringDownload(
841 name = "dlconfigseed",
842 s = config_seed + '\n',
843 workerdest = ".config",
844 mode = 0o644
845 ))
846
847 # configure
848 factory.addStep(ShellCommand(
849 name = "newconfig",
850 description = "Seeding .config",
851 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
852 ))
853
854 factory.addStep(ShellCommand(
855 name = "delbin",
856 description = "Removing output directory",
857 command = ["rm", "-rf", "bin/"]
858 ))
859
860 factory.addStep(ShellCommand(
861 name = "defconfig",
862 description = "Populating .config",
863 command = ["make", "defconfig"],
864 env = MakeEnv()
865 ))
866
867 # check arch
868 factory.addStep(ShellCommand(
869 name = "checkarch",
870 description = "Checking architecture",
871 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
872 logEnviron = False,
873 want_stdout = False,
874 want_stderr = False,
875 haltOnFailure = True
876 ))
877
878 # find libc suffix
879 factory.addStep(SetPropertyFromCommand(
880 name = "libc",
881 property = "libc",
882 description = "Finding libc suffix",
883 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
884
885 # install build key
886 if usign_key is not None:
887 factory.addStep(StringDownload(
888 name = "dlkeybuildpub",
889 s = UsignSec2Pub(usign_key, usign_comment),
890 workerdest = "key-build.pub",
891 mode = 0o600,
892 ))
893
894 factory.addStep(StringDownload(
895 name = "dlkeybuild",
896 s = "# fake private key",
897 workerdest = "key-build",
898 mode = 0o600,
899 ))
900
901 factory.addStep(StringDownload(
902 name = "dlkeybuilducert",
903 s = "# fake certificate",
904 workerdest = "key-build.ucert",
905 mode = 0o600,
906 ))
907
908 # prepare dl
909 factory.addStep(ShellCommand(
910 name = "dldir",
911 description = "Preparing dl/",
912 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
913 logEnviron = False,
914 want_stdout = False
915 ))
916
917 # prepare tar
918 factory.addStep(ShellCommand(
919 name = "dltar",
920 description = "Building and installing GNU tar",
921 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
922 env = MakeEnv(tryccache=True),
923 haltOnFailure = True
924 ))
925
926 # populate dl
927 factory.addStep(ShellCommand(
928 name = "dlrun",
929 description = "Populating dl/",
930 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
931 env = MakeEnv(),
932 logEnviron = False,
933 locks = [dlLock.access('exclusive')],
934 ))
935
936 factory.addStep(ShellCommand(
937 name = "cleanbase",
938 description = "Cleaning base-files",
939 command=["make", "package/base-files/clean", "V=s"]
940 ))
941
942 # build
943 factory.addStep(ShellCommand(
944 name = "tools",
945 description = "Building and installing tools",
946 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
947 env = MakeEnv(tryccache=True),
948 haltOnFailure = True
949 ))
950
951 factory.addStep(ShellCommand(
952 name = "toolchain",
953 description = "Building and installing toolchain",
954 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
955 env = MakeEnv(),
956 haltOnFailure = True
957 ))
958
959 factory.addStep(ShellCommand(
960 name = "kmods",
961 description = "Building kmods",
962 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
963 env = MakeEnv(),
964 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
965 haltOnFailure = True
966 ))
967
968 # find kernel version
969 factory.addStep(SetPropertyFromCommand(
970 name = "kernelversion",
971 property = "kernelversion",
972 description = "Finding the effective Kernel version",
973 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
974 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
975 ))
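# The resulting "kernelversion" property is a single string of the form
# "<LINUX_VERSION>-<LINUX_RELEASE>-<LINUX_VERMAGIC>" (the three make variables
# joined by the printf above); it is reused later to name the per-kernel
# kmods/ directories and the embedded kmod feed.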
976
977 factory.addStep(ShellCommand(
978 name = "pkgclean",
979 description = "Cleaning up package build",
980 command=["make", "package/cleanup", "V=s"]
981 ))
982
983 factory.addStep(ShellCommand(
984 name = "pkgbuild",
985 description = "Building packages",
986 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
987 env = MakeEnv(),
988 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
989 haltOnFailure = True
990 ))
991
992 # factory.addStep(IfBuiltinShellCommand(
993 factory.addStep(ShellCommand(
994 name = "pkginstall",
995 description = "Installing packages",
996 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
997 env = MakeEnv(),
998 haltOnFailure = True
999 ))
1000
1001 factory.addStep(ShellCommand(
1002 name = "pkgindex",
1003 description = "Indexing packages",
1004 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1005 env = MakeEnv(),
1006 haltOnFailure = True
1007 ))
1008
1009 if enable_kmod_archive:
1010 # embed kmod repository. Must happen before 'images'
1011
1012 # find rootfs staging directory
1013 factory.addStep(SetPropertyFromCommand(
1014 name = "stageroot",
1015 property = "stageroot",
1016 description = "Finding the rootfs staging directory",
1017 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1018 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1019 ))
1020
1021 factory.addStep(ShellCommand(
1022 name = "filesdir",
1023 description = "Creating file overlay directory",
1024 command=["mkdir", "-p", "files/etc/opkg"],
1025 haltOnFailure = True
1026 ))
1027
1028 factory.addStep(ShellCommand(
1029 name = "kmodconfig",
1030 description = "Embedding kmod repository configuration",
1031 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1032 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1033 haltOnFailure = True
1034 ))
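# The sed call above duplicates the "src/gz ..._core .../packages" line of the
# staged distfeeds.conf into an additional "..._kmods" feed pointing at
# kmods/<kernelversion>, and writes the result into the files/ overlay so the
# images built next carry the kmod archive as an extra opkg feed.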
1035
1036 #factory.addStep(IfBuiltinShellCommand(
1037 factory.addStep(ShellCommand(
1038 name = "images",
1039 description = "Building and installing images",
1040 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1041 env = MakeEnv(),
1042 haltOnFailure = True
1043 ))
1044
1045 factory.addStep(ShellCommand(
1046 name = "buildinfo",
1047 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1048 command = "make -j1 buildinfo V=s || true",
1049 env = MakeEnv(),
1050 haltOnFailure = True
1051 ))
1052
1053 factory.addStep(ShellCommand(
1054 name = "json_overview_image_info",
1055 description = "Generate profiles.json in target folder",
1056 command = "make -j1 json_overview_image_info V=s || true",
1057 env = MakeEnv(),
1058 haltOnFailure = True
1059 ))
1060
1061 factory.addStep(ShellCommand(
1062 name = "checksums",
1063 description = "Calculating checksums",
1064 command=["make", "-j1", "checksum", "V=s"],
1065 env = MakeEnv(),
1066 haltOnFailure = True
1067 ))
1068
1069 if enable_kmod_archive:
1070 factory.addStep(ShellCommand(
1071 name = "kmoddir",
1072 description = "Creating kmod directory",
1073 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1074 haltOnFailure = True
1075 ))
1076
1077 factory.addStep(ShellCommand(
1078 name = "kmodprepare",
1079 description = "Preparing kmod archive",
1080 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1081 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1082 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1083 haltOnFailure = True
1084 ))
1085
1086 factory.addStep(ShellCommand(
1087 name = "kmodindex",
1088 description = "Indexing kmod archive",
1089 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1090 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1091 env = MakeEnv(),
1092 haltOnFailure = True
1093 ))
1094
1095 # sign
1096 if ini.has_option("gpg", "key") or usign_key is not None:
1097 factory.addStep(MasterShellCommand(
1098 name = "signprepare",
1099 description = "Preparing temporary signing directory",
1100 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1101 haltOnFailure = True
1102 ))
1103
1104 factory.addStep(ShellCommand(
1105 name = "signpack",
1106 description = "Packing files to sign",
1107 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1108 haltOnFailure = True
1109 ))
1110
1111 factory.addStep(FileUpload(
1112 workersrc = "sign.tar.gz",
1113 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1114 haltOnFailure = True
1115 ))
1116
1117 factory.addStep(MasterShellCommand(
1118 name = "signfiles",
1119 description = "Signing files",
1120 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1121 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1122 haltOnFailure = True
1123 ))
1124
1125 factory.addStep(FileDownload(
1126 name = "dlsigntargz",
1127 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1128 workerdest = "sign.tar.gz",
1129 haltOnFailure = True
1130 ))
1131
1132 factory.addStep(ShellCommand(
1133 name = "signunpack",
1134 description = "Unpacking signed files",
1135 command = ["tar", "-xzf", "sign.tar.gz"],
1136 haltOnFailure = True
1137 ))
1138
1139 # upload
1140 factory.addStep(ShellCommand(
1141 name = "dirprepare",
1142 description = "Preparing upload directory structure",
1143 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1144 haltOnFailure = True
1145 ))
1146
1147 factory.addStep(ShellCommand(
1148 name = "linkprepare",
1149 description = "Preparing repository symlink",
1150 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1151 doStepIf = IsNoMasterBuild,
1152 haltOnFailure = True
1153 ))
1154
1155 if enable_kmod_archive:
1156 factory.addStep(ShellCommand(
1157 name = "kmoddirprepare",
1158 description = "Preparing kmod archive upload directory",
1159 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1160 haltOnFailure = True
1161 ))
1162
1163 factory.addStep(ShellCommand(
1164 name = "dirupload",
1165 description = "Uploading directory structure",
1166 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1167 env={'RSYNC_PASSWORD': rsync_bin_key},
1168 haltOnFailure = True,
1169 logEnviron = False,
1170 ))
1171
1172 # download remote sha256sums to 'target-sha256sums'
1173 factory.addStep(ShellCommand(
1174 name = "target-sha256sums",
1175 description = "Fetching remote sha256sums for target",
1176 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1177 env={'RSYNC_PASSWORD': rsync_bin_key},
1178 logEnviron = False,
1179 haltOnFailure = False,
1180 flunkOnFailure = False,
1181 warnOnFailure = False,
1182 ))
1183
1184 # build list of files to upload
1185 factory.addStep(FileDownload(
1186 name = "dlsha2rsyncpl",
1187 mastersrc = scripts_dir + '/sha2rsync.pl',
1188 workerdest = "../sha2rsync.pl",
1189 mode = 0o755,
1190 ))
1191
1192 factory.addStep(ShellCommand(
1193 name = "buildlist",
1194 description = "Building list of files to upload",
1195 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1196 haltOnFailure = True,
1197 ))
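# sha2rsync.pl is assumed to compare the previously fetched remote
# "target-sha256sums" with the freshly built local sha256sums and write only
# changed or new file names into "rsynclist"; the rsync.sh steps below consume
# that list via --files-from so unchanged artifacts are not re-uploaded.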
1198
1199 factory.addStep(FileDownload(
1200 name = "dlrsync.sh",
1201 mastersrc = scripts_dir + '/rsync.sh',
1202 workerdest = "../rsync.sh",
1203 mode = 0o755
1204 ))
1205
1206 # upload new files and update existing ones
1207 factory.addStep(ShellCommand(
1208 name = "targetupload",
1209 description = "Uploading target files",
1210 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1211 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1212 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1213 env={'RSYNC_PASSWORD': rsync_bin_key},
1214 haltOnFailure = True,
1215 logEnviron = False,
1216 ))
1217
1218 # delete files which don't exist locally
1219 factory.addStep(ShellCommand(
1220 name = "targetprune",
1221 description = "Pruning target files",
1222 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1223 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1224 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1225 env={'RSYNC_PASSWORD': rsync_bin_key},
1226 haltOnFailure = True,
1227 logEnviron = False,
1228 ))
1229
1230 if enable_kmod_archive:
1231 factory.addStep(ShellCommand(
1232 name = "kmodupload",
1233 description = "Uploading kmod archive",
1234 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1235 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1236 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1237 env={'RSYNC_PASSWORD': rsync_bin_key},
1238 haltOnFailure = True,
1239 logEnviron = False,
1240 ))
1241
1242 if rsync_src_url is not None:
1243 factory.addStep(ShellCommand(
1244 name = "sourcelist",
1245 description = "Finding source archives to upload",
1246 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1247 haltOnFailure = True
1248 ))
1249
1250 factory.addStep(ShellCommand(
1251 name = "sourceupload",
1252 description = "Uploading source archives",
1253 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1254 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1255 env={'RSYNC_PASSWORD': rsync_src_key},
1256 haltOnFailure = True,
1257 logEnviron = False,
1258 ))
1259
1260 if False:
1261 factory.addStep(ShellCommand(
1262 name = "packageupload",
1263 description = "Uploading package files",
1264 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1265 env={'RSYNC_PASSWORD': rsync_bin_key},
1266 haltOnFailure = False,
1267 logEnviron = False,
1268 ))
1269
1270 # logs
1271 if False:
1272 factory.addStep(ShellCommand(
1273 name = "upload",
1274 description = "Uploading logs",
1275 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1276 env={'RSYNC_PASSWORD': rsync_bin_key},
1277 haltOnFailure = False,
1278 alwaysRun = True,
1279 logEnviron = False,
1280 ))
1281
1282 factory.addStep(ShellCommand(
1283 name = "df",
1284 description = "Reporting disk usage",
1285 command=["df", "-h", "."],
1286 env={'LC_ALL': 'C'},
1287 haltOnFailure = False,
1288 alwaysRun = True
1289 ))
1290
1291 factory.addStep(ShellCommand(
1292 name = "ccachestat",
1293 description = "Reporting ccache stats",
1294 command=["ccache", "-s"],
1295 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1296 want_stderr = False,
1297 haltOnFailure = False,
1298 flunkOnFailure = False,
1299 warnOnFailure = False,
1300 alwaysRun = True,
1301 ))
1302
1303 c['builders'].append(BuilderConfig(name=target, workernames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1304
1305 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1306 force_factory.addStep(steps.Trigger(
1307 name = "trigger_%s" % target,
1308 description = "Triggering %s build" % target,
1309 schedulerNames = [ "trigger_%s" % target ],
1310 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1311 doStepIf = IsTargetSelected(target)
1312 ))
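# Summary of the force-build path wired above: the "00_force_build" builder
# runs only these Trigger steps; each step fires the per-target Triggerable
# scheduler when the forced "target" option is "all" or matches this target,
# and forwards the reason plus the optional tag so the tag-dependent steps and
# GetVersionPrefix behave the same as for polled builds.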
1313
1314
1315 ####### STATUS TARGETS
1316
1317 # 'www' and 'services' configure where build results are reported: the web UI
1318 # served by the buildmaster and optional reporters such as the IRC bot
1319 # configured below.
1320
1321 if ini.has_option("phase1", "status_bind"):
1322 c['www'] = {
1323 'port': ini.get("phase1", "status_bind"),
1324 'plugins': {
1325 'waterfall_view': True,
1326 'console_view': True,
1327 'grid_view': True
1328 }
1329 }
1330
1331 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1332 c['www']['auth'] = util.UserPasswordAuth([
1333 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1334 ])
1335 c['www']['authz'] = util.Authz(
1336 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1337 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
1338 )
1339
1340
c['services'] = []
1341 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1342 irc_host = ini.get("irc", "host")
1343 irc_port = 6667
1344 irc_chan = ini.get("irc", "channel")
1345 irc_nick = ini.get("irc", "nickname")
1346 irc_pass = None
1347
1348 if ini.has_option("irc", "port"):
1349 irc_port = ini.getint("irc", "port")
1350
1351 if ini.has_option("irc", "password"):
1352 irc_pass = ini.get("irc", "password")
1353
1354 irc = reporters.IRC(irc_host, irc_nick,
1355 port = irc_port,
1356 password = irc_pass,
1357 channels = [ irc_chan ],
1358 notify_events = [ 'exception', 'problem', 'recovery' ]
1359 )
1360
1361 c['services'].append(irc)
1362
1363 ####### DB URL
1364
1365 c['db'] = {
1366 # This specifies what database buildbot uses to store its state. You can leave
1367 # this at its default for all but the largest installations.
1368 'db_url' : "sqlite:///state.sqlite",
1369 }
1370
1371 c['buildbotNetUsageData'] = None