2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
# Record this master's PID so service scripts can locate/stop the daemon.
40 if not os.path.exists("twistd.pid"):
41 with open("twistd.pid", "w") as pidfile:
42 pidfile.write("{}".format(os.getpid()))
# Load the INI configuration; BUILDMASTER_CONFIG overrides ./config.ini.
44 ini = configparser.ConfigParser()
45 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
47 buildbot_url = ini.get("phase2", "buildbot_url")
49 # This is a sample buildmaster config file. It must be installed as
50 # 'master.cfg' in your buildmaster's base directory.
52 # This is the dictionary that the buildmaster pays attention to. We also use
53 # a shorter alias to save typing.
54 c = BuildmasterConfig = {}
58 # The 'workers' list defines the set of recognized buildworkers. Each element is
59 # a Worker object, specifying a unique worker name and password. The same
60 # worker name and password must be configured on the worker.
# Optional [phase2]/[general] overrides.
# NOTE(review): the default assignments for worker_port, persistent,
# tree_expire, git_ssh and git_ssh_key are elided from this listing —
# confirm they are initialized before these conditionals.
68 if ini.has_option("phase2", "port"):
69 worker_port = ini.get("phase2", "port")
71 if ini.has_option("phase2", "persistent"):
72 persistent = ini.getboolean("phase2", "persistent")
74 if ini.has_option("phase2", "expire"):
75 tree_expire = ini.getint("phase2", "expire")
77 if ini.has_option("general", "git_ssh"):
78 git_ssh = ini.getboolean("general", "git_ssh")
80 if ini.has_option("general", "git_ssh_key"):
81 git_ssh_key = ini.get("general", "git_ssh_key")
# Register every "worker <name>" INI section with phase = 2 as a build
# worker.  A worker may declare a shared work directory (shared_wd), which
# is only safe when it runs at most one concurrent build.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            # Default: one concurrent build per worker (read again below and
            # when constructing the Worker, so it must always be present).
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

                # A single-build worker can safely share its workdir.
                if max_builds[name] == 1:
                    sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")

                # BUG FIX: compare this worker's build limit, not the whole
                # max_builds dict — "max_builds != 1" is always True, so any
                # worker opting into shared_wd would always raise.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
110 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
111 # This must match the value configured into the buildworkers (with their
113 c['protocols'] = {'pb': {'port': worker_port}}
# Merge compatible pending build requests into a single build.
116 c['collapseRequests'] = True
118 # Reduce amount of backlog data
119 c['configurators'] = [util.JanitorConfigurator(
120 logHorizon=timedelta(days=3),
124 ####### CHANGESOURCES
# Working directories: the source checkout lives under work_dir, helper
# scripts are expected one level up from the master basedir.
126 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
127 scripts_dir = os.path.abspath("../scripts")
# rsync endpoints/credentials for uploading binaries, sources and the SDK.
129 rsync_bin_url = ini.get("rsync", "binary_url")
130 rsync_bin_key = ini.get("rsync", "binary_password")
135 if ini.has_option("rsync", "source_url"):
136 rsync_src_url = ini.get("rsync", "source_url")
137 rsync_src_key = ini.get("rsync", "source_password")
141 rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
143 if ini.has_option("rsync", "sdk_url"):
144 rsync_sdk_url = ini.get("rsync", "sdk_url")
146 if ini.has_option("rsync", "sdk_password"):
147 rsync_sdk_key = ini.get("rsync", "sdk_password")
149 if ini.has_option("rsync", "sdk_pattern"):
150 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
# Source repository to poll/build; branch defaults to master.
152 repo_url = ini.get("repo", "url")
153 repo_branch = "master"
155 if ini.has_option("repo", "branch"):
156 repo_branch = ini.get("repo", "branch")
# usign package-signing key; default comment derives from the branch name.
159 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
161 if ini.has_option("usign", "key"):
162 usign_key = ini.get("usign", "key")
164 if ini.has_option("usign", "comment"):
165 usign_comment = ini.get("usign", "comment")
# Clone (or refresh) a shallow checkout of the source tree on the master,
# then enumerate the package architectures it supports.
172 if not os.path.isdir(work_dir+'/source.git'):
173 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
175 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
177 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
178 findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
179 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
# NOTE(review): the read loop around the next lines is elided — presumably a
# while loop terminating on EOF; archnames collects the first token per line.
182 line = findarches.stdout.readline()
185 at = line.decode().strip().split()
187 archnames.append(at[0])
# Map of feed repo URL -> branch, filled by parse_feed_entry below.
192 feedbranches = dict()
194 c['change_source'] = []
def parse_feed_entry(line):
    """Register a change source for a single feeds.conf entry.

    Only src-git/src-git-full entries are considered.  The URL may carry a
    ';branch' suffix; without one the feed is assumed to track 'master'.
    Records the branch in feedbranches and appends a GitPoller (5 minute
    poll interval) to c['change_source'].
    """
    tokens = line.strip().split()
    # Ignore non-git feed methods (src-link, src-svn, ...).
    if not tokens[0].startswith("src-git"):
        return
    spec = tokens[2].strip().split(';')
    if len(spec) > 1:
        branch = spec[1]
    else:
        branch = 'master'
    feedbranches[spec[0]] = branch
    c['change_source'].append(GitPoller(spec[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), tokens[1]), pollinterval=300))
# Resolve the SDK's BASE_FEED definition via the build system itself, and
# feed each resulting entry to parse_feed_entry.
205 make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
206 env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
# NOTE(review): the surrounding read loop is elided from this listing.
208 line = make.stdout.readline()
210 parse_feed_entry(str(line, 'utf-8'))
# Also register every entry of the checked-in default feed list.
212 with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
214 parse_feed_entry(line)
# Refuse to run without any change source at all.
216 if len(c['change_source']) == 0:
217 log.err("FATAL ERROR: no change_sources defined, aborting!")
222 # Configure the Schedulers, which decide how to react to incoming changes. In this
223 # case, just kick off a 'basebuild' build
# Only trigger on commits that land on the branch each feed is tracking.
226 c['schedulers'].append(SingleBranchScheduler(
228 change_filter = filter.ChangeFilter(
229 filter_fn = lambda change: change.branch == feedbranches[change.repository]
231 treeStableTimer = 60,
232 builderNames = archnames))
# Manual trigger: lets an operator force a build of one or all architectures.
234 c['schedulers'].append(ForceScheduler(
236 buttonName = "Force builds",
237 label = "Force build details",
238 builderNames = [ "00_force_build" ],
241 util.CodebaseParameter(
243 label = "Repository",
244 branch = util.FixedParameter(name = "branch", default = ""),
245 revision = util.FixedParameter(name = "revision", default = ""),
246 repository = util.FixedParameter(name = "repository", default = ""),
247 project = util.FixedParameter(name = "project", default = "")
251 reason = util.StringParameter(
254 default = "Trigger build",
260 util.NestedParameter(
262 label="Build Options",
265 util.ChoiceStringParameter(
266 name = "architecture",
267 label = "Build architecture",
269 choices = [ "all" ] + archnames
278 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
279 # what steps, and which workers can execute them. Note that any particular build will
280 # only take place on one worker.
282 def GetDirectorySuffix(props):
    """Map the "release_version" build property to an upload directory
    suffix such as "-21.02"; release versions look like YY.MM[.patch[-rcN]]
    or YY.MM-SNAPSHOT."""
283 verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
284 if props.hasProperty("release_version"):
285 m = verpat.match(props["release_version"])
# NOTE(review): the guard for a failed match (and the fallback return for a
# missing/unmatched version) is elided here — confirm `m` is checked before
# its groups are used.
287 return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
290 def GetNumJobs(props):
    """Compute the make -j parallelism: the worker's CPU count divided by
    its configured number of concurrent builds."""
291 if props.hasProperty("workername") and props.hasProperty("nproc"):
# NOTE(review): true division yields a float; the "-j%(jobs)d" consumer
# truncates it.  The fallback branch for missing properties is elided here.
292 return (int(props["nproc"]) / max_builds[props["workername"]])
# NOTE(review): this appears to be the body of GetCwd (used below as
# cwd=GetCwd in WithProperties); its def line is elided — confirm.
# Prefers the step's builddir property, then workdir.
297 if props.hasProperty("builddir"):
298 return props["builddir"]
299 elif props.hasProperty("workdir"):
300 return props["workdir"]
304 def IsArchitectureSelected(target):
    """Return a doStepIf predicate for the force scheduler: skip the step
    when a specific architecture other than *target* was selected."""
305 def CheckArchitectureProperty(step):
307 options = step.getProperty("options")
# "options" is the nested force-scheduler parameter; only a dict carries
# the "architecture" choice ("all" means build everything).
308 if type(options) is dict:
309 selected_arch = options.get("architecture", "all")
310 if selected_arch != "all" and selected_arch != target:
# NOTE(review): the return statements (presumably False here, True in all
# other paths) are elided from this listing — confirm against the original.
317 return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key string from a base64-encoded secret key.

    The decoded secret key blob contains the 2-byte algorithm tag at offset
    0, the 8-byte key id at offset 32, and the public key material from
    offset 72 on; those pieces re-encoded form the public key.  The comment
    is reused with a trailing "secret key" rewritten to "public key".

    Returns the two-line public key text, or None if *seckey* is not valid
    base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # Invalid/corrupt key material configured — caller treats None as
        # "no key available".
        return None

    # BUG FIX: b64encode() returns bytes; without .decode() the generated
    # key-build.pub would contain a literal "b'...'" repr and be unusable.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
def IsSharedWorkdir(step):
    """doStepIf predicate: True when this build's worker was configured
    with a shared work directory (the "shared_wd" worker property)."""
    shared = step.getProperty("shared_wd")
    return True if shared else False
331 @defer.inlineCallbacks
332 def getNewestCompleteTime(bldr):
333 """Returns the complete_at of the latest completed and not SKIPPED
334 build request for this builder, or None if there are no such build
335 requests. We need to filter out SKIPPED requests because we're
336 using collapseRequests=True which is unfortunately marking all
337 previous requests as complete when new buildset is created.
339 @returns: datetime instance or None, via Deferred
# Newest completed, non-skipped build request for this builder via the
# Buildbot data API, sorted newest-first and limited to one row.
342 bldrid = yield bldr.getBuilderId()
343 completed = yield bldr.master.data.get(
344 ('builders', bldrid, 'buildrequests'),
346 resultspec.Filter('complete', 'eq', [True]),
347 resultspec.Filter('results', 'ne', [results.SKIPPED]),
349 order=['-complete_at'], limit=1)
# NOTE(review): the early-return for an empty result set is elided here.
353 complete_at = completed[0]['complete_at']
# Cross-check against the most recently started build; its completion time
# can be newer than the newest completed request.
355 last_build = yield bldr.master.data.get(
358 resultspec.Filter('builderid', 'eq', [bldrid]),
360 order=['-started_at'], limit=1)
362 if last_build and last_build[0]:
363 last_complete_at = last_build[0]['complete_at']
364 if last_complete_at and (last_complete_at > complete_at):
365 return last_complete_at
369 @defer.inlineCallbacks
370 def prioritizeBuilders(master, builders):
371 """Returns sorted list of builders by their last timestamp of completed and
374 @returns: list of sorted builders
# A builder is "busy" if it has current or lingering builds in flight.
377 def is_building(bldr):
378 return bool(bldr.building) or bool(bldr.old_building)
# NOTE(review): the def lines of the bldr_info and bldr_sort helpers are
# elided from this listing; the bodies below belong to them.
381 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
382 d.addCallback(lambda complete_at: (complete_at, bldr))
386 (complete_at, bldr) = item
# NOTE(review): `date` is assigned in elided lines — presumably sentinel
# datetimes normalized to UTC so never-built and busy builders sort last.
390 complete_at = date.replace(tzinfo=tzutc())
392 if is_building(bldr):
394 complete_at = date.replace(tzinfo=tzutc())
396 return (complete_at, bldr.name)
# NOTE: this local `results` shadows the imported buildbot.process.results
# module, but only inside this function, which never uses that module.
398 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
399 results.sort(key=bldr_sort)
402 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
404 return [r[1] for r in results]
# Install the least-recently-built-first scheduling order defined above.
406 c['prioritizeBuilders'] = prioritizeBuilders
# Per-worker lock serializing download steps.
409 dlLock = locks.WorkerLock("worker_dl")
# NOTE(review): the workerNames = [] initializer is elided above this loop.
413 for worker in c['workers']:
414 workerNames.append(worker.workername)
# Dummy builder the ForceScheduler targets; it only fires per-arch triggers.
416 force_factory = BuildFactory()
418 c['builders'].append(BuilderConfig(
419 name = "00_force_build",
420 workernames = workerNames,
421 factory = force_factory))
# --- Per-architecture build factory ---
# NOTE(review): the enclosing `for arch in ...` header is elided; arch[0] is
# the architecture name, arch[1] a "target/subtarget" path — confirm.
424 ts = arch[1].split('/')
426 factory = BuildFactory()
428 # setup shared work directory if required
429 factory.addStep(ShellCommand(
431 description = "Setting up shared work directory",
432 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
434 haltOnFailure = True,
435 doStepIf = IsSharedWorkdir))
437 # find number of cores
438 factory.addStep(SetProperty(
441 description = "Finding number of CPUs",
442 command = ["nproc"]))
# Fetch the cleanup helper from the master and wipe previous build state.
445 factory.addStep(FileDownload(
446 mastersrc = scripts_dir + '/cleanup.sh',
447 workerdest = "../cleanup.sh",
# NOTE(review): the conditional (presumably `if not persistent:`) selecting
# between full cleanup and expiry below is elided from this listing.
451 factory.addStep(ShellCommand(
453 description = "Cleaning previous builds",
454 command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
456 haltOnFailure = True,
459 factory.addStep(ShellCommand(
461 description = "Cleaning work area",
462 command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
464 haltOnFailure = True,
467 # expire tree if needed
468 elif tree_expire > 0:
469 factory.addStep(FileDownload(
470 mastersrc = scripts_dir + '/expire.sh',
471 workerdest = "../expire.sh",
474 factory.addStep(ShellCommand(
476 description = "Checking for build tree expiry",
477 command = ["./expire.sh", str(tree_expire)],
479 haltOnFailure = True,
# Download the per-target SDK over rsync and sync it into ./sdk.
482 factory.addStep(ShellCommand(
484 description = "Preparing SDK directory",
485 command = ["mkdir", "-p", "sdk"],
486 haltOnFailure = True))
488 factory.addStep(ShellCommand(
489 name = "downloadsdk",
490 description = "Downloading SDK archive",
491 command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
492 env={'RSYNC_PASSWORD': rsync_sdk_key},
493 haltOnFailure = True,
496 factory.addStep(ShellCommand(
498 description = "Unpacking SDK archive",
499 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
500 haltOnFailure = True))
# checksum-based rsync keeps ccache-relevant timestamps stable.
502 factory.addStep(ShellCommand(
504 description = "Updating SDK",
505 command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
506 haltOnFailure = True))
# Drop absolute host-tool symlinks that would escape the SDK staging dir.
508 factory.addStep(ShellCommand(
509 name = "cleancmdlinks",
510 description = "Sanitizing host command symlinks",
511 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
512 haltOnFailure = True))
# Ask the SDK's own version.mk for the release version property.
514 factory.addStep(StringDownload(
515 name = "writeversionmk",
516 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
517 workerdest = "sdk/getversion.mk",
520 factory.addStep(SetProperty(
522 property = "release_version",
523 description = "Finding SDK release version",
524 workdir = "build/sdk",
525 command = ["make", "-f", "getversion.mk"]))
# Install the usign public key plus placeholder private key/cert so the SDK
# does not generate its own signing material.
528 if usign_key is not None:
529 factory.addStep(StringDownload(
530 name = "dlkeybuildpub",
531 s = UsignSec2Pub(usign_key, usign_comment),
532 workerdest = "sdk/key-build.pub",
535 factory.addStep(StringDownload(
537 s = "# fake private key",
538 workerdest = "sdk/key-build",
541 factory.addStep(StringDownload(
542 name = "dlkeybuilducert",
543 s = "# fake certificate",
544 workerdest = "sdk/key-build.ucert",
# Share one download cache across builds via $HOME/dl.
547 factory.addStep(ShellCommand(
549 description = "Preparing download directory",
550 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
551 haltOnFailure = True))
# Reset the SDK configuration and set up ccache.
553 factory.addStep(ShellCommand(
555 description = "Preparing SDK configuration",
556 workdir = "build/sdk",
557 command = ["sh", "-c", "rm -f .config && make defconfig"]))
559 factory.addStep(FileDownload(
560 mastersrc = scripts_dir + '/ccache.sh',
561 workerdest = 'sdk/ccache.sh',
564 factory.addStep(ShellCommand(
566 description = "Preparing ccache",
567 workdir = "build/sdk",
568 command = ["./ccache.sh"],
569 haltOnFailure = True))
# Use full git clones for feeds so exact revisions can be pinned later.
571 factory.addStep(ShellCommand(
572 name = "patchfeedsconfgitfull",
573 description = "Patching feeds.conf to use src-git-full",
574 workdir = "build/sdk",
575 command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
576 haltOnFailure = True))
# Optional SSH-based cloning: deploy the deploy key and rewrite feed URLs.
# NOTE(review): the `if git_ssh:` guard around these steps is elided here.
579 factory.addStep(StringDownload(
580 name = "dlgitclonekey",
582 workerdest = "../git-clone.key",
585 factory.addStep(ShellCommand(
586 name = "patchfeedsconf",
587 description = "Patching feeds.conf to use SSH cloning",
588 workdir = "build/sdk",
589 command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
590 haltOnFailure = True))
592 factory.addStep(ShellCommand(
593 name = "updatefeeds",
594 description = "Updating feeds",
595 workdir = "build/sdk",
596 command = ["./scripts/feeds", "update", "-f"],
597 env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
598 haltOnFailure = True))
# Remove the rewritten feeds.conf again (likely so the pinned feeds.conf
# generated after the build is authoritative — confirm).
601 factory.addStep(ShellCommand(
602 name = "rmfeedsconf",
603 description = "Removing feeds.conf",
604 workdir = "build/sdk",
605 command=["rm", "feeds.conf"],
606 haltOnFailure = True))
608 factory.addStep(ShellCommand(
609 name = "installfeeds",
610 description = "Installing feeds",
611 workdir = "build/sdk",
612 command = ["./scripts/feeds", "install", "-a"],
613 haltOnFailure = True))
# Best-effort reset of failure logs from a previous run.
615 factory.addStep(ShellCommand(
617 description = "Clearing failure logs",
618 workdir = "build/sdk",
619 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
620 haltOnFailure = False,
621 flunkOnFailure = False,
622 warnOnFailure = True,
# The actual package compile; parallelism comes from GetNumJobs, package
# signing is disabled because signing happens on the master.
625 factory.addStep(ShellCommand(
627 description = "Building packages",
628 workdir = "build/sdk",
630 command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
631 env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
632 haltOnFailure = True))
# Record the exact feed revisions that produced this package set.
634 factory.addStep(ShellCommand(
635 name = "mkfeedsconf",
636 description = "Generating pinned feeds.conf",
637 workdir = "build/sdk",
638 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
# Signing round-trip: pack the Packages indexes, sign them on the master
# (signall.sh), then unpack the signed results on the worker.
640 if ini.has_option("gpg", "key") or usign_key is not None:
641 factory.addStep(MasterShellCommand(
642 name = "signprepare",
643 description = "Preparing temporary signing directory",
644 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
648 factory.addStep(ShellCommand(
650 description = "Packing files to sign",
651 workdir = "build/sdk",
652 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
656 factory.addStep(FileUpload(
657 workersrc = "sdk/sign.tar.gz",
658 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
662 factory.addStep(MasterShellCommand(
664 description = "Signing files",
665 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
666 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
670 factory.addStep(FileDownload(
671 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
672 workerdest = "sdk/sign.tar.gz",
676 factory.addStep(ShellCommand(
678 description = "Unpacking signed files",
679 workdir = "build/sdk",
680 command = ["tar", "-xzf", "sign.tar.gz"],
# Upload built packages: first create the remote arch directory, then
# mirror the package tree (delete + checksum + delayed updates for atomicity).
684 factory.addStep(ShellCommand(
685 name = "uploadprepare",
686 description = "Preparing package directory",
687 workdir = "build/sdk",
688 command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
689 env={'RSYNC_PASSWORD': rsync_bin_key},
690 haltOnFailure = True,
694 factory.addStep(ShellCommand(
695 name = "packageupload",
696 description = "Uploading package files",
697 workdir = "build/sdk",
698 command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
699 env={'RSYNC_PASSWORD': rsync_bin_key},
700 haltOnFailure = True,
# Collect and publish per-package failure logs (all best-effort).
704 factory.addStep(ShellCommand(
706 description = "Preparing log directory",
707 workdir = "build/sdk",
708 command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
709 env={'RSYNC_PASSWORD': rsync_bin_key},
710 haltOnFailure = True,
714 factory.addStep(ShellCommand(
716 description = "Finding failure logs",
717 workdir = "build/sdk/logs/package/feeds",
718 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
719 haltOnFailure = False,
720 flunkOnFailure = False,
721 warnOnFailure = True,
724 factory.addStep(ShellCommand(
726 description = "Collecting failure logs",
727 workdir = "build/sdk",
728 command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
729 haltOnFailure = False,
730 flunkOnFailure = False,
731 warnOnFailure = True,
734 factory.addStep(ShellCommand(
736 description = "Uploading failure logs",
737 workdir = "build/sdk",
738 command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
739 env={'RSYNC_PASSWORD': rsync_bin_key},
740 haltOnFailure = False,
741 flunkOnFailure = False,
742 warnOnFailure = True,
# Optionally upload new source tarballs (only those newer than the SDK
# archive, i.e. fetched during this build).
746 if rsync_src_url is not None:
747 factory.addStep(ShellCommand(
749 description = "Finding source archives to upload",
750 workdir = "build/sdk",
751 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
755 factory.addStep(ShellCommand(
756 name = "sourceupload",
757 description = "Uploading source archives",
758 workdir = "build/sdk",
759 command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
760 WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
761 env={'RSYNC_PASSWORD': rsync_src_key},
762 haltOnFailure = False,
763 flunkOnFailure = False,
764 warnOnFailure = True,
# Informational disk-usage reports at the end of the build.
768 factory.addStep(ShellCommand(
770 description = "Reporting disk usage",
771 command=["df", "-h", "."],
773 haltOnFailure = False,
774 flunkOnFailure = False,
775 warnOnFailure = False,
779 factory.addStep(ShellCommand(
781 description = "Reporting estimated file space usage",
782 command=["du", "-sh", "."],
784 haltOnFailure = False,
785 flunkOnFailure = False,
786 warnOnFailure = False,
# Register the builder, a Triggerable scheduler for it, and a trigger step
# on the force-build factory gated by the selected architecture.
790 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
792 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
793 force_factory.addStep(steps.Trigger(
794 name = "trigger_%s" % arch[0],
795 description = "Triggering %s build" % arch[0],
796 schedulerNames = [ "trigger_%s" % arch[0] ],
797 set_properties = { "reason": Property("reason") },
798 doStepIf = IsArchitectureSelected(arch[0])
801 ####### STATUS TARGETS
803 # 'status' is a list of status targets. The results of each build will be
804 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
805 # including web pages, email senders, and IRC bots.
# Enable the web UI when a bind address/port is configured.
# NOTE(review): the `c['www'] = dict(...)` wrapper around the next options
# is elided from this listing.
807 if ini.has_option("phase2", "status_bind"):
809 'port': ini.get("phase2", "status_bind"),
811 'waterfall_view': True,
812 'console_view': True,
# Optional basic auth; the configured user gets the "admins" role, and only
# admins may hit control endpoints (force/stop/etc.).
817 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
818 c['www']['auth'] = util.UserPasswordAuth([
819 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
821 c['www']['authz'] = util.Authz(
822 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
823 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
826 ####### PROJECT IDENTITY
828 # the 'title' string will appear at the top of this buildbot
829 # installation's html.WebStatus home page (linked to the
830 # 'titleURL') and is embedded in the title of the waterfall HTML page.
832 c['title'] = ini.get("general", "title")
833 c['titleURL'] = ini.get("general", "title_url")
835 # the 'buildbotURL' string should point to the location where the buildbot's
836 # internal web server (usually the html.WebStatus page) is visible. This
837 # typically uses the port number set in the Waterfall 'status' entry, but
838 # with an externally-visible host name which the buildbot cannot figure out
841 c['buildbotURL'] = buildbot_url
846 # This specifies what database buildbot uses to store its state. You can leave
847 # this at its default for all but the largest installations.
# NOTE(review): the `c['db'] = {` opener for this entry is elided here.
848 'db_url' : "sqlite:///state.sqlite",
# Opt out of sending anonymous usage statistics.
851 c['buildbotNetUsageData'] = None