2 # ex: set syntax=python:
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
14 from twisted.internet import defer
15 from twisted.python import log
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
# Load the buildmaster configuration; the path can be overridden via the
# BUILDMASTER_CONFIG environment variable (defaults to ./config.ini).
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this master (also passed to worker-side scripts).
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional overrides from config.ini; each setting presumably keeps a default
# assigned earlier in the file when the option is absent (defaults not visible
# in this chunk).
if ini.has_option("phase2", "port"):
    # TCP port workers connect to
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    # keep build trees between builds instead of wiping them
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
    # unrelated builds assumed to share each worker (used by GetNumJobs)
    other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
    # build tree expiry age in seconds (see the expire.sh step below)
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    # clone feeds over SSH instead of HTTPS
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    # private key material written to ../git-clone.key on the worker
    git_ssh_key = ini.get("general", "git_ssh_key")
# Register every "worker <n>" section of config.ini that belongs to phase 2
# as a buildbot Worker, recording its concurrency limit in max_builds and
# its shared-workdir flag as a worker property.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            # The Worker() call below reads max_builds[name] unconditionally,
            # so it must always be populated; default to one build at a time.
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

                # a single-build worker can safely share its work directory
                if max_builds[name] == 1:
                    sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # BUG FIX: compare this worker's build limit, not the whole
                # max_builds dict — "max_builds != 1" was always True, so any
                # explicit shared_wd=yes raised even with builds=1.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
c['protocols'] = {'pb': {'port': worker_port}}

# Merge compatible queued build requests into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the closing of this JanitorConfigurator(...) call is not
# visible in this chunk.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
####### CHANGESOURCES

# Master-side working directory and location of the helper scripts checkout.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination for built packages (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for source tarball uploads.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Glob used to pick the SDK archive out of the per-target download directory.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch to track.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign package-signing key; the comment defaults to one derived from the
# branch name (e.g. "untrusted comment: Openwrt 21.02 key").
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
# Maintain a shallow local clone of the source tree; it is used below to
# enumerate buildable architectures and the default package feeds.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
# NOTE(review): the 'else:' branch for updating an existing clone is not
# visible in this chunk; this pull presumably sits on it.
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# Ask the tree itself which architectures it can build.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Each output line is "archname target/subtarget ...".
# NOTE(review): the surrounding read loop (and EOF check) is not visible in
# this chunk.
line = findarches.stdout.readline()
at = line.decode().strip().split()
archnames.append(at[0])
# Map feed repository URL -> branch, filled while parsing the feed lists.
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    """Parse one feeds.conf line; for src-git feeds, record the branch and
    register a GitPoller change source for the repository."""
    parts = line.strip().split()
    # NOTE(review): assumes a non-empty line — an empty one raises IndexError.
    if parts[0] == "src-git":
        # the URL may carry a ";branch" suffix
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

# The base feed (the source tree itself) as reported by the SDK makefiles...
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
parse_feed_entry(line)

# ...plus every entry from the default feeds list.
# NOTE(review): the per-line iteration over 'f' is not visible in this chunk.
with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    parse_feed_entry(line)
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

# Rebuild every architecture whenever a tracked feed branch changes.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(
        # only react to commits on the branch we actually poll for that repo
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger: a force scheduler driving the 00_force_build builder.
# NOTE(review): several parameter lines of this call are not visible in this
# chunk.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # fixed, empty codebase parameters: the forced build has no source stamp
    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")

    reason = util.StringParameter(
        default = "Trigger build",

    # optional restriction to a single architecture (or "all")
    util.NestedParameter(
        label="Build Options",
        util.ChoiceStringParameter(
            name = "architecture",
            label = "Build architecture",
            choices = [ "all" ] + archnames
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

def GetDirectorySuffix(props):
    """Renderer: return "-MM.NN" for a release_version property such as
    "21.02.3" or "21.02-SNAPSHOT", used to pick the versioned upload
    directory on the rsync server."""
    # matches NN.NN.N[-rcN] or NN.NN-SNAPSHOT
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        # NOTE(review): the guard for a failed match and the fallback return
        # value are not visible in this chunk.
        return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
def GetNumJobs(props):
    """Renderer: compute the make -j parallelism for a build — the worker's
    CPU count divided by the number of builds that may share the machine
    (this worker's max_builds plus the configured other_builds), plus one.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # BUG FIX: use floor division — on Python 3 "/" yields a float,
        # which then leaks into the "-j%(jobs)d" make argument.
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    else:
        # properties not (yet) known: be conservative and build serially
        return 1
# (continuation of GetCwd(props) — NOTE(review): the enclosing 'def' line is
# not visible in this chunk)
# Prefer the build directory property, fall back to the work directory.
if props.hasProperty("builddir"):
    return props["builddir"]
elif props.hasProperty("workdir"):
    return props["workdir"]
def IsArchitectureSelected(target):
    """Return a doStepIf predicate that is satisfied when the force-build
    "architecture" option is "all" or equals *target*."""
    def CheckArchitectureProperty(step):
        options = step.getProperty("options")
        # the force scheduler stores its NestedParameter values as a dict
        if type(options) is dict:
            selected_arch = options.get("architecture", "all")
            if selected_arch != "all" and selected_arch != target:
                # NOTE(review): the return statements of this predicate are
                # not visible in this chunk.

    return CheckArchitectureProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key file contents from a base64-encoded
    secret key.

    The decoded secret key layout is: 2 bytes key type, then (per usign's
    format) the key id at offset 32..40 and the public part at offset 72+.
    Returns the two-line public key file as a str, with "secret key" in the
    comment rewritten to "public key", or None if *seckey* is not valid
    base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # invalid/corrupt key material configured — let the caller decide
        return None

    # BUG FIX: b64encode() returns bytes on Python 3; decode it so the
    # formatted file does not contain a literal "b'...'" representation.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode("ascii"))
def IsSharedWorkdir(step):
    """doStepIf helper: true when this build's worker was configured with a
    shared work directory (the 'shared_wd' worker property)."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    # newest completed, non-skipped build request; limit=1 fetches at most
    # one row, newest first
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    # NOTE(review): the guard for an empty result set (returning None) is not
    # visible in this chunk.
    return completed[0]['complete_at']
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and

    @returns: list of sorted builders
    """
    def is_building(bldr):
        # a builder with an active (or stale) build slot counts as busy
        return bool(bldr.building) or bool(bldr.old_building)

    # NOTE(review): the 'def' line of the inner helper that pairs each
    # builder with its newest completion time is not visible in this chunk.
    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    # NOTE(review): the 'def' line of the sort-key helper is not visible in
    # this chunk either; never-built and busy builders get a synthetic
    # timestamp so they sort predictably.
    (complete_at, bldr) = item

    complete_at = date.replace(tzinfo=tzutc())

    if is_building(bldr):
        complete_at = date.replace(tzinfo=tzutc())

    return (complete_at, bldr.name)

    # NOTE(review): this local 'results' shadows the buildbot.process.results
    # module imported at the top of the file (harmless here, but confusing).
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]
# Use the freshness-based prioritization defined above.
c['prioritizeBuilders'] = prioritizeBuilders

# Per-worker lock serializing download-heavy steps.
dlLock = locks.WorkerLock("worker_dl")

# Every registered worker may run every builder.
for worker in c['workers']:
    workerNames.append(worker.workername)

# The force builder carries only Trigger steps; they are appended in the
# per-architecture loop below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
# Per-architecture build pipeline.
# NOTE(review): the enclosing "for arch in ..." loop header is not visible in
# this chunk; arch[0] is the architecture name, arch[1] "target/subtarget".
ts = arch[1].split('/')

factory = BuildFactory()

# setup shared work directory if required
factory.addStep(ShellCommand(
    description = "Setting up shared work directory",
    command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
    haltOnFailure = True,
    doStepIf = IsSharedWorkdir))

# find number of cores
factory.addStep(SetProperty(
    description = "Finding number of CPUs",
    command = ["nproc"]))

# fetch the cleanup helper from the master
factory.addStep(FileDownload(
    mastersrc = scripts_dir + '/cleanup.sh',
    workerdest = "../cleanup.sh",

# full cleanup: wipe everything left over from previous builds
factory.addStep(ShellCommand(
    description = "Cleaning previous builds",
    command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
    haltOnFailure = True,

# single cleanup: only this builder's work area
factory.addStep(ShellCommand(
    description = "Cleaning work area",
    command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
    haltOnFailure = True,

# expire tree if needed
# NOTE(review): the leading 'if' branch this 'elif' belongs to is not visible
# in this chunk.
elif tree_expire > 0:
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/expire.sh',
        workerdest = "../expire.sh",

    factory.addStep(ShellCommand(
        description = "Checking for build tree expiry",
        command = ["./expire.sh", str(tree_expire)],
        haltOnFailure = True,
# create the SDK directory
factory.addStep(ShellCommand(
    description = "Preparing SDK directory",
    command = ["mkdir", "-p", "sdk"],
    haltOnFailure = True))

# fetch the per-target SDK archive from the rsync server
factory.addStep(ShellCommand(
    name = "downloadsdk",
    description = "Downloading SDK archive",
    command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
    env={'RSYNC_PASSWORD': rsync_sdk_key},
    haltOnFailure = True,

factory.addStep(ShellCommand(
    description = "Unpacking SDK archive",
    command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
    haltOnFailure = True))

# overlay the freshly unpacked SDK onto the (possibly persistent) one;
# --checksum avoids needless rewrites of unchanged files
factory.addStep(ShellCommand(
    description = "Updating SDK",
    command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
    haltOnFailure = True))

# drop absolute host-tool symlinks unless they point into /bin or /usr/bin
factory.addStep(ShellCommand(
    name = "cleancmdlinks",
    description = "Sanitizing host command symlinks",
    command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
    haltOnFailure = True))

# helper makefile that prints the SDK's VERSION_NUMBER
factory.addStep(StringDownload(
    name = "writeversionmk",
    s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
    workerdest = "sdk/getversion.mk",

# capture the release version as a build property (used by GetDirectorySuffix)
factory.addStep(SetProperty(
    property = "release_version",
    description = "Finding SDK release version",
    workdir = "build/sdk",
    command = ["make", "-f", "getversion.mk"]))
# install the usign keypair: the real public key plus placeholder private
# key/certificate — actual signing happens on the master (signall.sh below)
if usign_key is not None:
    factory.addStep(StringDownload(
        name = "dlkeybuildpub",
        s = UsignSec2Pub(usign_key, usign_comment),
        workerdest = "sdk/key-build.pub",

    factory.addStep(StringDownload(
        s = "# fake private key",
        workerdest = "sdk/key-build",

    factory.addStep(StringDownload(
        name = "dlkeybuilducert",
        s = "# fake certificate",
        workerdest = "sdk/key-build.ucert",

# share one download cache per worker via $HOME/dl
factory.addStep(ShellCommand(
    description = "Preparing download directory",
    command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
    haltOnFailure = True))

# reset to the SDK's default configuration
factory.addStep(ShellCommand(
    description = "Preparing SDK configuration",
    workdir = "build/sdk",
    command = ["sh", "-c", "rm -f .config && make defconfig"]))

factory.addStep(FileDownload(
    mastersrc = scripts_dir + '/ccache.sh',
    workerdest = 'sdk/ccache.sh',

factory.addStep(ShellCommand(
    description = "Preparing ccache",
    workdir = "build/sdk",
    command = ["./ccache.sh"],
    haltOnFailure = True))

# use src-git-full so feed checkouts are full clones, not shallow ones
factory.addStep(ShellCommand(
    name = "patchfeedsconfgitfull",
    description = "Patching feeds.conf to use src-git-full",
    workdir = "build/sdk",
    command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
    haltOnFailure = True))
# SSH cloning support: install the deploy key and rewrite feed URLs.
# NOTE(review): the enclosing "if git_ssh:" guard (and the 's =' key payload
# of this download) are not visible in this chunk.
factory.addStep(StringDownload(
    name = "dlgitclonekey",
    workerdest = "../git-clone.key",

factory.addStep(ShellCommand(
    name = "patchfeedsconf",
    description = "Patching feeds.conf to use SSH cloning",
    workdir = "build/sdk",
    command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "updatefeeds",
    description = "Updating feeds",
    workdir = "build/sdk",
    command = ["./scripts/feeds", "update", "-f"],
    # when cloning over SSH, pin the identity file and skip host key checks
    env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
    haltOnFailure = True))

# remove the patched feeds.conf again
factory.addStep(ShellCommand(
    name = "rmfeedsconf",
    description = "Removing feeds.conf",
    workdir = "build/sdk",
    command=["rm", "feeds.conf"],
    haltOnFailure = True))

factory.addStep(ShellCommand(
    name = "installfeeds",
    description = "Installing feeds",
    workdir = "build/sdk",
    command = ["./scripts/feeds", "install", "-a"],
    haltOnFailure = True))
# start every run with clean failure logs
factory.addStep(ShellCommand(
    description = "Clearing failure logs",
    workdir = "build/sdk",
    command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
    haltOnFailure = False

# build all packages; IGNORE_ERRORS lets the build continue past individual
# broken packages, BUILD_LOG captures per-package logs for faillog collection
factory.addStep(ShellCommand(
    description = "Building packages",
    workdir = "build/sdk",
    command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
    env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
    haltOnFailure = True))

# record the exact feed revisions used for this build alongside the packages
factory.addStep(ShellCommand(
    name = "mkfeedsconf",
    description = "Generating pinned feeds.conf",
    workdir = "build/sdk",
    command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
# sign the package indexes on the master when a signing key is configured
if ini.has_option("gpg", "key") or usign_key is not None:
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        description = "Preparing temporary signing directory",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],

    # pack all per-feed Packages index files ...
    factory.addStep(ShellCommand(
        description = "Packing files to sign",
        workdir = "build/sdk",
        command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

    # ... upload them to the master ...
    factory.addStep(FileUpload(
        workersrc = "sdk/sign.tar.gz",
        masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),

    # ... sign there with the real keys ...
    factory.addStep(MasterShellCommand(
        description = "Signing files",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

    # ... and bring the signed archive back to the worker
    factory.addStep(FileDownload(
        mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
        workerdest = "sdk/sign.tar.gz",

    factory.addStep(ShellCommand(
        description = "Unpacking signed files",
        workdir = "build/sdk",
        command = ["tar", "-xzf", "sign.tar.gz"],
# create the remote per-arch directory without touching sibling arches
# (include only /<arch>/ itself, exclude its contents and everything else)
factory.addStep(ShellCommand(
    name = "uploadprepare",
    description = "Preparing package directory",
    workdir = "build/sdk",
    command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# upload the built packages; --delay-updates plus a partial dir keeps the
# remote tree consistent while the transfer is in flight
factory.addStep(ShellCommand(
    name = "packageupload",
    description = "Uploading package files",
    workdir = "build/sdk",
    command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# same include/exclude trick for the remote faillogs directory
factory.addStep(ShellCommand(
    description = "Preparing log directory",
    workdir = "build/sdk",
    command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,

# extract the failed package names from error.txt and list their log files
factory.addStep(ShellCommand(
    description = "Finding failure logs",
    workdir = "build/sdk/logs/package/feeds",
    command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
    haltOnFailure = False

factory.addStep(ShellCommand(
    description = "Collecting failure logs",
    workdir = "build/sdk",
    command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
    haltOnFailure = False

factory.addStep(ShellCommand(
    description = "Uploading failure logs",
    workdir = "build/sdk",
    command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = False,
# upload new source tarballs when a source mirror is configured
if rsync_src_url is not None:
    factory.addStep(ShellCommand(
        description = "Finding source archives to upload",
        workdir = "build/sdk",
        # only files newer than the SDK archive; skip dotfiles and empty files
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",

    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        workdir = "build/sdk",
        # per-worker partial dir so concurrent workers don't clobber each other
        command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
            WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = False,

# best-effort disk usage report at the end of the build
factory.addStep(ShellCommand(
    description = "Reporting disk usage",
    command=["df", "-h", "."],
    haltOnFailure = False,
# register the per-arch builder, a triggerable scheduler for it, and the
# matching Trigger step on the force builder
c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
force_factory.addStep(steps.Trigger(
    name = "trigger_%s" % arch[0],
    description = "Triggering %s build" % arch[0],
    schedulerNames = [ "trigger_%s" % arch[0] ],
    # propagate the force scheduler's reason into the triggered build
    set_properties = { "reason": Property("reason") },
    doStepIf = IsArchitectureSelected(arch[0])
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# built-in web UI, enabled when a bind address/port is configured
if ini.has_option("phase2", "status_bind"):
    # NOTE(review): the surrounding c['www'] = dict(...) construction is not
    # fully visible in this chunk.
    'port': ini.get("phase2", "status_bind"),
    'waterfall_view': True,
    'console_view': True,

# optional basic auth; the configured user becomes the sole admin role
if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
    c['www']['auth'] = util.UserPasswordAuth([
        (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
    c['www']['authz'] = util.Authz(
        allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
        roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out

c['buildbotURL'] = buildbot_url

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the enclosing c['db'] = { ... } dict literal is not fully
# visible in this chunk.
'db_url' : "sqlite:///state.sqlite",

# opt out of buildbot's anonymous usage reporting
c['buildbotNetUsageData'] = None