phase2: sanitize SDK host command symlinks
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import ConfigParser
9
10 from buildbot import locks
11
# Read master configuration from the INI file pointed to by
# BUILDMASTER_CONFIG (defaults to ./config.ini in the master basedir).
ini = ConfigParser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this buildmaster, passed to slave-side scripts.
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
23
24 ####### BUILDSLAVES
25
26 # The 'slaves' list defines the set of recognized buildslaves. Each element is
27 # a BuildSlave object, specifying a unique slave name and password. The same
28 # slave name and password must be configured on the slave.
29 from buildbot.buildslave import BuildSlave
30
# Defaults for phase2-wide tunables; each may be overridden in config.ini.
slave_port = 9990        # TCP port slaves connect to (phase2/port)
persistent = False       # keep build trees between builds? (phase2/persistent)
other_builds = 0         # builds assumed to run concurrently outside this master, used for -j
tree_expire = 0          # persistent tree expiry in seconds, 0 = never (phase2/expire)
git_ssh = False          # rewrite feed URLs to ssh:// and use a deploy key?
git_ssh_key = None       # private key material for git-over-ssh

if ini.has_option("phase2", "port"):
	slave_port = ini.getint("phase2", "port")

if ini.has_option("phase2", "persistent"):
	persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
	other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
	tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
else:
	# No key configured: git-over-ssh cannot work, force it off.
	git_ssh = False
57
c['slaves'] = []
max_builds = dict()  # per-slave concurrency limit, also used by GetNumJobs()

# Register every "slave xyz" section of config.ini that declares phase 2.
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': False }
			max_builds[name] = 1

			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")

			# A slave limited to one concurrent build can safely share a
			# single work directory between all of its builders.
			if max_builds[name] == 1:
				sl_props['shared_wd'] = True

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
				# Fix: compare this slave's build limit, not the dict object
				# itself — the old "max_builds != 1" was always True, so any
				# explicit shared_wd=yes raised even with builds = 1.
				if sl_props['shared_wd'] and (max_builds[name] != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')

			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name], properties = sl_props))
82
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds: merge queued requests for the same builder into one build
c['mergeRequests'] = True

# Reduce amount of backlog data kept by the master (builds / step logs)
c['buildHorizon'] = 30
c['logHorizon'] = 20
94
####### CHANGESOURCES

# Master-side working directory (holds source.git, the signing staging area).
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination (and password) where finished packages and logs go.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for mirroring downloaded source archives.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# Where prebuilt SDK archives are fetched from, and the glob to select one.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch this master builds packages for.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

# Optional usign signing key; the comment defaults to one derived from the
# branch name (e.g. "untrusted comment: Openwrt 18.06 key").
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")
137
138
# find arches
arches = [ ]     # full dumpinfo.pl records: [arch_name, "target/subtarget", ...]
archnames = [ ]  # just the arch names; these become the builder names

# Keep a shallow master-side checkout of the source tree so we can
# enumerate the package architectures it supports.
if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

findarches = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# dumpinfo.pl emits one whitespace-separated record per line; read to EOF.
while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.strip().split()
	arches.append(at)
	archnames.append(at[0])
158
159
# find feeds
feeds = []             # parsed src-git entries: [method, name, url[;branch]]
feedbranches = dict()  # feed repo url -> branch, used to filter changes

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
166
def parse_feed_entry(line):
	"""Parse one feeds.conf line; register src-git feeds for polling.

	Records the feed in `feeds`, remembers its branch in `feedbranches`
	and attaches a GitPoller change source for it. Non-src-git entries
	are ignored.
	"""
	fields = line.strip().split()
	if fields[0] != "src-git":
		return
	feeds.append(fields)
	# The URL may carry a ";branch" suffix; default to master without one.
	loc = fields[2].strip().split(';')
	branch = loc[1] if len(loc) > 1 else 'master'
	feedbranches[loc[0]] = branch
	c['change_source'].append(GitPoller(loc[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), fields[1]), pollinterval=300))
175
# Ask the build system for the SDK's BASE_FEED (the core packages feed),
# which is not listed in feeds.conf.default.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
	parse_feed_entry(line)

# Register every feed listed in the tree's default feed configuration.
with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parse_feed_entry(line)
186
187
188 ####### SCHEDULERS
189
190 # Configure the Schedulers, which decide how to react to incoming changes. In this
191 # case, just kick off a 'basebuild' build
192
def branch_change_filter(change):
	"""Accept a change only if it is on the branch tracked for its feed repo."""
	tracked = feedbranches[change.repository]
	return tracked == change.branch
195
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
# Kick all arch builders once a tracked feed branch has been quiet for 60s.
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
	treeStableTimer=60,
	builderNames=archnames))

# Allow manual triggering of any builder from the web UI.
c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=archnames))
209
210 ####### BUILDERS
211
212 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
213 # what steps, and which slaves can execute them. Note that any particular build will
214 # only take place on one slave.
215
216 from buildbot.process.factory import BuildFactory
217 from buildbot.steps.source import Git
218 from buildbot.steps.shell import ShellCommand
219 from buildbot.steps.shell import SetProperty
220 from buildbot.steps.transfer import FileUpload
221 from buildbot.steps.transfer import FileDownload
222 from buildbot.steps.transfer import StringDownload
223 from buildbot.steps.master import MasterShellCommand
224 from buildbot.process.properties import WithProperties
225
226
def GetDirectorySuffix(props):
	"""Return "-MM.mm" for release builds, or "" for untagged/snapshot trees.

	Derived from the build's "release_version" property, which matches
	e.g. "18.06.2", "18.06.0-rc1" or "18.06-SNAPSHOT".
	"""
	# Raw string: '\.' in a plain literal relies on Python passing unknown
	# escapes through, which is deprecated; r'...' is the correct form.
	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if props.hasProperty("release_version"):
		m = verpat.match(props["release_version"])
		if m is not None:
			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
	return ""
234
def GetNumJobs(props):
	"""Heuristic make -j value for a build on this slave.

	Shares the slave's CPU count between its own concurrent builders and
	any externally configured builds, plus one. Falls back to 1 when the
	required properties are missing.
	"""
	if props.hasProperty("slavename") and props.hasProperty("nproc"):
		# // makes the integer division explicit (identical result under
		# Python 2, and correct under Python 3 where / yields a float).
		return ((int(props["nproc"]) // (max_builds[props["slavename"]] + other_builds)) + 1)
	else:
		return 1
240
def GetCwd(props):
	"""Best-effort slave-side working directory, preferring the build dir."""
	for candidate in ("builddir", "workdir"):
		if props.hasProperty(candidate):
			return props[candidate]
	return "/"
248
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key file content from a base64 secret key.

	Returns the two-line public key blob ("untrusted comment: ..." plus
	the base64 public material), or None when seckey is not decodable.
	The public portion is the algorithm id (bytes 0-1), the key id
	(bytes 32-39) and the public key itself (bytes 72 onward).
	"""
	try:
		seckey = base64.b64decode(seckey)
	except Exception:
		# Garbled key material: callers treat None as "no key configured".
		return None

	# Fix: the pattern must be a raw string — in the old '"\bsecret key$"'
	# the \b was a literal backspace character, so the substitution never
	# matched and the comment still said "secret key".
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode('ascii'))
257
def IsSharedWorkdir(step):
	"""doStepIf predicate: does this slave use a single shared work directory?"""
	shared = step.getProperty("shared_wd")
	return bool(shared)
260
261
c['builders'] = []

# Shared per-slave download lock (not referenced by the steps visible here).
dlLock = locks.SlaveLock("slave_dl")

# Every arch builder may run on any registered phase2 slave.
slaveNames = [ ]

for slave in c['slaves']:
	slaveNames.append(slave.slavename)
270
# One builder per package architecture; each factory downloads the matching
# SDK and builds all feed packages with it.
for arch in arches:
	# arch[1] is "target/subtarget", used to locate the SDK archive.
	ts = arch[1].split('/')

	factory = BuildFactory()

	# setup shared work directory if required
	factory.addStep(ShellCommand(
		name = "sharedwd",
		description = "Setting up shared work directory",
		command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsSharedWorkdir))

	# find number of cores
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/cleanup.sh',
		slavedest = "../cleanup.sh",
		mode = 0755))

	# Non-persistent trees: wipe the previous build state before starting.
	if not persistent:
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	# expire tree if needed
	elif tree_expire > 0:
		factory.addStep(FileDownload(
			mastersrc = scripts_dir + '/expire.sh',
			slavedest = "../expire.sh",
			mode = 0755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))
335
	# Fetch the prebuilt SDK archive for this target/subtarget via rsync.
	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	# Unpack into a staging dir first so a partial extract never corrupts sdk/.
	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
		haltOnFailure = True))

	# Sync the fresh tree over the (possibly persistent) sdk/ directory.
	factory.addStep(ShellCommand(
		name = "updatesdk",
		description = "Updating SDK",
		command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
		haltOnFailure = True))
355
356 factory.addStep(ShellCommand(
357 name = "cleancmdlinks",
358 description = "Sanitizing host command symlinks",
359 command = "sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
360 haltOnFailure = True))
361
	# Drop a tiny makefile into the SDK to extract its VERSION_NUMBER.
	factory.addStep(StringDownload(
		name = "writeversionmk",
		s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
		slavedest = "sdk/getversion.mk",
		mode = 0755))

	# Store the SDK release version as a build property (used for the
	# upload directory suffix via GetDirectorySuffix).
	factory.addStep(SetProperty(
		name = "getversion",
		property = "release_version",
		description = "Finding SDK release version",
		workdir = "build/sdk",
		command = ["make", "-f", "getversion.mk"]))

	# install build key
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			slavedest = "sdk/key-build.pub",
			mode = 0600))

		# Real signing happens on the master; slaves only get placeholder
		# files so the build system finds the names it expects.
		factory.addStep(StringDownload(
			name = "dlkeybuild",
			s = "# fake private key",
			slavedest = "sdk/key-build",
			mode = 0600))

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			slavedest = "sdk/key-build.ucert",
			mode = 0600))

	# Share one source-download cache per slave via $HOME/dl.
	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
		haltOnFailure = True))
400
	# Generate a default .config for the SDK.
	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/ccache.sh',
		slavedest = 'sdk/ccache.sh',
		mode = 0755))

	factory.addStep(ShellCommand(
		name = "prepccache",
		description = "Preparing ccache",
		workdir = "build/sdk",
		command = ["./ccache.sh"],
		haltOnFailure = True))
418
	# Optionally fetch feeds over ssh with a dedicated deploy key.
	if git_ssh:
		factory.addStep(StringDownload(
			name = "dlgitclonekey",
			s = git_ssh_key,
			slavedest = "../git-clone.key",
			mode = 0600))

		# Rewrite https:// feed URLs to ssh://git@ in a temporary feeds.conf.
		factory.addStep(ShellCommand(
			name = "patchfeedsconf",
			description = "Patching feeds.conf",
			workdir = "build/sdk",
			command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update", "-f"],
		env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
		haltOnFailure = True))

	# Drop the ssh rewrite again so later steps see the stock configuration.
	if git_ssh:
		factory.addStep(ShellCommand(
			name = "rmfeedsconf",
			description = "Removing feeds.conf",
			workdir = "build/sdk",
			command=["rm", "feeds.conf"],
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"],
		haltOnFailure = True))
455
	# Start from a clean failure-log state for this run.
	factory.addStep(ShellCommand(
		name = "logclear",
		description = "Clearing failure logs",
		workdir = "build/sdk",
		command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
		haltOnFailure = False
		))

	# Build all feed packages. IGNORE_ERRORS keeps going past individual
	# package failures; BUILD_LOG=1 writes per-package logs collected below;
	# signing is disabled here because it happens on the master.
	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		timeout = 3600,
		command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
		env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
		haltOnFailure = True))

	# Record the exact feed revisions that produced this package set.
	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
478
	# Signing round-trip: pack the Packages indexes, upload them to the
	# master, sign there (keys never leave the master) and unpack the
	# signed results back on the slave.
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			slavedest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
522
	# Create just this arch's directory on the download server; the
	# include/exclude pattern keeps sibling architectures untouched.
	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
		))

	# Upload the built packages; --delay-updates keeps the remote tree
	# consistent until the transfer completes.
	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
		))

	# Same directory-creation dance for the failure log area.
	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
		))

	# Extract the names of failed packages from the make error log and list
	# their build-log files into logs.txt.
	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
		))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
		))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
		))
578
	# Optionally mirror source archives downloaded during this build back
	# to the master's source mirror (only files newer than the SDK fetch).
	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			workdir = "build/sdk",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
			           WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	# Always report remaining disk space, even after failed builds.
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
		))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
611
612
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

# The web status is only enabled when a bind address/port is configured;
# with credentials present, privileged actions require authentication.
if ini.has_option("phase2", "status_bind"):
	if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("phase2", "status_bind"), authz=authz_cfg))
	else:
		c['status'].append(html.WebStatus(http_port=ini.get("phase2", "status_bind")))
641
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}