buildmaster: fix Twisted dependency hell by using twisted==22.10.0
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import sys
7 import base64
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
39
40
# Record this master's PID for external management, but never clobber a
# pid file that already exists (twistd may have written its own).
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
44
# Load the INI configuration; the path can be overridden via the
# BUILDMASTER_CONFIG environment variable.
45 ini = configparser.ConfigParser()
46 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
47
# Public URL of this master; passed to the per-worker cleanup scripts below.
48 buildbot_url = ini.get("phase2", "buildbot_url")
49
50 # This is a sample buildmaster config file. It must be installed as
51 # 'master.cfg' in your buildmaster's base directory.
52
53 # This is the dictionary that the buildmaster pays attention to. We also use
54 # a shorter alias to save typing.
55 c = BuildmasterConfig = {}
56
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

c['workers'] = []
max_builds = dict()

# Register every "[worker ...]" INI section whose phase is 2.  A worker
# limited to one concurrent build defaults to sharing its work directory
# across builders; the shared_wd option can override that default.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # Bug fix: compare this worker's build count.  The previous
                # code compared the whole max_builds dict against 1, which is
                # always unequal, so the error fired for every worker that
                # explicitly enabled shared_wd.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
96
97 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
98 # This must match the value configured into the buildworkers (with their
99 # --master option)
100 c['protocols'] = {'pb': {'port': worker_port}}
101
# coalesce builds -- note this also causes pending requests to be marked
# complete when a new buildset arrives; getNewestCompleteTime() below
# compensates for that by filtering out SKIPPED requests.
102 # coalesce builds
103 c['collapseRequests'] = True
104
105 # Reduce amount of backlog data
106 c['configurators'] = [util.JanitorConfigurator(
107 logHorizon=timedelta(days=3),
108 hour=6,
109 )]
110
111 ####### CHANGESOURCES
112
# Master-side working directory and helper-script location.
113 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
114 scripts_dir = os.path.abspath("../scripts")
115
# rsync endpoints: binaries (required), sources and SDK (optional).
116 rsync_bin_url = ini.get("rsync", "binary_url")
117 rsync_bin_key = ini.get("rsync", "binary_password")
118
119 rsync_src_url = None
120 rsync_src_key = None
121
122 if ini.has_option("rsync", "source_url"):
123 rsync_src_url = ini.get("rsync", "source_url")
124 rsync_src_key = ini.get("rsync", "source_password")
125
126 rsync_sdk_url = None
127 rsync_sdk_key = None
128 rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
129
130 if ini.has_option("rsync", "sdk_url"):
131 rsync_sdk_url = ini.get("rsync", "sdk_url")
132
133 if ini.has_option("rsync", "sdk_password"):
134 rsync_sdk_key = ini.get("rsync", "sdk_password")
135
136 if ini.has_option("rsync", "sdk_pattern"):
137 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
138
# Options prepended to every rsync invocation (IPv4, verbose, 120s timeout).
139 rsync_defopts = ["-4", "-v", "--timeout=120"]
140
141 repo_url = ini.get("repo", "url")
142 repo_branch = "master"
143
144 if ini.has_option("repo", "branch"):
145 repo_branch = ini.get("repo", "branch")
146
# usign package-signing key (base64 secret key) and the comment line used
# when deriving the public key via UsignSec2Pub() below.
147 usign_key = None
148 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
149
150 if ini.has_option("usign", "key"):
151 usign_key = ini.get("usign", "key")
152
153 if ini.has_option("usign", "comment"):
154 usign_comment = ini.get("usign", "comment")
155
156
# find arches
#
# Clone (or refresh) the source tree, then ask its dump-target-info.pl
# helper which package architectures exist.  Each entry of `arches` is the
# whitespace-split helper output; at[0] is the architecture name and at[1]
# is used later as a "target/subtarget" path (see ts = arch[1].split('/')).
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Iterate the pipe directly instead of a manual while/readline() loop.
for line in findarches.stdout:
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])

# Reap the helper so it does not linger as a zombie process.
findarches.wait()
177
178
# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    """Parse one feeds.conf line and register src-git* feeds for polling.

    Appends the raw entry to `feeds`, records the branch per repository
    URL in `feedbranches` and attaches a GitPoller change source.
    Non-git feed types are ignored.
    """
    fields = line.strip().split()
    if not fields[0].startswith("src-git"):
        return
    feeds.append(fields)
    repo_spec = fields[2].strip().split(';')
    feed_branch = repo_spec[1] if len(repo_spec) > 1 else 'master'
    feedbranches[repo_spec[0]] = feed_branch
    c['change_source'].append(GitPoller(
        repo_spec[0],
        branch=feed_branch,
        workdir='%s/%s.git' %(os.getcwd(), fields[1]),
        pollinterval=300))
193
# Ask the SDK makefile for the base feed definition and register it first.
194 make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
195 env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
196
# Only the first output line is consumed; it holds the feed spec (if any).
197 line = make.stdout.readline()
198 if line:
199 parse_feed_entry(str(line, 'utf-8'))
200
# Then register every feed listed in the tree's default feed configuration.
201 with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
202 for line in f:
203 parse_feed_entry(line)
204
# Refuse to start without at least one change source.
205 if len(c['change_source']) == 0:
206 log.err("FATAL ERROR: no change_sources defined, aborting!")
207 sys.exit(-1)
208
209 ####### SCHEDULERS
210
211 # Configure the Schedulers, which decide how to react to incoming changes. In this
212 # case, just kick off a 'basebuild' build
213
214 c['schedulers'] = []
# Automatic scheduler: only react to commits on the branch each feed was
# registered with (see feedbranches above); build every architecture.
215 c['schedulers'].append(SingleBranchScheduler(
216 name = "all",
217 change_filter = filter.ChangeFilter(
218 filter_fn = lambda change: change.branch == feedbranches[change.repository]
219 ),
220 treeStableTimer = 60,
221 builderNames = archnames))
222
# Manual scheduler: drives the 00_force_build builder, which in turn
# triggers the per-architecture builders (see the Trigger steps below).
223 c['schedulers'].append(ForceScheduler(
224 name = "force",
225 buttonName = "Force builds",
226 label = "Force build details",
227 builderNames = [ "00_force_build" ],
228
229 codebases = [
230 util.CodebaseParameter(
231 "",
232 label = "Repository",
233 branch = util.FixedParameter(name = "branch", default = ""),
234 revision = util.FixedParameter(name = "revision", default = ""),
235 repository = util.FixedParameter(name = "repository", default = ""),
236 project = util.FixedParameter(name = "project", default = "")
237 )
238 ],
239
240 reason = util.StringParameter(
241 name = "reason",
242 label = "Reason",
243 default = "Trigger build",
244 required = True,
245 size = 80
246 ),
247
# The "options/architecture" choice is consumed by IsArchitectureSelected()
# to decide which trigger steps actually run.
248 properties = [
249 util.NestedParameter(
250 name="options",
251 label="Build Options",
252 layout="vertical",
253 fields=[
254 util.ChoiceStringParameter(
255 name = "architecture",
256 label = "Build architecture",
257 default = "all",
258 choices = [ "all" ] + archnames
259 )
260 ]
261 )
262 ]
263 ))
264
265 ####### BUILDERS
266
267 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
268 # what steps, and which workers can execute them. Note that any particular build will
269 # only take place on one worker.
270
@properties.renderer
def GetDirectorySuffix(props):
    """Render a "-YY.MM" upload-path suffix for release builds.

    Returns "" when there is no release_version property or it does not
    look like a release/rc/snapshot version string.
    """
    if not props.hasProperty("release_version"):
        return ""
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    match = verpat.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
279
@properties.renderer
def GetNumJobs(props):
    """Render the make -jN parallelism for the current worker.

    Divides the worker's CPU count evenly among its allowed concurrent
    builds.  Uses floor division and clamps to at least 1: the previous
    true division could render e.g. "2.5" or "0.5", which make -jN
    rejects.  Falls back to "1" when the properties are missing.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        return str(max(1, int(props["nproc"]) // max_builds[props["workername"]]))
    else:
        return "1"
286
@properties.renderer
def GetCwd(props):
    """Render a best-effort build directory (used as CCACHE_BASEDIR)."""
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
295
def IsArchitectureSelected(target):
    """Build a doStepIf predicate matching the forced-build arch choice.

    The returned callable inspects the step's "options" property (set by
    the force scheduler); it returns False only when a concrete
    architecture other than `target` was selected, True in every other
    case (no property, non-dict value, or "all").
    """
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            selected = options.get("architecture", "all")
            if selected not in ("all", target):
                return False
        return True

    return CheckArchitectureProperty
310
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    Returns a two-line string: the comment with a trailing "secret key"
    rewritten to "public key", then the base64 public key material
    extracted from the decoded secret key.  Returns None if `seckey` is
    not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # b64encode() returns bytes; decode to str so the key file does not
    # end up containing a literal "b'...'" representation.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
319
def IsSharedWorkdir(step):
    """doStepIf predicate: True when this worker uses a shared workdir."""
    shared = step.getProperty("shared_wd")
    return True if shared else False
322
323 @defer.inlineCallbacks
324 def getNewestCompleteTime(bldr):
325 """Returns the complete_at of the latest completed and not SKIPPED
326 build request for this builder, or None if there are no such build
327 requests. We need to filter out SKIPPED requests because we're
328 using collapseRequests=True which is unfortunately marking all
329 previous requests as complete when new buildset is created.
330
331 @returns: datetime instance or None, via Deferred
332 """
333
# Newest completed, non-skipped build request for this builder.
334 bldrid = yield bldr.getBuilderId()
335 completed = yield bldr.master.data.get(
336 ('builders', bldrid, 'buildrequests'),
337 [
338 resultspec.Filter('complete', 'eq', [True]),
339 resultspec.Filter('results', 'ne', [results.SKIPPED]),
340 ],
341 order=['-complete_at'], limit=1)
342 if not completed:
343 return
344
345 complete_at = completed[0]['complete_at']
346
# Also check the most recently *started* build: if it finished later than
# the newest request above, prefer its completion time.
347 last_build = yield bldr.master.data.get(
348 ('builds', ),
349 [
350 resultspec.Filter('builderid', 'eq', [bldrid]),
351 ],
352 order=['-started_at'], limit=1)
353
354 if last_build and last_build[0]:
355 last_complete_at = last_build[0]['complete_at']
356 if last_complete_at and (last_complete_at > complete_at):
357 return last_complete_at
358
359 return complete_at
360
361 @defer.inlineCallbacks
362 def prioritizeBuilders(master, builders):
363 """Returns sorted list of builders by their last timestamp of completed and
364 not skipped build.
365
366 @returns: list of sorted builders
367 """
368
# True while the builder has any in-flight (or stale in-flight) builds.
369 def is_building(bldr):
370 return bool(bldr.building) or bool(bldr.old_building)
371
# Pair each builder with its newest completion time (Deferred).
372 def bldr_info(bldr):
373 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
374 d.addCallback(lambda complete_at: (complete_at, bldr))
375 return d
376
# Sort key: never-built builders first (datetime.min), currently-building
# builders last (datetime.max); ties broken by builder name.
377 def bldr_sort(item):
378 (complete_at, bldr) = item
379
380 if not complete_at:
381 date = datetime.min
382 complete_at = date.replace(tzinfo=tzutc())
383
384 if is_building(bldr):
385 date = datetime.max
386 complete_at = date.replace(tzinfo=tzutc())
387
388 return (complete_at, bldr.name)
389
# NOTE(review): this local `results` shadows the imported
# buildbot.process.results module within this function (harmless here,
# since the module is not referenced below, but worth renaming).
390 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
391 results.sort(key=bldr_sort)
392
393 for r in results:
394 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
395
396 return [r[1] for r in results]
397
398 c['prioritizeBuilders'] = prioritizeBuilders
399 c['builders'] = []
400
# NOTE(review): dlLock appears unused in this file -- confirm against the
# rest of the deployment before removing.
401 dlLock = locks.WorkerLock("worker_dl")
402
403 workerNames = [ ]
404
405 for worker in c['workers']:
406 workerNames.append(worker.workername)
407
# The force builder only runs Trigger steps (added per-arch below) that
# fan out to the real per-architecture builders.
408 force_factory = BuildFactory()
409
410 c['builders'].append(BuilderConfig(
411 name = "00_force_build",
412 workernames = workerNames,
413 factory = force_factory))
414
# One builder per package architecture.  arch[0] is the architecture name
# (builder/scheduler names, upload paths); arch[1] holds a
# "target/subtarget" path used to locate the matching SDK archive.
415 for arch in arches:
416 ts = arch[1].split('/')
417
418 factory = BuildFactory()
419
420 # setup shared work directory if required
421 factory.addStep(ShellCommand(
422 name = "sharedwd",
423 description = "Setting up shared work directory",
424 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
425 workdir = ".",
426 haltOnFailure = True,
427 doStepIf = IsSharedWorkdir))
428
429 # find number of cores
430 factory.addStep(SetPropertyFromCommand(
431 name = "nproc",
432 property = "nproc",
433 description = "Finding number of CPUs",
434 command = ["nproc"]))
435
436 # prepare workspace
437 factory.addStep(FileDownload(
438 mastersrc = scripts_dir + '/cleanup.sh',
439 workerdest = "../cleanup.sh",
440 mode = 0o755))
441
# Non-persistent masters wipe the whole build area; everyone cleans the
# per-build area ("single") before starting.
442 if not persistent:
443 factory.addStep(ShellCommand(
444 name = "cleanold",
445 description = "Cleaning previous builds",
446 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
447 workdir = ".",
448 haltOnFailure = True,
449 timeout = 2400))
450
451 factory.addStep(ShellCommand(
452 name = "cleanup",
453 description = "Cleaning work area",
454 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
455 workdir = ".",
456 haltOnFailure = True,
457 timeout = 2400))
458
# Fetch and unpack the per-target SDK, then incrementally sync it into
# the persistent sdk/ directory.
459 factory.addStep(ShellCommand(
460 name = "mksdkdir",
461 description = "Preparing SDK directory",
462 command = ["mkdir", "-p", "sdk"],
463 haltOnFailure = True))
464
465 factory.addStep(ShellCommand(
466 name = "downloadsdk",
467 description = "Downloading SDK archive",
468 command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
469 env={'RSYNC_PASSWORD': rsync_sdk_key},
470 haltOnFailure = True,
471 logEnviron = False))
472
473 factory.addStep(ShellCommand(
474 name = "unpacksdk",
475 description = "Unpacking SDK archive",
476 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
477 haltOnFailure = True))
478
479 factory.addStep(ShellCommand(
480 name = "updatesdk",
481 description = "Updating SDK",
482 command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
483 haltOnFailure = True))
484
# Drop absolute host-tool symlinks that do not point into /bin or /usr/bin.
485 factory.addStep(ShellCommand(
486 name = "cleancmdlinks",
487 description = "Sanitizing host command symlinks",
488 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
489 haltOnFailure = True))
490
# Tiny makefile that prints the SDK's VERSION_NUMBER for the
# release_version property (consumed by GetDirectorySuffix).
491 factory.addStep(StringDownload(
492 name = "writeversionmk",
493 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
494 workerdest = "sdk/getversion.mk",
495 mode = 0o755))
496
497 factory.addStep(SetPropertyFromCommand(
498 name = "getversion",
499 property = "release_version",
500 description = "Finding SDK release version",
501 workdir = "build/sdk",
502 command = ["make", "-f", "getversion.mk"]))
503
504 # install build key
505 if usign_key is not None:
506 factory.addStep(StringDownload(
507 name = "dlkeybuildpub",
508 s = UsignSec2Pub(usign_key, usign_comment),
509 workerdest = "sdk/key-build.pub",
510 mode = 0o600))
511
# Placeholder private key/cert: actual signing happens on the master.
512 factory.addStep(StringDownload(
513 name = "dlkeybuild",
514 s = "# fake private key",
515 workerdest = "sdk/key-build",
516 mode = 0o600))
517
518 factory.addStep(StringDownload(
519 name = "dlkeybuilducert",
520 s = "# fake certificate",
521 workerdest = "sdk/key-build.ucert",
522 mode = 0o600))
523
# Share the download cache across builds via $HOME/dl.
524 factory.addStep(ShellCommand(
525 name = "mkdldir",
526 description = "Preparing download directory",
527 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
528 haltOnFailure = True))
529
530 factory.addStep(ShellCommand(
531 name = "mkconf",
532 description = "Preparing SDK configuration",
533 workdir = "build/sdk",
534 command = ["sh", "-c", "rm -f .config && make defconfig"]))
535
536 factory.addStep(FileDownload(
537 mastersrc = scripts_dir + '/ccache.sh',
538 workerdest = 'sdk/ccache.sh',
539 mode = 0o755))
540
541 factory.addStep(ShellCommand(
542 name = "prepccache",
543 description = "Preparing ccache",
544 workdir = "build/sdk",
545 command = ["./ccache.sh"],
546 haltOnFailure = True))
547
548 factory.addStep(ShellCommand(
549 name = "updatefeeds",
550 description = "Updating feeds",
551 workdir = "build/sdk",
552 command = ["./scripts/feeds", "update", "-f"],
553 haltOnFailure = True))
554
555 factory.addStep(ShellCommand(
556 name = "installfeeds",
557 description = "Installing feeds",
558 workdir = "build/sdk",
559 command = ["./scripts/feeds", "install", "-a"],
560 haltOnFailure = True))
561
562 factory.addStep(ShellCommand(
563 name = "logclear",
564 description = "Clearing failure logs",
565 workdir = "build/sdk",
566 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
567 haltOnFailure = False,
568 flunkOnFailure = False,
569 warnOnFailure = True,
570 ))
571
# Main package build; -jN comes from the GetNumJobs renderer above.
572 factory.addStep(ShellCommand(
573 name = "compile",
574 description = "Building packages",
575 workdir = "build/sdk",
576 timeout = 3600,
577 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
578 env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
579 haltOnFailure = True))
580
581 factory.addStep(ShellCommand(
582 name = "mkfeedsconf",
583 description = "Generating pinned feeds.conf",
584 workdir = "build/sdk",
585 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
586
587 factory.addStep(ShellCommand(
588 name = "checksums",
589 description = "Calculating checksums",
590 descriptionDone="Checksums calculated",
591 workdir = "build/sdk",
592 command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums)",
593 haltOnFailure = True
594 ))
595
# Signing round-trip: pack the Packages indexes, upload to the master,
# sign there with signall.sh, then download and unpack the result.
596 if ini.has_option("gpg", "key") or usign_key is not None:
597 factory.addStep(MasterShellCommand(
598 name = "signprepare",
599 description = "Preparing temporary signing directory",
600 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
601 haltOnFailure = True
602 ))
603
604 factory.addStep(ShellCommand(
605 name = "signpack",
606 description = "Packing files to sign",
607 workdir = "build/sdk",
608 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
609 haltOnFailure = True
610 ))
611
612 factory.addStep(FileUpload(
613 workersrc = "sdk/sign.tar.gz",
614 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
615 haltOnFailure = True
616 ))
617
618 factory.addStep(MasterShellCommand(
619 name = "signfiles",
620 description = "Signing files",
621 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
622 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
623 haltOnFailure = True
624 ))
625
626 factory.addStep(FileDownload(
627 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
628 workerdest = "sdk/sign.tar.gz",
629 haltOnFailure = True
630 ))
631
632 factory.addStep(ShellCommand(
633 name = "signunpack",
634 description = "Unpacking signed files",
635 workdir = "build/sdk",
636 command = ["tar", "-xzf", "sign.tar.gz"],
637 haltOnFailure = True
638 ))
639
640 # download remote sha256sums to 'arch-sha256sums'
641 factory.addStep(ShellCommand(
642 name = "target-sha256sums",
643 description = "Fetching remote sha256sums for arch",
644 command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
645 env={'RSYNC_PASSWORD': rsync_bin_key},
646 logEnviron = False,
647 haltOnFailure = False,
648 flunkOnFailure = False,
649 warnOnFailure = False,
650 ))
651
652 factory.addStep(FileDownload(
653 name="dlrsync.sh",
654 mastersrc = scripts_dir + "/rsync.sh",
655 workerdest = "../rsync.sh",
656 mode = 0o755
657 ))
658
659 factory.addStep(FileDownload(
660 name = "dlsha2rsyncpl",
661 mastersrc = "sha2rsync.pl",
662 workerdest = "../sha2rsync.pl",
663 mode = 0o755,
664 ))
665
# Diff local vs. remote sha256sums to upload only changed files.
666 factory.addStep(ShellCommand(
667 name = "buildlist",
668 description = "Building list of files to upload",
669 workdir = "build/sdk",
670 command = ["../../../sha2rsync.pl", "../../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
671 haltOnFailure = True,
672 ))
673
674 factory.addStep(ShellCommand(
675 name = "uploadprepare",
676 description = "Preparing package directory",
677 workdir = "build/sdk",
678 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
679 env={'RSYNC_PASSWORD': rsync_bin_key},
680 haltOnFailure = True,
681 logEnviron = False
682 ))
683
684 factory.addStep(ShellCommand(
685 name = "packageupload",
686 description = "Uploading package files",
687 workdir = "build/sdk",
688 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
689 env={'RSYNC_PASSWORD': rsync_bin_key},
690 haltOnFailure = True,
691 logEnviron = False
692 ))
693
# Second pass deletes remote files that no longer exist locally.
694 factory.addStep(ShellCommand(
695 name = "packageprune",
696 description = "Pruning package files",
697 workdir = "build/sdk",
698 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
699 env={'RSYNC_PASSWORD': rsync_bin_key},
700 haltOnFailure = True,
701 logEnviron = False
702 ))
703
704 factory.addStep(ShellCommand(
705 name = "logprepare",
706 description = "Preparing log directory",
707 workdir = "build/sdk",
708 command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
709 env={'RSYNC_PASSWORD': rsync_bin_key},
710 haltOnFailure = True,
711 logEnviron = False
712 ))
713
# Collect build logs only for packages listed in error.txt.
714 factory.addStep(ShellCommand(
715 name = "logfind",
716 description = "Finding failure logs",
717 workdir = "build/sdk/logs/package/feeds",
718 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
719 haltOnFailure = False,
720 flunkOnFailure = False,
721 warnOnFailure = True,
722 ))
723
724 factory.addStep(ShellCommand(
725 name = "logcollect",
726 description = "Collecting failure logs",
727 workdir = "build/sdk",
728 command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
729 haltOnFailure = False,
730 flunkOnFailure = False,
731 warnOnFailure = True,
732 ))
733
734 factory.addStep(ShellCommand(
735 name = "logupload",
736 description = "Uploading failure logs",
737 workdir = "build/sdk",
738 command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
739 env={'RSYNC_PASSWORD': rsync_bin_key},
740 haltOnFailure = False,
741 flunkOnFailure = False,
742 warnOnFailure = True,
743 logEnviron = False
744 ))
745
# Optionally mirror freshly downloaded source tarballs (newer than the
# SDK archive) to the source mirror.
746 if rsync_src_url is not None:
747 factory.addStep(ShellCommand(
748 name = "sourcelist",
749 description = "Finding source archives to upload",
750 workdir = "build/sdk",
751 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
752 haltOnFailure = True
753 ))
754
755 factory.addStep(ShellCommand(
756 name = "sourceupload",
757 description = "Uploading source archives",
758 workdir = "build/sdk",
759 command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
760 Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
761 env={'RSYNC_PASSWORD': rsync_src_key},
762 haltOnFailure = False,
763 flunkOnFailure = False,
764 warnOnFailure = True,
765 logEnviron = False
766 ))
767
# Diagnostics that always run, even after failures.
768 factory.addStep(ShellCommand(
769 name = "df",
770 description = "Reporting disk usage",
771 command=["df", "-h", "."],
772 env={'LC_ALL': 'C'},
773 haltOnFailure = False,
774 flunkOnFailure = False,
775 warnOnFailure = False,
776 alwaysRun = True
777 ))
778
779 factory.addStep(ShellCommand(
780 name = "du",
781 description = "Reporting estimated file space usage",
782 command=["du", "-sh", "."],
783 env={'LC_ALL': 'C'},
784 haltOnFailure = False,
785 flunkOnFailure = False,
786 warnOnFailure = False,
787 alwaysRun = True
788 ))
789
790 factory.addStep(ShellCommand(
791 name = "ccachestat",
792 description = "Reporting ccache stats",
793 command=["ccache", "-s"],
794 want_stderr = False,
795 haltOnFailure = False,
796 flunkOnFailure = False,
797 warnOnFailure = False,
798 alwaysRun = True,
799 ))
800
801 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
802
# Wire this architecture into the force builder: a Triggerable scheduler
# plus a Trigger step gated by the forced-build architecture choice.
803 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
804 force_factory.addStep(steps.Trigger(
805 name = "trigger_%s" % arch[0],
806 description = "Triggering %s build" % arch[0],
807 schedulerNames = [ "trigger_%s" % arch[0] ],
808 set_properties = { "reason": Property("reason") },
809 doStepIf = IsArchitectureSelected(arch[0])
810 ))
811
812 ####### STATUS TARGETS
813
814 # Optional web UI configuration; only enabled when the config provides a
815 # bind address, with optional basic auth restricting control endpoints
816 # to the configured admin user.
817
818 if ini.has_option("phase2", "status_bind"):
819 c['www'] = {
820 'port': ini.get("phase2", "status_bind"),
821 'plugins': {
822 'waterfall_view': True,
823 'console_view': True,
824 'grid_view': True
825 }
826 }
827
828 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
829 c['www']['auth'] = util.UserPasswordAuth([
830 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
831 ])
832 c['www']['authz'] = util.Authz(
833 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
834 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
835 )
836
837 ####### PROJECT IDENTITY
838
839 # the 'title' string will appear at the top of this buildbot
840 # installation's html.WebStatus home page (linked to the
841 # 'titleURL') and is embedded in the title of the waterfall HTML page.
842
843 c['title'] = ini.get("general", "title")
844 c['titleURL'] = ini.get("general", "title_url")
845
846 # the 'buildbotURL' string should point to the location where the buildbot's
847 # internal web server (usually the html.WebStatus page) is visible. This
848 # typically uses the port number set in the Waterfall 'status' entry, but
849 # with an externally-visible host name which the buildbot cannot figure out
850 # without some help.
851
852 c['buildbotURL'] = buildbot_url
853
854 ####### DB URL
855
856 c['db'] = {
857 # This specifies what database buildbot uses to store its state. You can leave
858 # this at its default for all but the largest installations.
859 'db_url' : "sqlite:///state.sqlite",
860 }
861
# Disable phoning home with usage statistics.
862 c['buildbotNetUsageData'] = None