phase1: remove unused 'cc_version'
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record the master's PID once at startup so external tooling can supervise
# the process; never clobber a pid file that already exists.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# All deployment-specific settings live in an INI file next to the master;
# its path can be overridden via the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")
74
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9989

if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")

c['workers'] = []
NetLocks = dict()

# Each "worker <name>" INI section describes one buildworker. A worker
# belongs to phase 1 when it has no "phase" option or "phase = 1".
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Per-worker properties consumed later by build steps and renderers.
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
            sl_props['max_builds'] = max_builds
            # A single-slot worker can safely share one work directory.
            if max_builds == 1:
                sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUGFIX: this previously read the "dl_lock" option, so a
                # worker's upload lock silently reused its download lock name.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
# (daily at 06:00, drop build logs older than three days)
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
134
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also consider the most recently started build: it may have finished
    # later than the newest completed request found above.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
172
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Builders with active (or stale) builds should sort last.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders get the oldest possible timestamp so they
        # are scheduled first.
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders get the newest possible timestamp so they sort last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from 'results' to avoid shadowing the buildbot.process.results
    # module imported at the top of this file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for r in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in infos]

c['prioritizeBuilders'] = prioritizeBuilders
211
####### CHANGESOURCES

# Build tree / helper script locations.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
tree_expire = 0  # days before a worker build tree is expired (0 = never)

# NOTE(review): these two defaults appear unused in this file — the build
# steps read the cc_command/cxx_command *properties* instead; confirm before
# removing.
cc_command = "gcc"
cxx_command = "g++"

config_seed = ""

git_ssh = False
git_ssh_key = None

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH access is useless without a key, so force it off.
    git_ssh = False

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# rsync targets for built binaries (and optionally sources).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# rsync-protocol destinations (host::module or rsync://) accept --contimeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]

# usign package-signing key; the comment defaults to e.g.
# "untrusted comment: Openwrt 19.07 key" for branch "openwrt-19.07".
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

enable_kmod_archive = False
embed_kmod_repository = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")

if ini.has_option("phase1", "kmod_repository"):
    embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
282
# find targets
#
# Clone (or refresh) a shallow checkout of the source tree, then ask its
# dump-target-info.pl helper for the buildable targets. Each output line is
# "<target> <extra...>"; only the first column is kept.
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Iterate the pipe directly instead of a manual readline() loop.
for line in findtargets.stdout:
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])

# Reap the helper so it does not linger as a zombie process.
findtargets.wait()
301
302
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

# Poll the configured repository branch every five minutes for new commits.
c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
311
312 ####### SCHEDULERS
313
314 # Configure the Schedulers, which decide how to react to incoming changes. In this
315 # case, just kick off a 'basebuild' build
316
class TagChoiceParameter(BaseParameter):
    """Force-scheduler list parameter offering the release tags that match
    the configured repo branch (e.g. v19.07.* tags for a *-19.07 branch)."""

    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        """Query `git ls-remote --tags` on every access and return the matching
        release tags, newest first, with '' (no tag) prepended."""
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            # Iterate the pipe directly instead of a manual readline() loop.
            for line in findtags.stdout:
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                if tagver and tagver[1].startswith(basever[1]):
                    taglist.append(tagver[1])

            # Reap the child so it does not linger as a zombie.
            findtags.wait()

            # Newest first; suffix non-rc tags with '-z' so a final release
            # sorts ahead of its release candidates.
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        """Reject any value not present in the most recently computed choices."""
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
358
c['schedulers'] = []
# Automatic scheduler: build every target once commits land on repo_branch
# and the tree has been quiet for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual scheduler: attached to the single "00_force_build" dispatcher
# builder; the nested 'options' property carries target and optional tag.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # All codebase fields are fixed/empty: the repository is configured
    # globally, not selected per forced build.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
412
413 ####### BUILDERS
414
415 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
416 # what steps, and which workers can execute them. Note that any particular build will
417 # only take place on one worker.
418
# Map of user-selectable clean levels (matched against the build's "clean"
# property) to the corresponding make target.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
426
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is True when the build's 'clean'
    property matches the given regex pattern (anchored at the start)."""
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        # bool() collapses the verbose if/else; re.match anchors at the start.
        return bool(val and re.match(pattern, val))

    return CheckCleanProperty
436
def IsSharedWorkdir(step):
    """True when the worker is configured with a shared work directory."""
    return bool(step.getProperty("shared_wd"))

def IsCleanupRequested(step):
    """True when cleanup.sh runs are requested; never for shared workdirs."""
    if IsSharedWorkdir(step):
        return False
    # bool() collapses the verbose if/else returns of the original.
    return bool(step.getProperty("do_cleanup"))

def IsExpireRequested(step):
    """True when the tree-expiry check should run: only for non-shared
    workdirs that do not get a full cleanup instead."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
454
def IsGitFreshRequested(step):
    """True when a pristine checkout (git clean -x) is wanted via do_cleanup."""
    # bool() collapses the verbose if/else returns of the original.
    return bool(step.getProperty("do_cleanup"))

def IsGitCleanRequested(step):
    """Inverse of IsGitFreshRequested: plain 'git clean' checkout."""
    return not IsGitFreshRequested(step)

def IsTaggingRequested(step):
    """True when the 'tag' property names a release tag (X.Y.Z or X.Y.Z-rcN)."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))

def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested."""
    return not IsTaggingRequested(step)

def IsNoMasterBuild(step):
    """True when building a release branch rather than master."""
    return repo_branch != "master"
477
def GetBaseVersion():
    """Extract the 'X.Y' base version from a branch named like 'name-X.Y';
    any other branch shape is treated as master."""
    if not re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return "master"
    return repo_branch.split('-')[1]
483
@properties.renderer
def GetVersionPrefix(props):
    """Upload directory prefix: 'X.Y.Z/' for tagged release builds,
    'X.Y-SNAPSHOT/' on release branches, '' on master."""
    basever = GetBaseVersion()
    tag = props["tag"] if props.hasProperty("tag") else None
    if tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag):
        return "%s/" % tag
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""

@properties.renderer
def GetNumJobs(props):
    """Parallel make jobs: worker CPU count split evenly across build slots."""
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / props["max_builds"]))

@properties.renderer
def GetCC(props):
    """C compiler command discovered by the findbin step, defaulting to gcc."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"

@properties.renderer
def GetCXX(props):
    """C++ compiler command discovered by the findbin step, defaulting to g++."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"

@properties.renderer
def GetCwd(props):
    """Working location on the worker: prefer builddir, then workdir, then '/'."""
    if props.hasProperty("builddir"):
        return props["builddir"]
    if props.hasProperty("workdir"):
        return props["workdir"]
    return "/"

@properties.renderer
def GetCCache(props):
    """Path of the ccache binary when the lookup step found one, else ''."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
530
def GetNextBuild(builder, requests):
    """Choose the next build request for a builder: requests carrying a 'tag'
    property (forced release builds) jump the queue, otherwise take the
    oldest pending request."""
    for r in requests:
        if r.properties and r.properties.hasProperty("tag"):
            return r

    r = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
    return r
539
def MakeEnv(overrides=None, tryccache=False):
    """Assemble the environment for make/feeds invocations.

    CCC/CCXX always carry the discovered host compilers. With tryccache the
    effective CC/CXX point at the ccache wrapper scripts in the build dir and
    CCACHE names the ccache binary; otherwise CC/CXX alias the compilers
    directly and CCACHE stays empty. 'overrides' entries win over everything.
    """
    cc = Interpolate("%(kw:cc)s", cc=GetCC)
    cxx = Interpolate("%(kw:cxx)s", cxx=GetCXX)
    env = {
        'CCC': cc,
        'CCXX': cxx,
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'] = cc
        env['CXX'] = cxx
        env['CCACHE'] = ''
    if overrides is not None:
        env.update(overrides)
    return env
556
@properties.renderer
def NetLockDl(props):
    """Step locks for download traffic: the worker's dl_lock, exclusive."""
    lock = None
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    else:
        return []

@properties.renderer
def NetLockUl(props):
    """Step locks for upload traffic: the worker's ul_lock, exclusive."""
    lock = None
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
    if lock is not None:
        return [lock.access('exclusive')]
    else:
        return []

@util.renderer
def TagPropertyValue(props):
    """Extract the 'tag' entry from the force scheduler's nested 'options'
    property, or None when absent."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
    return None
584
def IsTargetSelected(target):
    """Return a doStepIf predicate that is True when the forced build picked
    this target (or 'all'); non-forced builds have no 'options' and pass."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_target = options.get("target", "all")
                if selected_target != "all" and selected_target != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckTargetProperty
599
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign/signify public-key file content from a base64-encoded
    secret key.

    The public blob is assembled from the decoded secret key's algorithm
    bytes [0:2], key id [32:40] and the public half of the keypair [72:].
    Returns the two-line key file as a str, or None if seckey is not valid
    base64 input.
    """
    try:
        seckey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # Narrowed from a bare except; binascii.Error (bad base64) is a
        # ValueError subclass, TypeError covers non-string input.
        return None

    # BUGFIX: b64encode() returns bytes on Python 3; without .decode() the
    # formatted result embedded a literal "b'...'" in the public key file.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
608
609
c['builders'] = []

# Serialize dl/ directory population per worker host.
dlLock = locks.WorkerLock("worker_dl")

# Shell helper prepended to IfBuiltinShellCommand payloads. It exits 0 when
# any file in $CHANGED_FILES may affect a built-in (=y) package of the
# current .config (or touches something outside package/), meaning the
# wrapped command should run; the whitespace is collapsed to one line.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
	checkBuiltin() {
		local symbol op path file;
		for file in $CHANGED_FILES; do
			case "$file" in
				package/*/*) : ;;
				*) return 0 ;;
			esac;
		done;
		while read symbol op path; do
			case "$symbol" in package-*)
				symbol="${symbol##*(}";
				symbol="${symbol%)}";
				for file in $CHANGED_FILES; do
					case "$file" in "package/$path/"*)
						grep -qsx "$symbol=y" .config && return 0
					;; esac;
				done;
			esac;
		done < tmp/.packagedeps;
		return 1;
	}
""").strip()
637
638
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand that only runs its payload when one of the changed files
    can affect a built-in (=y) package of the current .config.

    The checkBuiltin shell helper defined above is prepended to the command;
    when it decides the change set is irrelevant, the step exits 0 without
    running the payload."""

    def _quote(self, arg):
        # Shell-quote one argument, but only when it contains characters that
        # are unsafe outside single quotes.
        if re.search("[^a-zA-Z0-9/_.-]", arg):
            return "'%s'" %(re.sub("'", "'\"'\"'", arg))
        return arg

    def setCommand(self, command):
        # BUGFIX: 'unicode' does not exist on Python 3; plain str covers all
        # text commands, anything else is a sequence of arguments.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # Export the files touched by the triggering changes so the
        # checkBuiltin helper can inspect them via $CHANGED_FILES.
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullWorkerEnv = workerEnv.copy()
        fullWorkerEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullWorkerEnv
666
# Flat list of all phase-1 worker names, shared by every builder config.
workerNames = [worker.workername for worker in c['workers']]

# Dispatcher builder targeted by the force scheduler; it performs no work
# itself, the force properties fan out to the real target builders.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
678
679 for target in targets:
680 ts = target.split('/')
681
682 factory = BuildFactory()
683
684 # setup shared work directory if required
685 factory.addStep(ShellCommand(
686 name = "sharedwd",
687 description = "Setting up shared work directory",
688 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
689 workdir = ".",
690 haltOnFailure = True,
691 doStepIf = IsSharedWorkdir))
692
693 # find number of cores
694 factory.addStep(SetPropertyFromCommand(
695 name = "nproc",
696 property = "nproc",
697 description = "Finding number of CPUs",
698 command = ["nproc"]))
699
700 # find gcc and g++ compilers
701 factory.addStep(FileDownload(
702 name = "dlfindbinpl",
703 mastersrc = scripts_dir + '/findbin.pl',
704 workerdest = "../findbin.pl",
705 mode = 0o755))
706
707 factory.addStep(SetPropertyFromCommand(
708 name = "gcc",
709 property = "cc_command",
710 description = "Finding gcc command",
711 command = [
712 "../findbin.pl", "gcc", "", "",
713 ],
714 haltOnFailure = True))
715
716 factory.addStep(SetPropertyFromCommand(
717 name = "g++",
718 property = "cxx_command",
719 description = "Finding g++ command",
720 command = [
721 "../findbin.pl", "g++", "", "",
722 ],
723 haltOnFailure = True))
724
725 # see if ccache is available
726 factory.addStep(SetPropertyFromCommand(
727 property = "ccache_command",
728 command = ["which", "ccache"],
729 description = "Testing for ccache command",
730 haltOnFailure = False,
731 flunkOnFailure = False,
732 warnOnFailure = False,
733 ))
734
735 # expire tree if needed
736 if tree_expire > 0:
737 factory.addStep(FileDownload(
738 name = "dlexpiresh",
739 doStepIf = IsExpireRequested,
740 mastersrc = scripts_dir + '/expire.sh',
741 workerdest = "../expire.sh",
742 mode = 0o755))
743
744 factory.addStep(ShellCommand(
745 name = "expire",
746 description = "Checking for build tree expiry",
747 command = ["./expire.sh", str(tree_expire)],
748 workdir = ".",
749 haltOnFailure = True,
750 doStepIf = IsExpireRequested,
751 timeout = 2400))
752
753 # cleanup.sh if needed
754 factory.addStep(FileDownload(
755 name = "dlcleanupsh",
756 mastersrc = scripts_dir + '/cleanup.sh',
757 workerdest = "../cleanup.sh",
758 mode = 0o755,
759 doStepIf = IsCleanupRequested))
760
761 factory.addStep(ShellCommand(
762 name = "cleanold",
763 description = "Cleaning previous builds",
764 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
765 workdir = ".",
766 haltOnFailure = True,
767 doStepIf = IsCleanupRequested,
768 timeout = 2400))
769
770 factory.addStep(ShellCommand(
771 name = "cleanup",
772 description = "Cleaning work area",
773 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
774 workdir = ".",
775 haltOnFailure = True,
776 doStepIf = IsCleanupRequested,
777 timeout = 2400))
778
779 # user-requested clean targets
780 for tuple in CleanTargetMap:
781 factory.addStep(ShellCommand(
782 name = tuple[1],
783 description = 'User-requested "make %s"' % tuple[1],
784 command = ["make", tuple[1], "V=s"],
785 env = MakeEnv(),
786 doStepIf = IsMakeCleanRequested(tuple[0])
787 ))
788
789 # Workaround bug when switching from a checked out tag back to a branch
790 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
791 factory.addStep(ShellCommand(
792 name = "gitcheckout",
793 description = "Ensure that Git HEAD is sane",
794 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
795 haltOnFailure = True))
796
797 # check out the source
798 # Git() runs:
799 # if repo doesn't exist: 'git clone repourl'
800 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
801 # 'git fetch -t repourl branch; git reset --hard revision'
802 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
803 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
804 factory.addStep(Git(
805 name = "gitclean",
806 repourl = repo_url,
807 branch = repo_branch,
808 mode = 'full',
809 method = 'clean',
810 locks = NetLockDl,
811 haltOnFailure = True,
812 doStepIf = IsGitCleanRequested,
813 ))
814
815 factory.addStep(Git(
816 name = "gitfresh",
817 repourl = repo_url,
818 branch = repo_branch,
819 mode = 'full',
820 method = 'fresh',
821 locks = NetLockDl,
822 haltOnFailure = True,
823 doStepIf = IsGitFreshRequested,
824 ))
825
826 # update remote refs
827 factory.addStep(ShellCommand(
828 name = "fetchrefs",
829 description = "Fetching Git remote refs",
830 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
831 haltOnFailure = True
832 ))
833
834 # switch to tag
835 factory.addStep(ShellCommand(
836 name = "switchtag",
837 description = "Checking out Git tag",
838 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
839 haltOnFailure = True,
840 doStepIf = IsTaggingRequested
841 ))
842
843 # Verify that Git HEAD points to a tag or branch
844 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
845 factory.addStep(ShellCommand(
846 name = "gitverify",
847 description = "Ensure that Git HEAD is pointing to a branch or tag",
848 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
849 haltOnFailure = True))
850
851 factory.addStep(ShellCommand(
852 name = "rmtmp",
853 description = "Remove tmp folder",
854 command=["rm", "-rf", "tmp/"]))
855
856 # feed
857 # factory.addStep(ShellCommand(
858 # name = "feedsconf",
859 # description = "Copy the feeds.conf",
860 # command='''cp ~/feeds.conf ./feeds.conf''' ))
861
862 # feed
863 factory.addStep(ShellCommand(
864 name = "rmfeedlinks",
865 description = "Remove feed symlinks",
866 command=["rm", "-rf", "package/feeds/"]))
867
868 factory.addStep(StringDownload(
869 name = "ccachecc",
870 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
871 workerdest = "../ccache_cc.sh",
872 mode = 0o755,
873 ))
874
875 factory.addStep(StringDownload(
876 name = "ccachecxx",
877 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
878 workerdest = "../ccache_cxx.sh",
879 mode = 0o755,
880 ))
881
882 # Git SSH
883 if git_ssh:
884 factory.addStep(StringDownload(
885 name = "dlgitclonekey",
886 s = git_ssh_key,
887 workerdest = "../git-clone.key",
888 mode = 0o600,
889 ))
890
891 factory.addStep(ShellCommand(
892 name = "patchfeedsconf",
893 description = "Patching feeds.conf",
894 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
895 haltOnFailure = True
896 ))
897
898 # feed
899 factory.addStep(ShellCommand(
900 name = "updatefeeds",
901 description = "Updating feeds",
902 command=["./scripts/feeds", "update"],
903 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
904 haltOnFailure = True,
905 locks = NetLockDl,
906 ))
907
908 # Git SSH
909 if git_ssh:
910 factory.addStep(ShellCommand(
911 name = "rmfeedsconf",
912 description = "Removing feeds.conf",
913 command=["rm", "feeds.conf"],
914 haltOnFailure = True
915 ))
916
917 # feed
918 factory.addStep(ShellCommand(
919 name = "installfeeds",
920 description = "Installing feeds",
921 command=["./scripts/feeds", "install", "-a"],
922 env = MakeEnv(tryccache=True),
923 haltOnFailure = True
924 ))
925
926 # seed config
927 if config_seed is not None:
928 factory.addStep(StringDownload(
929 name = "dlconfigseed",
930 s = config_seed + '\n',
931 workerdest = ".config",
932 mode = 0o644
933 ))
934
935 # configure
936 factory.addStep(ShellCommand(
937 name = "newconfig",
938 description = "Seeding .config",
939 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
940 ))
941
942 factory.addStep(ShellCommand(
943 name = "delbin",
944 description = "Removing output directory",
945 command = ["rm", "-rf", "bin/"]
946 ))
947
948 factory.addStep(ShellCommand(
949 name = "defconfig",
950 description = "Populating .config",
951 command = ["make", "defconfig"],
952 env = MakeEnv()
953 ))
954
955 # check arch
956 factory.addStep(ShellCommand(
957 name = "checkarch",
958 description = "Checking architecture",
959 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
960 logEnviron = False,
961 want_stdout = False,
962 want_stderr = False,
963 haltOnFailure = True
964 ))
965
966 # find libc suffix
967 factory.addStep(SetPropertyFromCommand(
968 name = "libc",
969 property = "libc",
970 description = "Finding libc suffix",
971 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
972
973 # install build key
974 if usign_key is not None:
975 factory.addStep(StringDownload(
976 name = "dlkeybuildpub",
977 s = UsignSec2Pub(usign_key, usign_comment),
978 workerdest = "key-build.pub",
979 mode = 0o600,
980 ))
981
982 factory.addStep(StringDownload(
983 name = "dlkeybuild",
984 s = "# fake private key",
985 workerdest = "key-build",
986 mode = 0o600,
987 ))
988
989 factory.addStep(StringDownload(
990 name = "dlkeybuilducert",
991 s = "# fake certificate",
992 workerdest = "key-build.ucert",
993 mode = 0o600,
994 ))
995
    # prepare dl
    # Replace the in-tree dl/ with a symlink to a shared per-worker download
    # cache in $HOME, so fetched source archives survive tree resets.
    factory.addStep(ShellCommand(
        name = "dldir",
        description = "Preparing dl/",
        command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
        logEnviron = False,
        want_stdout = False
    ))

    # prepare tar
    # Build the in-tree GNU tar before populating dl/ (presumably so
    # unpacking does not depend on the host tar — confirm).
    factory.addStep(ShellCommand(
        name = "dltar",
        description = "Building and installing GNU tar",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True
    ))

    # populate dl
    # 'make download' fills the shared dl/ cache; the exclusive dlLock plus
    # the download network locks serialize access to the shared directory.
    factory.addStep(ShellCommand(
        name = "dlrun",
        description = "Populating dl/",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
        env = MakeEnv(),
        logEnviron = False,
        locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
    ))

    # Force base-files to be rebuilt from scratch in every run.
    factory.addStep(ShellCommand(
        name = "cleanbase",
        description = "Cleaning base-files",
        command=["make", "package/base-files/clean", "V=s"]
    ))

    # build
    factory.addStep(ShellCommand(
        name = "tools",
        description = "Building and installing tools",
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "toolchain",
        description = "Building and installing toolchain",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    # IGNORE_ERRORS=n m lets the build continue past certain package
    # failures (OpenWrt build-system semantics — confirm); BUILD_LOG=1
    # keeps per-package build logs.
    factory.addStep(ShellCommand(
        name = "kmods",
        description = "Building kmods",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
        env = MakeEnv(),
        #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
        haltOnFailure = True
    ))

    # find kernel version
    # Combine LINUX_VERSION, LINUX_RELEASE and LINUX_VERMAGIC into one
    # "version-release-vermagic" string, stored as property 'kernelversion'
    # and used later to name the kmods/ archive directories.
    factory.addStep(SetPropertyFromCommand(
        name = "kernelversion",
        property = "kernelversion",
        description = "Finding the effective Kernel version",
        command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
        env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
    ))

    factory.addStep(ShellCommand(
        name = "pkgclean",
        description = "Cleaning up package build",
        command=["make", "package/cleanup", "V=s"]
    ))

    factory.addStep(ShellCommand(
        name = "pkgbuild",
        description = "Building packages",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
        env = MakeEnv(),
        #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
        haltOnFailure = True
    ))

    # factory.addStep(IfBuiltinShellCommand(
    factory.addStep(ShellCommand(
        name = "pkginstall",
        description = "Installing packages",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    # CONFIG_SIGNED_PACKAGES= disables in-build signing; index files are
    # signed centrally on the master in the 'signfiles' step further down.
    factory.addStep(ShellCommand(
        name = "pkgindex",
        description = "Indexing packages",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
        env = MakeEnv(),
        haltOnFailure = True
    ))
1096
    if enable_kmod_archive and embed_kmod_repository:
        # embed kmod repository. Must happen before 'images' so the
        # generated feed configuration ends up inside the built images.

        # find rootfs staging directory
        # 'make val.STAGING_DIR_ROOT' prints the rootfs staging path; it is
        # stored in the 'stageroot' property for the kmodconfig step below.
        factory.addStep(SetPropertyFromCommand(
            name = "stageroot",
            property = "stageroot",
            description = "Finding the rootfs staging directory",
            command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
            env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
            want_stderr = False
        ))

        # files/ acts as a rootfs overlay merged into the images.
        factory.addStep(ShellCommand(
            name = "filesdir",
            description = "Creating file overlay directory",
            command=["mkdir", "-p", "files/etc/opkg"],
            haltOnFailure = True
        ))

        # Duplicate every "src/gz <name>_core .../packages" feed line as a
        # "<name>_kmods .../kmods/<kernelversion>" line and write the result
        # into the files/ overlay, enabling the per-kernel kmod feed.
        factory.addStep(ShellCommand(
            name = "kmodconfig",
            description = "Embedding kmod repository configuration",
            command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
                                "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
            haltOnFailure = True
        ))
1124
    #factory.addStep(IfBuiltinShellCommand(
    factory.addStep(ShellCommand(
        name = "images",
        description = "Building and installing images",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))

    # "|| true" makes this step best-effort: the shell always exits 0, so
    # haltOnFailure never triggers even if buildinfo generation fails.
    factory.addStep(ShellCommand(
        name = "buildinfo",
        description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
        command = "make -j1 buildinfo V=s || true",
        env = MakeEnv(),
        haltOnFailure = True
    ))

    # Best-effort as well ("|| true").
    factory.addStep(ShellCommand(
        name = "json_overview_image_info",
        description = "Generate profiles.json in target folder",
        command = "make -j1 json_overview_image_info V=s || true",
        env = MakeEnv(),
        haltOnFailure = True
    ))

    # Produce the sha256sums file for the target directory; it is later
    # packed for signing and compared against the remote copy when building
    # the upload list.
    factory.addStep(ShellCommand(
        name = "checksums",
        description = "Calculating checksums",
        command=["make", "-j1", "checksum", "V=s"],
        env = MakeEnv(),
        haltOnFailure = True
    ))
1157
    if enable_kmod_archive:
        # Create bin/targets/<t>/<st><libc>/kmods/<kernelversion>/ ...
        factory.addStep(ShellCommand(
            name = "kmoddir",
            description = "Creating kmod directory",
            command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
            haltOnFailure = True
        ))

        # ... copy only kmod-*.ipk from packages/ into it ...
        factory.addStep(ShellCommand(
            name = "kmodprepare",
            description = "Preparing kmod archive",
            command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
                     Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
                     Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
            haltOnFailure = True
        ))

        # ... and build a package index inside it (unsigned, like pkgindex).
        factory.addStep(ShellCommand(
            name = "kmodindex",
            description = "Indexing kmod archive",
            command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
                     Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
            env = MakeEnv(),
            haltOnFailure = True
        ))
1183
    # sign
    # Signing round-trip: pack the sha256sums/Packages index files on the
    # worker, upload the tarball to the master, sign it there with
    # signall.sh (which presumably holds the key material — the workers do
    # not), then download and unpack the signed files over the originals.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        # Collect sha256sums and Packages files (up to two levels deep, to
        # include the kmods/<kernelversion>/ indexes) into sign.tar.gz.
        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
            haltOnFailure = True
        ))

        # Per-target filename on the master avoids clashes between builders.
        factory.addStep(FileUpload(
            workersrc = "sign.tar.gz",
            masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
            haltOnFailure = True
        ))

        # Runs on the master; signall.sh locates its configuration via the
        # CONFIG_INI environment variable.
        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            name = "dlsigntargz",
            mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
            workerdest = "sign.tar.gz",
            haltOnFailure = True
        ))

        # Overwrites the index files in place with their signed versions.
        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))
1227
    # upload
    # Stage the remote directory layout under tmp/upload/ and rsync it
    # first, so the per-file uploads below find their destination
    # directories already in place.
    factory.addStep(ShellCommand(
        name = "dirprepare",
        description = "Preparing upload directory structure",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True
    ))

    # On non-master builds, point <prefix>packages at the shared
    # ../packages-<basever> tree via a relative symlink (uploaded as-is).
    factory.addStep(ShellCommand(
        name = "linkprepare",
        description = "Preparing repository symlink",
        command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
        doStepIf = IsNoMasterBuild,
        haltOnFailure = True
    ))

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmoddirprepare",
            description = "Preparing kmod archive upload directory",
            command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
            haltOnFailure = True
        ))

    # -a preserves the symlink created by linkprepare; uploads are
    # serialized through the shared upload network lock.
    factory.addStep(ShellCommand(
        name = "dirupload",
        description = "Uploading directory structure",
        command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
        locks = NetLockUl,
    ))
1261
    # download remote sha256sums to 'target-sha256sums'
    # Best-effort (flunkOnFailure=False): if no remote sha256sums exists
    # yet (first upload of this target), sha2rsync.pl presumably treats
    # that as "no previous state" and lists everything — confirm.
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for target",
        command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        logEnviron = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))

    # build list of files to upload
    # Helper scripts land one level above the build dir ("../") so they do
    # not pollute the source tree.
    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = scripts_dir + '/sha2rsync.pl',
        workerdest = "../sha2rsync.pl",
        mode = 0o755,
    ))

    # Compare remote vs. local sha256sums and write the names of new or
    # changed files to 'rsynclist' (consumed by targetupload below).
    factory.addStep(ShellCommand(
        name = "buildlist",
        description = "Building list of files to upload",
        command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
        haltOnFailure = True,
    ))

    factory.addStep(FileDownload(
        name = "dlrsync.sh",
        mastersrc = scripts_dir + '/rsync.sh',
        workerdest = "../rsync.sh",
        mode = 0o755
    ))
1295
1296 # upload new files and update existing ones
1297 factory.addStep(ShellCommand(
1298 name = "targetupload",
1299 description = "Uploading target files",
1300 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1301 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1302 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1303 env={'RSYNC_PASSWORD': rsync_bin_key},
1304 haltOnFailure = True,
1305 logEnviron = False,
1306 ))
1307
1308 # delete files which don't exist locally
1309 factory.addStep(ShellCommand(
1310 name = "targetprune",
1311 description = "Pruning target files",
1312 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1313 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1314 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1315 env={'RSYNC_PASSWORD': rsync_bin_key},
1316 haltOnFailure = True,
1317 logEnviron = False,
1318 locks = NetLockUl,
1319 ))
1320
1321 if enable_kmod_archive:
1322 factory.addStep(ShellCommand(
1323 name = "kmodupload",
1324 description = "Uploading kmod archive",
1325 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1326 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1327 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1328 env={'RSYNC_PASSWORD': rsync_bin_key},
1329 haltOnFailure = True,
1330 logEnviron = False,
1331 locks = NetLockUl,
1332 ))
1333
    if rsync_src_url is not None:
        # List source archives actually refreshed by this run (newer than
        # .config), skipping empty, hidden, .hash and .dl partial files.
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        # --size-only skips checksum/mtime comparison for files already on
        # the mirror; the partial-dir name includes the worker name since
        # several workers may push sources concurrently.
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
                    [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = True,
            logEnviron = False,
            locks = NetLockUl,
        ))
1352
    # Disabled ("if False:"): package-tree upload, kept for reference.
    if False:
        factory.addStep(ShellCommand(
            name = "packageupload",
            description = "Uploading package files",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False,
            locks = NetLockUl,
        ))

    # logs
    # Disabled ("if False:"): build-log upload, kept for reference.
    if False:
        factory.addStep(ShellCommand(
            name = "upload",
            description = "Uploading logs",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            alwaysRun = True,
            logEnviron = False,
            locks = NetLockUl,
        ))
1380
    # Diagnostics: these always run (alwaysRun=True) and can never affect
    # the build result (halt/flunk/warnOnFailure all False).
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    # PATH is extended so the build tree's own ccache binary is found if
    # the host has none.
    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True,
    ))
1414
    # Register the per-target builder, a Triggerable scheduler for it, and a
    # Trigger step on the shared force-build factory that fires only when
    # this target is selected in the force form.
    c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % target,
        description = "Triggering %s build" % target,
        schedulerNames = [ "trigger_%s" % target ],
        set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
        doStepIf = IsTargetSelected(target)
    ))
1425
1426
1427 ####### STATUS TARGETS
1428
# Build results are reported through the web UI configured in c['www'] and
# the reporters collected in c['services'] below (web views, IRC
# notifications); the old buildbot 'status' targets no longer exist in
# modern buildbot.
1432
# Web UI: only enabled when the config.ini provides a bind endpoint.
if ini.has_option("phase1", "status_bind"):
    c['www'] = {
        'port': ini.get("phase1", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    # Optional single-user login; that user gets the "admins" role, and
    # only admins may use control endpoints (force builds, stops, etc.).
    if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
        )
1451
# Reporters: an IRC bot is configured when host, nickname and channel are
# all present in the [irc] section of config.ini.
c['services'] = []
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
    irc_host = ini.get("irc", "host")
    irc_port = 6667  # default unless overridden below
    irc_chan = ini.get("irc", "channel")
    irc_nick = ini.get("irc", "nickname")
    irc_pass = None  # optional server password

    if ini.has_option("irc", "port"):
        irc_port = ini.getint("irc", "port")

    if ini.has_option("irc", "password"):
        irc_pass = ini.get("irc", "password")

    # Only announce state transitions (exception/problem/recovery), not
    # every build result.
    irc = reporters.IRC(irc_host, irc_nick,
        port = irc_port,
        password = irc_pass,
        channels = [ irc_chan ],
        notify_events = [ 'exception', 'problem', 'recovery' ]
    )

    c['services'].append(irc)
1474
# Map openwrt.org git clone URLs from change sources to gitweb commit
# links, shown for revisions in the web UI.
c['revlink'] = util.RevlinkMatch([
    r'https://git.openwrt.org/openwrt/(.*).git'
    ],
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1479
1480 ####### DB URL
1481
c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# Opt out of sending usage statistics to the buildbot project.
c['buildbotNetUsageData'] = None