1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from datetime import timedelta
11
12 from buildbot import locks
13 from buildbot.changes import filter
14 from buildbot.changes.gitpoller import GitPoller
15 from buildbot.config import BuilderConfig
16 from buildbot.plugins import reporters
17 from buildbot.plugins import schedulers
18 from buildbot.plugins import steps
19 from buildbot.plugins import util
20 from buildbot.process import properties
21 from buildbot.process.factory import BuildFactory
22 from buildbot.process.properties import Interpolate
23 from buildbot.process.properties import Property
24 from buildbot.schedulers.basic import SingleBranchScheduler
25 from buildbot.schedulers.forcesched import BaseParameter
26 from buildbot.schedulers.forcesched import ForceScheduler
27 from buildbot.schedulers.forcesched import ValidationError
28 from buildbot.steps.master import MasterShellCommand
29 from buildbot.steps.shell import SetPropertyFromCommand
30 from buildbot.steps.shell import ShellCommand
31 from buildbot.steps.source.git import Git
32 from buildbot.steps.transfer import FileDownload
33 from buildbot.steps.transfer import FileUpload
34 from buildbot.steps.transfer import StringDownload
35 from buildbot.worker import Worker
36
37
38 # This is the phase1 buildmaster config file. It must be installed as
39 # 'master.cfg' in the buildmaster's base directory.
40
41 ini = configparser.ConfigParser()
42 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
43
44 # This is the dictionary that the buildmaster pays attention to. We also use
45 # a shorter alias to save typing.
46 c = BuildmasterConfig = {}
47
48 ####### PROJECT IDENTITY
49
50 # the 'title' string will appear at the top of this buildbot
51 # installation's web UI home page (linked to the
52 # 'titleURL') and is embedded in the page titles.
53
54 c['title'] = ini.get("general", "title")
55 c['titleURL'] = ini.get("general", "title_url")
56
57 # the 'buildbotURL' string should point to the location where the buildbot's
58 # internal web server (the web UI configured further below) is visible. This
59 # typically uses the port number set in the 'www' configuration, but with an
60 # externally-visible host name which the buildbot cannot figure out
61 # without some help.
62
63 c['buildbotURL'] = ini.get("phase1", "buildbot_url")
64
65 ####### WORKERS
66
67 # The 'workers' list defines the set of recognized workers. Each element is
68 # a Worker object, specifying a unique worker name and password. The same
69 # name and password must be configured on the worker (an illustrative config.ini sketch follows the loop below).
70
71 worker_port = 9989
72
73 if ini.has_option("phase1", "port"):
74 worker_port = ini.get("phase1", "port")
75
76 c['workers'] = []
77 NetLocks = dict()
78
79 for section in ini.sections():
80 if section.startswith("worker "):
81 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
82 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
83 sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
84 name = ini.get(section, "name")
85 password = ini.get(section, "password")
86 max_builds = 1
87 if ini.has_option(section, "builds"):
88 max_builds = ini.getint(section, "builds")
89 sl_props['max_builds'] = max_builds
90 if max_builds == 1:
91 sl_props['shared_wd'] = True
92 if ini.has_option(section, "cleanup"):
93 sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
94 if ini.has_option(section, "dl_lock"):
95 lockname = ini.get(section, "dl_lock")
96 sl_props['dl_lock'] = lockname
97 if lockname not in NetLocks:
98 NetLocks[lockname] = locks.MasterLock(lockname)
99 if ini.has_option(section, "ul_lock"):
100 lockname = ini.get(section, "ul_lock")
101 sl_props['ul_lock'] = lockname
102 if lockname not in NetLocks:
103 NetLocks[lockname] = locks.MasterLock(lockname)
104 if ini.has_option(section, "shared_wd"):
105 shared_wd = ini.getboolean(section, "shared_wd")
106 sl_props['shared_wd'] = shared_wd
107 if shared_wd and (max_builds != 1):
108 raise ValueError('max_builds must be 1 with shared workdir!')
109 c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
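# For reference, a worker entry in config.ini as consumed by the loop above
# could look like the following sketch (values are placeholders; only the
# option names come from the has_option()/get() calls above):
#
#   [worker 1]
#   name = worker-01
#   password = secret
#   builds = 3
#   shared_wd = false
#   dl_lock = shared-dl
#   ul_lock = shared-ul
#   cleanup = false
#   phase = 1
#
# builds, cleanup, dl_lock, ul_lock, shared_wd and phase are all optional;
# shared_wd = true additionally requires builds = 1.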
110
111 # 'protocols' defines the TCP port to listen on for connections from workers.
112 # This must match the value configured on the workers (with their
113 # --master option)
114 c['protocols'] = {'pb': {'port': worker_port}}
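# On the build machine, such a worker would typically be created with the
# buildbot-worker tool, roughly like this (host, directory and credentials
# below are placeholders):
#   buildbot-worker create-worker ./worker phase1.builds.example.org:9989 worker-01 secret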
115
116 # coalesce builds
117 c['collapseRequests'] = True
118
119 # Reduce amount of backlog data
120 c['configurators'] = [util.JanitorConfigurator(
121 logHorizon=timedelta(days=3),
122 hour=6,
123 )]
124
125 ####### CHANGESOURCES
126
127 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
128 scripts_dir = os.path.abspath("../scripts")
129 tree_expire = 0
130 other_builds = 0
131 cc_version = None
132
133 cc_command = "gcc"
134 cxx_command = "g++"
135
136 config_seed = ""
137
138 git_ssh = False
139 git_ssh_key = None
140
141 if ini.has_option("phase1", "expire"):
142 tree_expire = ini.getint("phase1", "expire")
143
144 if ini.has_option("phase1", "other_builds"):
145 other_builds = ini.getint("phase1", "other_builds")
146
147 if ini.has_option("phase1", "cc_version"):
148 cc_version = ini.get("phase1", "cc_version").split()
149 if len(cc_version) == 1:
150 cc_version = ["eq", cc_version[0]]
151
152 if ini.has_option("general", "git_ssh"):
153 git_ssh = ini.getboolean("general", "git_ssh")
154
155 if ini.has_option("general", "git_ssh_key"):
156 git_ssh_key = ini.get("general", "git_ssh_key")
157 else:
158 git_ssh = False
159
160 if ini.has_option("phase1", "config_seed"):
161 config_seed = ini.get("phase1", "config_seed")
162
163 repo_url = ini.get("repo", "url")
164 repo_branch = "master"
165
166 if ini.has_option("repo", "branch"):
167 repo_branch = ini.get("repo", "branch")
168
169 rsync_bin_url = ini.get("rsync", "binary_url")
170 rsync_bin_key = ini.get("rsync", "binary_password")
171 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
172
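# rsync's --contimeout applies only to connections to an rsync daemon, hence it
# is added just for rsync:// and host::module style URLs.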
173 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
174 rsync_bin_defopts += ["--contimeout=20"]
175
176 rsync_src_url = None
177 rsync_src_key = None
178 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
179
180 if ini.has_option("rsync", "source_url"):
181 rsync_src_url = ini.get("rsync", "source_url")
182 rsync_src_key = ini.get("rsync", "source_password")
183
184 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
185 rsync_src_defopts += ["--contimeout=20"]
186
187 usign_key = None
188 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
189
190 if ini.has_option("usign", "key"):
191 usign_key = ini.get("usign", "key")
192
193 if ini.has_option("usign", "comment"):
194 usign_comment = ini.get("usign", "comment")
195
196 enable_kmod_archive = False
197 embed_kmod_repository = False
198
199 if ini.has_option("phase1", "kmod_archive"):
200 enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
201
202 if ini.has_option("phase1", "kmod_repository"):
203 embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
204
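# Putting the options parsed above together, a config.ini for this master could
# look roughly like the sketch below; every value is a placeholder, only the
# section and option names are taken from the ini.get() calls in this file:
#
#   [general]
#   title = Example Project
#   title_url = https://example.org/
#   workdir = /data/buildbot
#
#   [phase1]
#   buildbot_url = https://phase1.builds.example.org/
#   status_bind = tcp:8010
#   expire = 1209600
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#   branch = openwrt-21.02
#
#   [rsync]
#   binary_url = builds@example.org::bin-upload
#   binary_password = secret
#   source_url = builds@example.org::src-upload
#   source_password = secret
#
#   [usign]
#   key = <base64-encoded usign secret key>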
205
206 # find targets
207 targets = [ ]
208
209 if not os.path.isdir(work_dir+'/source.git'):
210 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
211 else:
212 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
213
214 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
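# dump-target-info.pl emits one line per target; only the first whitespace-
# separated field (the "target/subtarget" pair, e.g. "ath79/generic") is used
# below to name the builders.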
215 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
216 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
217
218 while True:
219 line = findtargets.stdout.readline()
220 if not line:
221 break
222 ta = line.decode().strip().split(' ')
223 targets.append(ta[0])
224
225
226 # the 'change_source' setting tells the buildmaster how it should find out
227 # about source code changes. Here we poll the OpenWrt repository configured above.
228
229 c['change_source'] = []
230 c['change_source'].append(GitPoller(
231 repo_url,
232 workdir=work_dir+'/work.git', branch=repo_branch,
233 pollinterval=300))
234
235 ####### SCHEDULERS
236
237 # Configure the Schedulers, which decide how to react to incoming changes. In this
238 # case, kick off a build on every per-target builder whenever the tracked branch changes.
239
240 class TagChoiceParameter(BaseParameter):
241 spec_attributes = ["strict", "choices"]
242 type = "list"
243 strict = True
244
245 def __init__(self, name, label=None, **kw):
246 super().__init__(name, label, **kw)
247 self._choice_list = []
248
249 @property
250 def choices(self):
251 taglist = []
252 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
253
254 if basever:
255 findtags = subprocess.Popen(
256 ['git', 'ls-remote', '--tags', repo_url],
257 stdout = subprocess.PIPE)
258
259 while True:
260 line = findtags.stdout.readline()
261
262 if not line:
263 break
264
265 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
266
267 if tagver and tagver[1].find(basever[1]) == 0:
268 taglist.append(tagver[1])
269
270 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
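# appending "-z" to non-rc tags makes final releases sort above their own -rc
# tags in the reversed lexical sort, e.g. 19.07.3, 19.07.3-rc1, 19.07.2, ...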
271 taglist.insert(0, '')
272
273 self._choice_list = taglist
274
275 return self._choice_list
276
277 def parse_from_arg(self, s):
278 if self.strict and s not in self._choice_list:
279 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
280 return s
281
282 c['schedulers'] = []
283 c['schedulers'].append(SingleBranchScheduler(
284 name = "all",
285 change_filter = filter.ChangeFilter(branch=repo_branch),
286 treeStableTimer = 60,
287 builderNames = targets))
288
289 c['schedulers'].append(ForceScheduler(
290 name = "force",
291 buttonName = "Force builds",
292 label = "Force build details",
293 builderNames = [ "00_force_build" ],
294
295 codebases = [
296 util.CodebaseParameter(
297 "",
298 label = "Repository",
299 branch = util.FixedParameter(name = "branch", default = ""),
300 revision = util.FixedParameter(name = "revision", default = ""),
301 repository = util.FixedParameter(name = "repository", default = ""),
302 project = util.FixedParameter(name = "project", default = "")
303 )
304 ],
305
306 reason = util.StringParameter(
307 name = "reason",
308 label = "Reason",
309 default = "Trigger build",
310 required = True,
311 size = 80
312 ),
313
314 properties = [
315 util.NestedParameter(
316 name="options",
317 label="Build Options",
318 layout="vertical",
319 fields=[
320 util.ChoiceStringParameter(
321 name = "target",
322 label = "Build target",
323 default = "all",
324 choices = [ "all" ] + targets
325 ),
326 TagChoiceParameter(
327 name = "tag",
328 label = "Build tag",
329 default = ""
330 )
331 ]
332 )
333 ]
334 ))
335
336 ####### BUILDERS
337
338 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
339 # what steps, and which workers can execute them. Note that any particular build will
340 # only take place on one worker.
341
342 CleanTargetMap = [
343 [ "tools", "tools/clean" ],
344 [ "chain", "toolchain/clean" ],
345 [ "linux", "target/linux/clean" ],
346 [ "dir", "dirclean" ],
347 [ "dist", "distclean" ]
348 ]
349
350 def IsMakeCleanRequested(pattern):
351 def CheckCleanProperty(step):
352 val = step.getProperty("clean")
353 if val and re.match(pattern, val):
354 return True
355 else:
356 return False
357
358 return CheckCleanProperty
359
360 def IsSharedWorkdir(step):
361 return bool(step.getProperty("shared_wd"))
362
363 def IsCleanupRequested(step):
364 if IsSharedWorkdir(step):
365 return False
366 do_cleanup = step.getProperty("do_cleanup")
367 if do_cleanup:
368 return True
369 else:
370 return False
371
372 def IsExpireRequested(step):
373 if IsSharedWorkdir(step):
374 return False
375 else:
376 return not IsCleanupRequested(step)
377
378 def IsGitFreshRequested(step):
379 do_cleanup = step.getProperty("do_cleanup")
380 if do_cleanup:
381 return True
382 else:
383 return False
384
385 def IsGitCleanRequested(step):
386 return not IsGitFreshRequested(step)
387
388 def IsTaggingRequested(step):
389 val = step.getProperty("tag")
390 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
391 return True
392 else:
393 return False
394
395 def IsNoTaggingRequested(step):
396 return not IsTaggingRequested(step)
397
398 def IsNoMasterBuild(step):
399 return repo_branch != "master"
400
401 def GetBaseVersion():
402 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
403 return repo_branch.split('-')[1]
404 else:
405 return "master"
406
407 @properties.renderer
408 def GetVersionPrefix(props):
409 basever = GetBaseVersion()
410 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
411 return "%s/" % props["tag"]
412 elif basever != "master":
413 return "%s-SNAPSHOT/" % basever
414 else:
415 return ""
416
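# e.g. with nproc=8, max_builds=2 and other_builds=0, GetNumJobs below yields 4
# (integer division).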
417 @properties.renderer
418 def GetNumJobs(props):
419 if props.hasProperty("max_builds") and props.hasProperty("nproc"):
420 return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
421 else:
422 return "1"
423
424 @properties.renderer
425 def GetCC(props):
426 if props.hasProperty("cc_command"):
427 return props["cc_command"]
428 else:
429 return "gcc"
430
431 @properties.renderer
432 def GetCXX(props):
433 if props.hasProperty("cxx_command"):
434 return props["cxx_command"]
435 else:
436 return "g++"
437
438 @properties.renderer
439 def GetCwd(props):
440 if props.hasProperty("builddir"):
441 return props["builddir"]
442 elif props.hasProperty("workdir"):
443 return props["workdir"]
444 else:
445 return "/"
446
447 @properties.renderer
448 def GetCCache(props):
449 if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
450 return props["ccache_command"]
451 else:
452 return ""
453
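# pick pending requests that carry a "tag" property (forced release builds)
# before ordinary branch build requests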
454 def GetNextBuild(builder, requests):
455 for r in requests:
456 if r.properties and r.properties.hasProperty("tag"):
457 return r
458 return requests[0]
459
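# MakeEnv() exposes the detected compilers as CCC/CCXX and, with tryccache=True,
# points CC/CXX at the ccache_cc.sh/ccache_cxx.sh wrapper scripts that are
# downloaded into the builder directory later in the factory.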
460 def MakeEnv(overrides=None, tryccache=False):
461 env = {
462 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
463 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
464 }
465 if tryccache:
466 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
467 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
468 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
469 else:
470 env['CC'] = env['CCC']
471 env['CXX'] = env['CCXX']
472 env['CCACHE'] = ''
473 if overrides is not None:
474 env.update(overrides)
475 return env
476
477 @properties.renderer
478 def NetLockDl(props):
479 lock = None
480 if props.hasProperty("dl_lock"):
481 lock = NetLocks[props["dl_lock"]]
482 if lock is not None:
483 return [lock.access('exclusive')]
484 else:
485 return []
486
487 @properties.renderer
488 def NetLockUl(props):
489 lock = None
490 if props.hasProperty("ul_lock"):
491 lock = NetLocks[props["ul_lock"]]
492 if lock is not None:
493 return [lock.access('exclusive')]
494 else:
495 return []
496
497 @util.renderer
498 def TagPropertyValue(props):
499 if props.hasProperty("options"):
500 options = props.getProperty("options")
501 if type(options) is dict:
502 return options.get("tag")
503 return None
504
505 def IsTargetSelected(target):
506 def CheckTargetProperty(step):
507 try:
508 options = step.getProperty("options")
509 if type(options) is dict:
510 selected_target = options.get("target", "all")
511 if selected_target != "all" and selected_target != target:
512 return False
513 except KeyError:
514 pass
515
516 return True
517
518 return CheckTargetProperty
519
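# UsignSec2Pub derives the usign (signify-style) public key from the secret key:
# in the decoded secret key, bytes 0-1 carry the algorithm tag, bytes 32-39 the
# key fingerprint, and the trailing 32 bytes the public half of the key pair.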
520 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
521 try:
522 seckey = base64.b64decode(seckey)
523 except:
524 return None
525
526 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
527 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
528
529
530 c['builders'] = []
531
532 dlLock = locks.WorkerLock("worker_dl")
533
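# shell helper used by IfBuiltinShellCommand below: it succeeds (so the wrapped
# command runs) when at least one changed file lies outside package/, or when a
# changed package is selected as built-in (=y) in .config per tmp/.packagedeps;
# otherwise it fails and the wrapped command is skipped.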
534 checkBuiltin = re.sub('[\t\n ]+', ' ', """
535 checkBuiltin() {
536 local symbol op path file;
537 for file in $CHANGED_FILES; do
538 case "$file" in
539 package/*/*) : ;;
540 *) return 0 ;;
541 esac;
542 done;
543 while read symbol op path; do
544 case "$symbol" in package-*)
545 symbol="${symbol##*(}";
546 symbol="${symbol%)}";
547 for file in $CHANGED_FILES; do
548 case "$file" in "package/$path/"*)
549 grep -qsx "$symbol=y" .config && return 0
550 ;; esac;
551 done;
552 esac;
553 done < tmp/.packagedeps;
554 return 1;
555 }
556 """).strip()
557
558
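# ShellCommand variant that prefixes its command with the checkBuiltin helper and
# exports the changed files as CHANGED_FILES, so the command only runs when the
# change set can affect built-in packages. It is currently only referenced from
# the commented-out pkginstall/images steps further below.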
559 class IfBuiltinShellCommand(ShellCommand):
560 def _quote(self, str):
561 if re.search("[^a-zA-Z0-9/_.-]", str):
562 return "'%s'" %(re.sub("'", "'\"'\"'", str))
563 return str
564
565 def setCommand(self, command):
566 if not isinstance(command, str):
567 command = ' '.join(map(self._quote, command))
568 self.command = [
569 '/bin/sh', '-c',
570 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
571 ]
572
573 def setupEnvironment(self, cmd):
574 workerEnv = self.workerEnvironment
575 if workerEnv is None:
576 workerEnv = { }
577 changedFiles = { }
578 for request in self.build.requests:
579 for source in request.sources:
580 for change in source.changes:
581 for file in change.files:
582 changedFiles[file] = True
583 fullWorkerEnv = workerEnv.copy()
584 fullWorkerEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
585 cmd.args['env'] = fullWorkerEnv
586
587 workerNames = [ ]
588
589 for worker in c['workers']:
590 workerNames.append(worker.workername)
591
592 force_factory = BuildFactory()
593
594 c['builders'].append(BuilderConfig(
595 name = "00_force_build",
596 workernames = workerNames,
597 factory = force_factory))
598
599 for target in targets:
600 ts = target.split('/')
601
602 factory = BuildFactory()
603
604 # setup shared work directory if required
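# (replaces the per-builder work directory with a symlink to a single
# ../shared-workdir, so every builder scheduled on this worker reuses one tree)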
605 factory.addStep(ShellCommand(
606 name = "sharedwd",
607 description = "Setting up shared work directory",
608 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
609 workdir = ".",
610 haltOnFailure = True,
611 doStepIf = IsSharedWorkdir))
612
613 # find number of cores
614 factory.addStep(SetPropertyFromCommand(
615 name = "nproc",
616 property = "nproc",
617 description = "Finding number of CPUs",
618 command = ["nproc"]))
619
620 # find gcc and g++ compilers
621 factory.addStep(FileDownload(
622 name = "dlfindbinpl",
623 mastersrc = scripts_dir + '/findbin.pl',
624 workerdest = "../findbin.pl",
625 mode = 0o755))
626
627 factory.addStep(SetPropertyFromCommand(
628 name = "gcc",
629 property = "cc_command",
630 description = "Finding gcc command",
631 command = [
632 "../findbin.pl", "gcc",
633 cc_version[0] if cc_version is not None else '',
634 cc_version[1] if cc_version is not None else ''
635 ],
636 haltOnFailure = True))
637
638 factory.addStep(SetPropertyFromCommand(
639 name = "g++",
640 property = "cxx_command",
641 description = "Finding g++ command",
642 command = [
643 "../findbin.pl", "g++",
644 cc_version[0] if cc_version is not None else '',
645 cc_version[1] if cc_version is not None else ''
646 ],
647 haltOnFailure = True))
648
649 # see if ccache is available
650 factory.addStep(SetPropertyFromCommand(
651 property = "ccache_command",
652 command = ["which", "ccache"],
653 description = "Testing for ccache command",
654 haltOnFailure = False,
655 flunkOnFailure = False,
656 warnOnFailure = False,
657 ))
658
659 # expire tree if needed
660 if tree_expire > 0:
661 factory.addStep(FileDownload(
662 name = "dlexpiresh",
663 doStepIf = IsExpireRequested,
664 mastersrc = scripts_dir + '/expire.sh',
665 workerdest = "../expire.sh",
666 mode = 0o755))
667
668 factory.addStep(ShellCommand(
669 name = "expire",
670 description = "Checking for build tree expiry",
671 command = ["./expire.sh", str(tree_expire)],
672 workdir = ".",
673 haltOnFailure = True,
674 doStepIf = IsExpireRequested,
675 timeout = 2400))
676
677 # cleanup.sh if needed
678 factory.addStep(FileDownload(
679 name = "dlcleanupsh",
680 mastersrc = scripts_dir + '/cleanup.sh',
681 workerdest = "../cleanup.sh",
682 mode = 0o755,
683 doStepIf = IsCleanupRequested))
684
685 factory.addStep(ShellCommand(
686 name = "cleanold",
687 description = "Cleaning previous builds",
688 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
689 workdir = ".",
690 haltOnFailure = True,
691 doStepIf = IsCleanupRequested,
692 timeout = 2400))
693
694 factory.addStep(ShellCommand(
695 name = "cleanup",
696 description = "Cleaning work area",
697 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
698 workdir = ".",
699 haltOnFailure = True,
700 doStepIf = IsCleanupRequested,
701 timeout = 2400))
702
703 # user-requested clean targets
704 for tuple in CleanTargetMap:
705 factory.addStep(ShellCommand(
706 name = tuple[1],
707 description = 'User-requested "make %s"' % tuple[1],
708 command = ["make", tuple[1], "V=s"],
709 env = MakeEnv(),
710 doStepIf = IsMakeCleanRequested(tuple[0])
711 ))
712
713 # Workaround bug when switching from a checked out tag back to a branch
714 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
715 factory.addStep(ShellCommand(
716 name = "gitcheckout",
717 description = "Ensure that Git HEAD is sane",
718 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
719 haltOnFailure = True))
720
721 # check out the source
722 # Git() runs:
723 # if repo doesn't exist: 'git clone repourl'
724 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
725 # 'git fetch -t repourl branch; git reset --hard revision'
726 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
727 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
728 factory.addStep(Git(
729 name = "gitclean",
730 repourl = repo_url,
731 branch = repo_branch,
732 mode = 'full',
733 method = 'clean',
734 haltOnFailure = True,
735 doStepIf = IsGitCleanRequested,
736 ))
737
738 factory.addStep(Git(
739 name = "gitfresh",
740 repourl = repo_url,
741 branch = repo_branch,
742 mode = 'full',
743 method = 'fresh',
744 haltOnFailure = True,
745 doStepIf = IsGitFreshRequested,
746 ))
747
748 # update remote refs
749 factory.addStep(ShellCommand(
750 name = "fetchrefs",
751 description = "Fetching Git remote refs",
752 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
753 haltOnFailure = True
754 ))
755
756 # switch to tag
757 factory.addStep(ShellCommand(
758 name = "switchtag",
759 description = "Checking out Git tag",
760 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
761 haltOnFailure = True,
762 doStepIf = IsTaggingRequested
763 ))
764
765 # Verify that Git HEAD points to a tag or branch
766 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
767 factory.addStep(ShellCommand(
768 name = "gitverify",
769 description = "Ensure that Git HEAD is pointing to a branch or tag",
770 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
771 haltOnFailure = True))
772
773 factory.addStep(ShellCommand(
774 name = "rmtmp",
775 description = "Remove tmp folder",
776 command=["rm", "-rf", "tmp/"]))
777
778 # feed
779 # factory.addStep(ShellCommand(
780 # name = "feedsconf",
781 # description = "Copy the feeds.conf",
782 # command='''cp ~/feeds.conf ./feeds.conf''' ))
783
784 # feed
785 factory.addStep(ShellCommand(
786 name = "rmfeedlinks",
787 description = "Remove feed symlinks",
788 command=["rm", "-rf", "package/feeds/"]))
789
790 factory.addStep(StringDownload(
791 name = "ccachecc",
792 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
793 workerdest = "../ccache_cc.sh",
794 mode = 0o755,
795 ))
796
797 factory.addStep(StringDownload(
798 name = "ccachecxx",
799 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
800 workerdest = "../ccache_cxx.sh",
801 mode = 0o755,
802 ))
803
804 # Git SSH
805 if git_ssh:
806 factory.addStep(StringDownload(
807 name = "dlgitclonekey",
808 s = git_ssh_key,
809 workerdest = "../git-clone.key",
810 mode = 0o600,
811 ))
812
813 factory.addStep(ShellCommand(
814 name = "patchfeedsconf",
815 description = "Patching feeds.conf",
816 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
817 haltOnFailure = True
818 ))
819
820 # feed
821 factory.addStep(ShellCommand(
822 name = "updatefeeds",
823 description = "Updating feeds",
824 command=["./scripts/feeds", "update"],
825 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
826 haltOnFailure = True
827 ))
828
829 # Git SSH
830 if git_ssh:
831 factory.addStep(ShellCommand(
832 name = "rmfeedsconf",
833 description = "Removing feeds.conf",
834 command=["rm", "feeds.conf"],
835 haltOnFailure = True
836 ))
837
838 # feed
839 factory.addStep(ShellCommand(
840 name = "installfeeds",
841 description = "Installing feeds",
842 command=["./scripts/feeds", "install", "-a"],
843 env = MakeEnv(tryccache=True),
844 haltOnFailure = True
845 ))
846
847 # seed config
848 if config_seed:
849 factory.addStep(StringDownload(
850 name = "dlconfigseed",
851 s = config_seed + '\n',
852 workerdest = ".config",
853 mode = 0o644
854 ))
855
856 # configure
857 factory.addStep(ShellCommand(
858 name = "newconfig",
859 description = "Seeding .config",
860 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
861 ))
862
863 factory.addStep(ShellCommand(
864 name = "delbin",
865 description = "Removing output directory",
866 command = ["rm", "-rf", "bin/"]
867 ))
868
869 factory.addStep(ShellCommand(
870 name = "defconfig",
871 description = "Populating .config",
872 command = ["make", "defconfig"],
873 env = MakeEnv()
874 ))
875
876 # check arch
877 factory.addStep(ShellCommand(
878 name = "checkarch",
879 description = "Checking architecture",
880 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
881 logEnviron = False,
882 want_stdout = False,
883 want_stderr = False,
884 haltOnFailure = True
885 ))
886
887 # find libc suffix
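# (the sed yields "" for musl and "-<libc>" otherwise, matching the suffix used
# in the bin/targets/<target>/<subtarget><libc> directory names below)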
888 factory.addStep(SetPropertyFromCommand(
889 name = "libc",
890 property = "libc",
891 description = "Finding libc suffix",
892 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
893
894 # install build key
895 if usign_key is not None:
896 factory.addStep(StringDownload(
897 name = "dlkeybuildpub",
898 s = UsignSec2Pub(usign_key, usign_comment),
899 workerdest = "key-build.pub",
900 mode = 0o600,
901 ))
902
903 factory.addStep(StringDownload(
904 name = "dlkeybuild",
905 s = "# fake private key",
906 workerdest = "key-build",
907 mode = 0o600,
908 ))
909
910 factory.addStep(StringDownload(
911 name = "dlkeybuilducert",
912 s = "# fake certificate",
913 workerdest = "key-build.ucert",
914 mode = 0o600,
915 ))
916
917 # prepare dl
918 factory.addStep(ShellCommand(
919 name = "dldir",
920 description = "Preparing dl/",
921 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
922 logEnviron = False,
923 want_stdout = False
924 ))
925
926 # prepare tar
927 factory.addStep(ShellCommand(
928 name = "dltar",
929 description = "Building and installing GNU tar",
930 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
931 env = MakeEnv(tryccache=True),
932 haltOnFailure = True
933 ))
934
935 # populate dl
936 factory.addStep(ShellCommand(
937 name = "dlrun",
938 description = "Populating dl/",
939 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
940 env = MakeEnv(),
941 logEnviron = False,
942 locks = [dlLock.access('exclusive')],
943 ))
944
945 factory.addStep(ShellCommand(
946 name = "cleanbase",
947 description = "Cleaning base-files",
948 command=["make", "package/base-files/clean", "V=s"]
949 ))
950
951 # build
952 factory.addStep(ShellCommand(
953 name = "tools",
954 description = "Building and installing tools",
955 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
956 env = MakeEnv(tryccache=True),
957 haltOnFailure = True
958 ))
959
960 factory.addStep(ShellCommand(
961 name = "toolchain",
962 description = "Building and installing toolchain",
963 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
964 env = MakeEnv(),
965 haltOnFailure = True
966 ))
967
968 factory.addStep(ShellCommand(
969 name = "kmods",
970 description = "Building kmods",
971 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
972 env = MakeEnv(),
973 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
974 haltOnFailure = True
975 ))
976
977 # find kernel version
978 factory.addStep(SetPropertyFromCommand(
979 name = "kernelversion",
980 property = "kernelversion",
981 description = "Finding the effective Kernel version",
982 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
983 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
984 ))
985
986 factory.addStep(ShellCommand(
987 name = "pkgclean",
988 description = "Cleaning up package build",
989 command=["make", "package/cleanup", "V=s"]
990 ))
991
992 factory.addStep(ShellCommand(
993 name = "pkgbuild",
994 description = "Building packages",
995 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
996 env = MakeEnv(),
997 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
998 haltOnFailure = True
999 ))
1000
1001 # factory.addStep(IfBuiltinShellCommand(
1002 factory.addStep(ShellCommand(
1003 name = "pkginstall",
1004 description = "Installing packages",
1005 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
1006 env = MakeEnv(),
1007 haltOnFailure = True
1008 ))
1009
1010 factory.addStep(ShellCommand(
1011 name = "pkgindex",
1012 description = "Indexing packages",
1013 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
1014 env = MakeEnv(),
1015 haltOnFailure = True
1016 ))
1017
1018 if enable_kmod_archive and embed_kmod_repository:
1019 # embed kmod repository. Must happen before 'images'
1020
1021 # find rootfs staging directory
1022 factory.addStep(SetPropertyFromCommand(
1023 name = "stageroot",
1024 property = "stageroot",
1025 description = "Finding the rootfs staging directory",
1026 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
1027 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
1028 want_stderr = False
1029 ))
1030
1031 factory.addStep(ShellCommand(
1032 name = "filesdir",
1033 description = "Creating file overlay directory",
1034 command=["mkdir", "-p", "files/etc/opkg"],
1035 haltOnFailure = True
1036 ))
1037
1038 factory.addStep(ShellCommand(
1039 name = "kmodconfig",
1040 description = "Embedding kmod repository configuration",
1041 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
1042 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
1043 haltOnFailure = True
1044 ))
1045
1046 #factory.addStep(IfBuiltinShellCommand(
1047 factory.addStep(ShellCommand(
1048 name = "images",
1049 description = "Building and installing images",
1050 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
1051 env = MakeEnv(),
1052 haltOnFailure = True
1053 ))
1054
1055 factory.addStep(ShellCommand(
1056 name = "buildinfo",
1057 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1058 command = "make -j1 buildinfo V=s || true",
1059 env = MakeEnv(),
1060 haltOnFailure = True
1061 ))
1062
1063 factory.addStep(ShellCommand(
1064 name = "json_overview_image_info",
1065 description = "Generate profiles.json in target folder",
1066 command = "make -j1 json_overview_image_info V=s || true",
1067 env = MakeEnv(),
1068 haltOnFailure = True
1069 ))
1070
1071 factory.addStep(ShellCommand(
1072 name = "checksums",
1073 description = "Calculating checksums",
1074 command=["make", "-j1", "checksum", "V=s"],
1075 env = MakeEnv(),
1076 haltOnFailure = True
1077 ))
1078
1079 if enable_kmod_archive:
1080 factory.addStep(ShellCommand(
1081 name = "kmoddir",
1082 description = "Creating kmod directory",
1083 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1084 haltOnFailure = True
1085 ))
1086
1087 factory.addStep(ShellCommand(
1088 name = "kmodprepare",
1089 description = "Preparing kmod archive",
1090 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1091 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1092 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1093 haltOnFailure = True
1094 ))
1095
1096 factory.addStep(ShellCommand(
1097 name = "kmodindex",
1098 description = "Indexing kmod archive",
1099 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1100 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1101 env = MakeEnv(),
1102 haltOnFailure = True
1103 ))
1104
1105 # sign
1106 if ini.has_option("gpg", "key") or usign_key is not None:
1107 factory.addStep(MasterShellCommand(
1108 name = "signprepare",
1109 description = "Preparing temporary signing directory",
1110 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1111 haltOnFailure = True
1112 ))
1113
1114 factory.addStep(ShellCommand(
1115 name = "signpack",
1116 description = "Packing files to sign",
1117 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1118 haltOnFailure = True
1119 ))
1120
1121 factory.addStep(FileUpload(
1122 workersrc = "sign.tar.gz",
1123 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1124 haltOnFailure = True
1125 ))
1126
1127 factory.addStep(MasterShellCommand(
1128 name = "signfiles",
1129 description = "Signing files",
1130 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1131 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1132 haltOnFailure = True
1133 ))
1134
1135 factory.addStep(FileDownload(
1136 name = "dlsigntargz",
1137 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1138 workerdest = "sign.tar.gz",
1139 haltOnFailure = True
1140 ))
1141
1142 factory.addStep(ShellCommand(
1143 name = "signunpack",
1144 description = "Unpacking signed files",
1145 command = ["tar", "-xzf", "sign.tar.gz"],
1146 haltOnFailure = True
1147 ))
1148
1149 # upload
1150 factory.addStep(ShellCommand(
1151 name = "dirprepare",
1152 description = "Preparing upload directory structure",
1153 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1154 haltOnFailure = True
1155 ))
1156
1157 factory.addStep(ShellCommand(
1158 name = "linkprepare",
1159 description = "Preparing repository symlink",
1160 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1161 doStepIf = IsNoMasterBuild,
1162 haltOnFailure = True
1163 ))
1164
1165 if enable_kmod_archive:
1166 factory.addStep(ShellCommand(
1167 name = "kmoddirprepare",
1168 description = "Preparing kmod archive upload directory",
1169 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1170 haltOnFailure = True
1171 ))
1172
1173 factory.addStep(ShellCommand(
1174 name = "dirupload",
1175 description = "Uploading directory structure",
1176 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1177 env={'RSYNC_PASSWORD': rsync_bin_key},
1178 haltOnFailure = True,
1179 logEnviron = False,
1180 ))
1181
1182 # download remote sha256sums to 'target-sha256sums'
1183 factory.addStep(ShellCommand(
1184 name = "target-sha256sums",
1185 description = "Fetching remote sha256sums for target",
1186 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1187 env={'RSYNC_PASSWORD': rsync_bin_key},
1188 logEnviron = False,
1189 haltOnFailure = False,
1190 flunkOnFailure = False,
1191 warnOnFailure = False,
1192 ))
1193
1194 # build list of files to upload
1195 factory.addStep(FileDownload(
1196 name = "dlsha2rsyncpl",
1197 mastersrc = scripts_dir + '/sha2rsync.pl',
1198 workerdest = "../sha2rsync.pl",
1199 mode = 0o755,
1200 ))
1201
1202 factory.addStep(ShellCommand(
1203 name = "buildlist",
1204 description = "Building list of files to upload",
1205 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1206 haltOnFailure = True,
1207 ))
1208
1209 factory.addStep(FileDownload(
1210 name = "dlrsync.sh",
1211 mastersrc = scripts_dir + '/rsync.sh',
1212 workerdest = "../rsync.sh",
1213 mode = 0o755
1214 ))
1215
1216 # upload new files and update existing ones
1217 factory.addStep(ShellCommand(
1218 name = "targetupload",
1219 description = "Uploading target files",
1220 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1221 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1222 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1223 env={'RSYNC_PASSWORD': rsync_bin_key},
1224 haltOnFailure = True,
1225 logEnviron = False,
1226 ))
1227
1228 # delete files which don't exist locally
1229 factory.addStep(ShellCommand(
1230 name = "targetprune",
1231 description = "Pruning target files",
1232 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1233 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1234 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1235 env={'RSYNC_PASSWORD': rsync_bin_key},
1236 haltOnFailure = True,
1237 logEnviron = False,
1238 ))
1239
1240 if enable_kmod_archive:
1241 factory.addStep(ShellCommand(
1242 name = "kmodupload",
1243 description = "Uploading kmod archive",
1244 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1245 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1246 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1247 env={'RSYNC_PASSWORD': rsync_bin_key},
1248 haltOnFailure = True,
1249 logEnviron = False,
1250 ))
1251
1252 if rsync_src_url is not None:
1253 factory.addStep(ShellCommand(
1254 name = "sourcelist",
1255 description = "Finding source archives to upload",
1256 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1257 haltOnFailure = True
1258 ))
1259
1260 factory.addStep(ShellCommand(
1261 name = "sourceupload",
1262 description = "Uploading source archives",
1263 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1264 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1265 env={'RSYNC_PASSWORD': rsync_src_key},
1266 haltOnFailure = True,
1267 logEnviron = False,
1268 ))
1269
1270 if False:
1271 factory.addStep(ShellCommand(
1272 name = "packageupload",
1273 description = "Uploading package files",
1274 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1275 env={'RSYNC_PASSWORD': rsync_bin_key},
1276 haltOnFailure = False,
1277 logEnviron = False,
1278 ))
1279
1280 # logs
1281 if False:
1282 factory.addStep(ShellCommand(
1283 name = "upload",
1284 description = "Uploading logs",
1285 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1286 env={'RSYNC_PASSWORD': rsync_bin_key},
1287 haltOnFailure = False,
1288 alwaysRun = True,
1289 logEnviron = False,
1290 ))
1291
1292 factory.addStep(ShellCommand(
1293 name = "df",
1294 description = "Reporting disk usage",
1295 command=["df", "-h", "."],
1296 env={'LC_ALL': 'C'},
1297 haltOnFailure = False,
1298 alwaysRun = True
1299 ))
1300
1301 factory.addStep(ShellCommand(
1302 name = "ccachestat",
1303 description = "Reporting ccache stats",
1304 command=["ccache", "-s"],
1305 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1306 want_stderr = False,
1307 haltOnFailure = False,
1308 flunkOnFailure = False,
1309 warnOnFailure = False,
1310 alwaysRun = True,
1311 ))
1312
1313 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1314
1315 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1316 force_factory.addStep(steps.Trigger(
1317 name = "trigger_%s" % target,
1318 description = "Triggering %s build" % target,
1319 schedulerNames = [ "trigger_%s" % target ],
1320 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1321 doStepIf = IsTargetSelected(target)
1322 ))
1323
1324
1325 ####### STATUS TARGETS
1326
1327 # 'services' and 'www' define where build results are reported: the web UI
1328 # configured below and, when the corresponding config.ini options are present,
1329 # an IRC bot.
1330
1331 if ini.has_option("phase1", "status_bind"):
1332 c['www'] = {
1333 'port': ini.get("phase1", "status_bind"),
1334 'plugins': {
1335 'waterfall_view': True,
1336 'console_view': True,
1337 'grid_view': True
1338 }
1339 }
1340
1341 if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
1342 c['www']['auth'] = util.UserPasswordAuth([
1343 (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
1344 ])
1345 c['www']['authz'] = util.Authz(
1346 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1347 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
1348 )
1349
1350 c['services'] = []
1351 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1352 irc_host = ini.get("irc", "host")
1353 irc_port = 6667
1354 irc_chan = ini.get("irc", "channel")
1355 irc_nick = ini.get("irc", "nickname")
1356 irc_pass = None
1357
1358 if ini.has_option("irc", "port"):
1359 irc_port = ini.getint("irc", "port")
1360
1361 if ini.has_option("irc", "password"):
1362 irc_pass = ini.get("irc", "password")
1363
1364 irc = reporters.IRC(irc_host, irc_nick,
1365 port = irc_port,
1366 password = irc_pass,
1367 channels = [ irc_chan ],
1368 notify_events = [ 'exception', 'problem', 'recovery' ]
1369 )
1370
1371 c['services'].append(irc)
1372
1373 c['revlink'] = util.RevlinkMatch([
1374 r'https://git.openwrt.org/openwrt/(.*).git'
1375 ],
1376 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1377
1378 ####### DB URL
1379
1380 c['db'] = {
1381 # This specifies what database buildbot uses to store its state. You can leave
1382 # this at its default for all but the largest installations.
1383 'db_url' : "sqlite:///state.sqlite",
1384 }
1385
1386 c['buildbotNetUsageData'] = None