phase1: each worker runs a single build at a time
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41
42
43 if not os.path.exists("twistd.pid"):
44 with open("twistd.pid", "w") as pidfile:
45 pidfile.write("{}".format(os.getpid()))
46
47 # This is a sample buildmaster config file. It must be installed as
48 # 'master.cfg' in your buildmaster's base directory.
49
50 ini = configparser.ConfigParser()
51 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
52
53 if "general" not in ini or "phase1" not in ini:
54 raise ValueError("Fix your configuration")
55
56 inip1 = ini['phase1']
57
58 # Globals
59 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
60 scripts_dir = os.path.abspath("../scripts")
61
62 repo_url = ini['repo'].get("url")
63
64 rsync_defopts = ["-v", "-4", "--timeout=120"]
65
66 # Note: for rsync daemon destinations ("host::module" or "rsync://host/module"),
67 # "--contimeout=20" could additionally be appended to rsync_defopts.
68
69 branches = {}
70
71 def ini_parse_branch(section):
72 b = {}
73 name = section.get("name")
74
75 if not name:
76 raise ValueError("missing 'name' in " + repr(section))
77 if name in branches:
78 raise ValueError("duplicate branch name in " + repr(section))
79
80 b["name"] = name
81 b["bin_url"] = section.get("binary_url")
82 b["bin_key"] = section.get("binary_password")
83
84 b["src_url"] = section.get("source_url")
85 b["src_key"] = section.get("source_password")
86
87 b["gpg_key"] = section.get("gpg_key")
88
89 b["usign_key"] = section.get("usign_key")
90 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
91 b["usign_comment"] = section.get("usign_comment", usign_comment)
92
93 b["config_seed"] = section.get("config_seed")
94
95 b["kmod_archive"] = section.getboolean("kmod_archive", False)
96
97 branches[name] = b
98 log.msg("Configured branch: {}".format(name))
99
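# For illustration, a branch section parsed by ini_parse_branch() above might
# look like the following in config.ini (hypothetical names and values; only
# "name" is mandatory, the remaining keys are optional):
#
#   [branch openwrt-23.05]
#   name = openwrt-23.05
#   binary_url = rsync://user@example.org/bin
#   binary_password = secret
#   source_url = rsync://user@example.org/src
#   source_password = secret
#   usign_key = <base64-encoded usign secret key>
#   config_seed = CONFIG_DEVEL=y
#   kmod_archive = True
#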
100 # PB port can be either a numeric port or a connection string
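# e.g. port = 9989, or illustratively a Twisted strports string such as
# "tcp:9989:interface=127.0.0.1"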
101 pb_port = inip1.get("port") or 9989
102
103 # This is the dictionary that the buildmaster pays attention to. We also use
104 # a shorter alias to save typing.
105 c = BuildmasterConfig = {}
106
107 ####### PROJECT IDENTITY
108
109 # the 'title' string will appear at the top of this buildbot
110 # installation's web UI home page (linked to the
111 # 'titleURL') and is embedded in the page titles.
112
113 c['title'] = ini['general'].get("title")
114 c['titleURL'] = ini['general'].get("title_url")
115
116 # the 'buildbotURL' string should point to the location where the buildbot's
117 # internal web server (the web UI configured below) is visible. This
118 # typically uses the port number set via 'status_bind', but with an
119 # externally-visible host name which the buildbot cannot figure out
120 # without some help.
121
122 c['buildbotURL'] = inip1.get("buildbot_url")
123
124 ####### BUILDWORKERS
125
126 # The 'workers' list defines the set of recognized buildworkers. Each element is
127 # a Worker object, specifying a unique worker name and password. The same
128 # worker name and password must be configured on the worker.
129
130 c['workers'] = []
131 NetLocks = dict()
132
133 for section in ini.sections():
134 if section.startswith("branch "):
135 ini_parse_branch(ini[section])
136
137 if section.startswith("worker "):
138 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
139 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
140 sl_props = { 'dl_lock':None, 'ul_lock':None }
141 name = ini.get(section, "name")
142 password = ini.get(section, "password")
143 if ini.has_option(section, "dl_lock"):
144 lockname = ini.get(section, "dl_lock")
145 sl_props['dl_lock'] = lockname
146 if lockname not in NetLocks:
147 NetLocks[lockname] = locks.MasterLock(lockname)
148 if ini.has_option(section, "ul_lock"):
149 lockname = ini.get(section, "ul_lock")
150 sl_props['ul_lock'] = lockname
151 if lockname not in NetLocks:
152 NetLocks[lockname] = locks.MasterLock(lockname)
153 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
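# For illustration, a matching worker section in config.ini could look like
# this (hypothetical values; "phase" defaults to 1 when absent, and the two
# lock options are optional):
#
#   [worker 1]
#   name = builder-01
#   password = secret
#   phase = 1
#   dl_lock = mirror-a-dl
#   ul_lock = mirror-a-ul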
154
155 c['protocols'] = {'pb': {'port': pb_port}}
156
157 # coalesce builds
158 c['collapseRequests'] = True
159
160 # Reduce amount of backlog data
161 c['configurators'] = [util.JanitorConfigurator(
162 logHorizon=timedelta(days=3),
163 hour=6,
164 )]
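# The JanitorConfigurator above periodically purges old build logs from the
# database: with these settings, logs older than three days are removed in a
# daily run around 06:00.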
165
166 @defer.inlineCallbacks
167 def getNewestCompleteTime(bldr):
168 """Returns the complete_at of the latest completed and not SKIPPED
169 build request for this builder, or None if there are no such build
170 requests. We need to filter out SKIPPED requests because we're
171 using collapseRequests=True, which unfortunately marks all
172 previous requests as complete when a new buildset is created.
173
174 @returns: datetime instance or None, via Deferred
175 """
176
177 bldrid = yield bldr.getBuilderId()
178 completed = yield bldr.master.data.get(
179 ('builders', bldrid, 'buildrequests'),
180 [
181 resultspec.Filter('complete', 'eq', [True]),
182 resultspec.Filter('results', 'ne', [results.SKIPPED]),
183 ],
184 order=['-complete_at'], limit=1)
185 if not completed:
186 return
187
188 complete_at = completed[0]['complete_at']
189
190 last_build = yield bldr.master.data.get(
191 ('builds', ),
192 [
193 resultspec.Filter('builderid', 'eq', [bldrid]),
194 ],
195 order=['-started_at'], limit=1)
196
197 if last_build and last_build[0]:
198 last_complete_at = last_build[0]['complete_at']
199 if last_complete_at and (last_complete_at > complete_at):
200 return last_complete_at
201
202 return complete_at
203
204 @defer.inlineCallbacks
205 def prioritizeBuilders(master, builders):
206 """Returns the list of builders sorted by the completion time of their most
207 recent completed (and not skipped) build.
208
209 @returns: list of sorted builders
210 """
211
212 def is_building(bldr):
213 return bool(bldr.building) or bool(bldr.old_building)
214
215 def bldr_info(bldr):
216 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
217 d.addCallback(lambda complete_at: (complete_at, bldr))
218 return d
219
220 def bldr_sort(item):
221 (complete_at, bldr) = item
222
223 if bldr.name == "00_force_build":
224 date = datetime.min
225 complete_at = date.replace(tzinfo=tzutc())
226 return (complete_at, bldr.name)
227
228 if not complete_at:
229 date = datetime.min
230 complete_at = date.replace(tzinfo=tzutc())
231
232 if is_building(bldr):
233 date = datetime.max
234 complete_at = date.replace(tzinfo=tzutc())
235
236 return (complete_at, bldr.name)
237
238 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
239 results.sort(key=bldr_sort)
240
241 for r in results:
242 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
243
244 return [r[1] for r in results]
245
246 c['prioritizeBuilders'] = prioritizeBuilders
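# Illustrative ordering: the special "00_force_build" builder and builders that
# have never completed a build sort first (datetime.min), builders currently
# building sort last (datetime.max), and the remaining builders are ordered by
# oldest completion time, with the builder name as tie-breaker.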
247
248 ####### CHANGESOURCES
249
250 branchNames = [branches[b]["name"] for b in branches]
251
252 # find targets
253 targets = set()
254
255 def populateTargets():
256 log.msg("Populating targets, this will take time")
257 sourcegit = work_dir + '/source.git'
258 for branch in branchNames:
259 if os.path.isdir(sourcegit):
260 subprocess.call(["rm", "-rf", sourcegit])
261
262 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
263
264 os.makedirs(sourcegit + '/tmp', exist_ok=True)
265 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
266 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
267
268 while True:
269 line = findtargets.stdout.readline()
270 if not line:
271 break
272 ta = line.decode().strip().split(' ')
273 targets.add(ta[0])
274
275 subprocess.call(["rm", "-rf", sourcegit])
276
277 populateTargets()
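# populateTargets() fills 'targets' with "target/subtarget" pairs such as
# (illustrative) "ath79/generic" or "x86/64"; dump-target-info.pl prints one
# target per line and only the first whitespace-separated field is kept.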
278
279 # the 'change_source' setting tells the buildmaster how it should find out
280 # about source code changes. Here we poll repo_url for all configured branches.
281
282 c['change_source'] = []
283 c['change_source'].append(GitPoller(
284 repo_url,
285 workdir=work_dir+'/work.git', branches=branchNames,
286 pollAtLaunch=True, pollinterval=300))
287
288 ####### SCHEDULERS
289
290 # Configure the Schedulers, which decide how to react to incoming changes. In this
291 # case, a change on any configured branch triggers builds for all targets.
292
293 class TagChoiceParameter(BaseParameter):
294 spec_attributes = ["strict", "choices"]
295 type = "list"
296 strict = True
297
298 def __init__(self, name, label=None, **kw):
299 super().__init__(name, label, **kw)
300 self._choice_list = []
301
302 @property
303 def choices(self):
304 taglist = []
305 branchvers = []
306
307 for b in branchNames:
308 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
309 if basever:
310 branchvers.append(basever[1])
311
312 alltags = subprocess.Popen(
313 ['git', 'ls-remote', '--tags', repo_url],
314 stdout = subprocess.PIPE)
315
316 while True:
317 line = alltags.stdout.readline()
318
319 if not line:
320 break
321
322 (ref, tag) = line.split()
323
324 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
325
326 # only list tags matching configured branches
327 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
328 taglist.append(tagver[1])
329
330 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
331 taglist.insert(0, '')
332
333 self._choice_list = taglist
334
335 return self._choice_list
336
337 def updateFromKwargs(self, properties, kwargs, **unused):
338 tag = self.getFromKwargs(kwargs)
339 properties[self.name] = tag
340
341 # find the commit matching the tag
342 findrev = subprocess.Popen(['git', 'rev-parse', 'tags/'+tag], stdout=subprocess.PIPE, cwd=work_dir+'/work.git')
343 findrev.wait(timeout=10)
344 line = findrev.stdout.readline()
345
346 if findrev.returncode!=0 or not line:
347 raise ValidationError("Couldn't find tag")
348
349 properties['force_revision'] = line.decode().strip()
350
351 # find the branch matching the tag
352 branch = None
353 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
354 for b in branchNames:
355 if b.endswith(branchver[1]):
356 branch = b
357
358 if not branch:
359 raise ValidationError("Couldn't find branch")
360
361 properties['force_branch'] = branch
362
363 def parse_from_arg(self, s):
364 if self.strict and s not in self._choice_list:
365 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
366 return s
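# Illustrative walk-through: with a hypothetical branch "openwrt-23.05",
# branchvers becomes ["23.05"], so only tags like "v23.05.2" or "v23.05.0-rc2"
# are offered; the sort key appends "-z" to final releases so they rank above
# their release candidates. Selecting "v23.05.2" resolves force_revision via
# 'git rev-parse' and sets force_branch to "openwrt-23.05".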
367
368 c['schedulers'] = []
369 c['schedulers'].append(AnyBranchScheduler(
370 name = "all",
371 change_filter = util.ChangeFilter(branch=branchNames),
372 treeStableTimer = 15*60,
373 builderNames = list(targets)))
374
375 c['schedulers'].append(ForceScheduler(
376 name = "force",
377 buttonName = "Force builds",
378 label = "Force build details",
379 builderNames = [ "00_force_build" ],
380
381 codebases = [
382 util.CodebaseParameter(
383 "",
384 label = "Repository",
385 branch = util.FixedParameter(name = "branch", default = ""),
386 revision = util.FixedParameter(name = "revision", default = ""),
387 repository = util.FixedParameter(name = "repository", default = ""),
388 project = util.FixedParameter(name = "project", default = "")
389 )
390 ],
391
392 reason = util.StringParameter(
393 name = "reason",
394 label = "Reason",
395 default = "Trigger build",
396 required = True,
397 size = 80
398 ),
399
400 properties = [
401 util.ChoiceStringParameter(
402 name = "target",
403 label = "Build target",
404 default = "all",
405 choices = [ "all" ] + list(targets)
406 ),
407 TagChoiceParameter(
408 name = "tag",
409 label = "Build tag",
410 default = ""
411 )
412 ]
413 ))
414
415 ####### BUILDERS
416
417 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
418 # what steps, and which workers can execute them. Note that any particular build will
419 # only take place on one worker.
420
421 def IsNoMasterBuild(step):
422 return step.getProperty("branch") != "master"
423
424 def IsUsignEnabled(step):
425 branch = step.getProperty("branch")
426 return branch and branches[branch].get("usign_key")
427
428 def IsSignEnabled(step):
429 branch = step.getProperty("branch")
430 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
431
432 def IsKmodArchiveEnabled(step):
433 branch = step.getProperty("branch")
434 return branch and branches[branch].get("kmod_archive")
435
436 def GetBaseVersion(branch):
437 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
438 return branch.split('-')[1]
439 else:
440 return "master"
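# e.g. GetBaseVersion("openwrt-23.05") returns "23.05" (illustrative branch
# name); anything not matching "<prefix>-<major>.<minor>" returns "master".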
441
442 @properties.renderer
443 def GetVersionPrefix(props):
444 branch = props.getProperty("branch")
445 basever = GetBaseVersion(branch)
446 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
447 return "%s/" % props["tag"][1:]
448 elif basever != "master":
449 return "%s-SNAPSHOT/" % basever
450 else:
451 return ""
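# Illustrative results: a forced tag "v23.05.2" yields "23.05.2/", a release
# branch such as "openwrt-23.05" yields "23.05-SNAPSHOT/", and master yields "".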
452
453 @util.renderer
454 def GetConfigSeed(props):
455 branch = props.getProperty("branch")
456 return branch and branches[branch].get("config_seed") or ""
457
458 @util.renderer
459 def GetRsyncParams(props, srcorbin, urlorkey):
460 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
461 branch = props.getProperty("branch")
462 opt = srcorbin + "_" + urlorkey
463 return branch and branches[branch].get(opt)
464
465 @util.renderer
466 def GetUsignKey(props):
467 branch = props.getProperty("branch")
468 return branch and branches[branch].get("usign_key")
469
470 def GetNextBuild(builder, requests):
471 for r in requests:
472 if r.properties:
473 # order tagged builds first
474 if r.properties.hasProperty("tag"):
475 return r
476 # then prefer requests in configured branch order
477 pbranch = r.properties.getProperty("branch")
478 for name in branchNames:
479 if pbranch == name:
480 return r
481
482 r = requests[0]
483 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
484 return r
485
486 def MakeEnv(overrides=None, tryccache=False):
487 env = {
488 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
489 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
490 }
491 if tryccache:
492 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
493 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
494 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
495 else:
496 env['CC'] = env['CCC']
497 env['CXX'] = env['CCXX']
498 env['CCACHE'] = ''
499 if overrides is not None:
500 env.update(overrides)
501 return env
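# Illustrative result of MakeEnv(tryccache=True) once the property steps below
# have run: CCC/CCXX hold the detected compilers, CC/CXX point at the
# ccache_cc.sh/ccache_cxx.sh wrapper scripts in the build directory, and CCACHE
# holds the detected ccache path (or an empty string if ccache is unavailable).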
502
503 @properties.renderer
504 def NetLockDl(props, extralock=None):
505 lock = None
506 if props.hasProperty("dl_lock"):
507 lock = NetLocks[props["dl_lock"]]
508 if lock is not None:
509 return [lock.access('exclusive')]
510 else:
511 return []
512
513 @properties.renderer
514 def NetLockUl(props):
515 lock = None
516 if props.hasProperty("ul_lock"):
517 lock = NetLocks[props["ul_lock"]]
518 if lock is not None:
519 return [lock.access('exclusive')]
520 else:
521 return []
522
523 def IsTargetSelected(target):
524 def CheckTargetProperty(step):
525 selected_target = step.getProperty("target", "all")
526 if selected_target != "all" and selected_target != target:
527 return False
528 return True
529
530 return CheckTargetProperty
531
532 @util.renderer
533 def UsignSec2Pub(props):
534 branch = props.getProperty("branch")
535 try:
536 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
537 seckey = branches[branch].get("usign_key")
538 seckey = base64.b64decode(seckey)
539 except Exception:
540 return None
541
542 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
543 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
544
545
546 c['builders'] = []
547
548 workerNames = [ ]
549
550 for worker in c['workers']:
551 workerNames.append(worker.workername)
552
553 force_factory = BuildFactory()
554
555 c['builders'].append(BuilderConfig(
556 name = "00_force_build",
557 workernames = workerNames,
558 factory = force_factory))
559
560 for target in targets:
561 ts = target.split('/')
562
563 factory = BuildFactory()
564
565 # setup shared work directory if required
566 factory.addStep(ShellCommand(
567 name = "sharedwd",
568 descriptionDone = "Shared work directory set up",
569 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
570 workdir = ".",
571 haltOnFailure = True,
572 ))
573
574 # find number of cores
575 factory.addStep(SetPropertyFromCommand(
576 name = "nproc",
577 property = "nproc",
578 description = "Finding number of CPUs",
579 command = ["nproc"],
580 ))
581
582 # find gcc and g++ compilers
583 factory.addStep(FileDownload(
584 name = "dlfindbinpl",
585 mastersrc = scripts_dir + '/findbin.pl',
586 workerdest = "../findbin.pl",
587 mode = 0o755,
588 ))
589
590 factory.addStep(SetPropertyFromCommand(
591 name = "gcc",
592 property = "cc_command",
593 description = "Finding gcc command",
594 command = ["../findbin.pl", "gcc", "", ""],
595 haltOnFailure = True,
596 ))
597
598 factory.addStep(SetPropertyFromCommand(
599 name = "g++",
600 property = "cxx_command",
601 description = "Finding g++ command",
602 command = ["../findbin.pl", "g++", "", ""],
603 haltOnFailure = True,
604 ))
605
606 # see if ccache is available
607 factory.addStep(SetPropertyFromCommand(
608 name = "ccache",
609 property = "ccache_command",
610 description = "Testing for ccache command",
611 command = ["which", "ccache"],
612 haltOnFailure = False,
613 flunkOnFailure = False,
614 warnOnFailure = False,
615 hideStepIf = lambda r, s: r==results.FAILURE,
616 ))
617
618 # check out the source
619 # Git() runs:
620 # if repo doesn't exist: 'git clone repourl'
621 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -f -f -d -x'; both only apply with mode='full'
622 # git cat-file -e <commit>
623 # git checkout -f <commit>
624 # git checkout -B <branch>
625 # git rev-parse HEAD
626 factory.addStep(Git(
627 name = "git",
628 repourl = repo_url,
629 mode = 'full',
630 method = 'fresh',
631 locks = NetLockDl,
632 haltOnFailure = True,
633 ))
634
635 # update remote refs
636 factory.addStep(ShellCommand(
637 name = "fetchrefs",
638 description = "Fetching Git remote refs",
639 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
640 haltOnFailure = True,
641 ))
642
643 # Verify that Git HEAD points to a tag or branch
644 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
645 factory.addStep(ShellCommand(
646 name = "gitverify",
647 description = "Ensure that Git HEAD is pointing to a branch or tag",
648 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
649 haltOnFailure = True,
650 ))
651
652 factory.addStep(ShellCommand(
653 name = "rmtmp",
654 description = "Remove tmp folder",
655 command=["rm", "-rf", "tmp/"],
656 ))
657
658 # feed
659 factory.addStep(ShellCommand(
660 name = "rmfeedlinks",
661 description = "Remove feed symlinks",
662 command=["rm", "-rf", "package/feeds/"],
663 ))
664
665 factory.addStep(StringDownload(
666 name = "ccachecc",
667 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
668 workerdest = "../ccache_cc.sh",
669 mode = 0o755,
670 ))
671
672 factory.addStep(StringDownload(
673 name = "ccachecxx",
674 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
675 workerdest = "../ccache_cxx.sh",
676 mode = 0o755,
677 ))
678
679 # feed
680 factory.addStep(ShellCommand(
681 name = "updatefeeds",
682 description = "Updating feeds",
683 command=["./scripts/feeds", "update"],
684 env = MakeEnv(tryccache=True),
685 haltOnFailure = True,
686 locks = NetLockDl,
687 ))
688
689 # feed
690 factory.addStep(ShellCommand(
691 name = "installfeeds",
692 description = "Installing feeds",
693 command=["./scripts/feeds", "install", "-a"],
694 env = MakeEnv(tryccache=True),
695 haltOnFailure = True,
696 ))
697
698 # seed config
699 factory.addStep(StringDownload(
700 name = "dlconfigseed",
701 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
702 workerdest = ".config",
703 mode = 0o644,
704 ))
705
706 # configure
707 factory.addStep(ShellCommand(
708 name = "newconfig",
709 descriptionDone = ".config seeded",
710 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
711 ))
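# For a hypothetical target "ath79/generic" with usign enabled, the step above
# appends roughly:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y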
712
713 factory.addStep(ShellCommand(
714 name = "delbin",
715 description = "Removing output directory",
716 command = ["rm", "-rf", "bin/"],
717 ))
718
719 factory.addStep(ShellCommand(
720 name = "defconfig",
721 description = "Populating .config",
722 command = ["make", "defconfig"],
723 env = MakeEnv(),
724 ))
725
726 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
727 factory.addStep(ShellCommand(
728 name = "checkarch",
729 description = "Checking architecture",
730 descriptionDone = "Architecture validated",
731 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
732 logEnviron = False,
733 want_stdout = False,
734 want_stderr = False,
735 haltOnFailure = True,
736 flunkOnFailure = False, # this is not a build FAILURE
737 ))
738
739 # find libc suffix
740 factory.addStep(SetPropertyFromCommand(
741 name = "libc",
742 property = "libc",
743 description = "Finding libc suffix",
744 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
745 ))
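# Illustrative suffixes: CONFIG_LIBC="musl" yields an empty 'libc' property,
# while e.g. CONFIG_LIBC="glibc" yields "-glibc"; the suffix is appended to the
# bin/targets/<target>/<subtarget> directory name in later steps.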
746
747 # install build key
748 factory.addStep(StringDownload(
749 name = "dlkeybuildpub",
750 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
751 workerdest = "key-build.pub",
752 mode = 0o600,
753 doStepIf = IsUsignEnabled,
754 ))
755
756 factory.addStep(StringDownload(
757 name = "dlkeybuild",
758 s = "# fake private key",
759 workerdest = "key-build",
760 mode = 0o600,
761 doStepIf = IsUsignEnabled,
762 ))
763
764 factory.addStep(StringDownload(
765 name = "dlkeybuilducert",
766 s = "# fake certificate",
767 workerdest = "key-build.ucert",
768 mode = 0o600,
769 doStepIf = IsUsignEnabled,
770 ))
771
772 # prepare dl
773 factory.addStep(ShellCommand(
774 name = "dldir",
775 description = "Preparing dl/",
776 descriptionDone = "dl/ prepared",
777 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
778 workdir = Property("builddir"),
779 logEnviron = False,
780 want_stdout = False,
781 ))
782
783 # prepare tar
784 factory.addStep(ShellCommand(
785 name = "dltar",
786 description = "Building and installing GNU tar",
787 descriptionDone = "GNU tar built and installed",
788 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
789 env = MakeEnv(tryccache=True),
790 haltOnFailure = True,
791 ))
792
793 # populate dl
794 factory.addStep(ShellCommand(
795 name = "dlrun",
796 description = "Populating dl/",
797 descriptionDone = "dl/ populated",
798 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
799 env = MakeEnv(),
800 logEnviron = False,
801 locks = NetLockDl,
802 ))
803
804 factory.addStep(ShellCommand(
805 name = "cleanbase",
806 description = "Cleaning base-files",
807 command=["make", "package/base-files/clean", "V=s"],
808 ))
809
810 # build
811 factory.addStep(ShellCommand(
812 name = "tools",
813 description = "Building and installing tools",
814 descriptionDone = "Tools built and installed",
815 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
816 env = MakeEnv(tryccache=True),
817 haltOnFailure = True,
818 ))
819
820 factory.addStep(ShellCommand(
821 name = "toolchain",
822 description = "Building and installing toolchain",
823 descriptionDone = "Toolchain built and installed",
824 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
825 env = MakeEnv(),
826 haltOnFailure = True,
827 ))
828
829 factory.addStep(ShellCommand(
830 name = "kmods",
831 description = "Building kmods",
832 descriptionDone = "Kmods built",
833 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
834 env = MakeEnv(),
835 haltOnFailure = True,
836 ))
837
838 # find kernel version
839 factory.addStep(SetPropertyFromCommand(
840 name = "kernelversion",
841 property = "kernelversion",
842 description = "Finding the effective Kernel version",
843 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
844 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
845 ))
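# The resulting "kernelversion" property has the form
# "<version>-<release>-<vermagic>", e.g. (hypothetical) "5.15.137-1-abc123...",
# and is used below to name the per-kernel kmods/ directories.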
846
847 factory.addStep(ShellCommand(
848 name = "pkgclean",
849 description = "Cleaning up package build",
850 descriptionDone = "Package build cleaned up",
851 command=["make", "package/cleanup", "V=s"],
852 ))
853
854 factory.addStep(ShellCommand(
855 name = "pkgbuild",
856 description = "Building packages",
857 descriptionDone = "Packages built",
858 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
859 env = MakeEnv(),
860 haltOnFailure = True,
861 ))
862
863 factory.addStep(ShellCommand(
864 name = "pkginstall",
865 description = "Installing packages",
866 descriptionDone = "Packages installed",
867 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
868 env = MakeEnv(),
869 haltOnFailure = True,
870 ))
871
872 factory.addStep(ShellCommand(
873 name = "pkgindex",
874 description = "Indexing packages",
875 descriptionDone = "Packages indexed",
876 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
877 env = MakeEnv(),
878 haltOnFailure = True,
879 ))
880
881 factory.addStep(ShellCommand(
882 name = "images",
883 description = "Building and installing images",
884 descriptionDone = "Images built and installed",
885 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
886 env = MakeEnv(),
887 haltOnFailure = True,
888 ))
889
890 factory.addStep(ShellCommand(
891 name = "buildinfo",
892 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
893 command = "make -j1 buildinfo V=s || true",
894 env = MakeEnv(),
895 haltOnFailure = True,
896 ))
897
898 factory.addStep(ShellCommand(
899 name = "json_overview_image_info",
900 description = "Generating profiles.json in target folder",
901 command = "make -j1 json_overview_image_info V=s || true",
902 env = MakeEnv(),
903 haltOnFailure = True,
904 ))
905
906 factory.addStep(ShellCommand(
907 name = "checksums",
908 description = "Calculating checksums",
909 descriptionDone = "Checksums calculated",
910 command=["make", "-j1", "checksum", "V=s"],
911 env = MakeEnv(),
912 haltOnFailure = True,
913 ))
914
915 factory.addStep(ShellCommand(
916 name = "kmoddir",
917 descriptionDone = "Kmod directory created",
918 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
919 haltOnFailure = True,
920 doStepIf = IsKmodArchiveEnabled,
921 ))
922
923 factory.addStep(ShellCommand(
924 name = "kmodprepare",
925 description = "Preparing kmod archive",
926 descriptionDone = "Kmod archive prepared",
927 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
928 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
929 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
930 haltOnFailure = True,
931 doStepIf = IsKmodArchiveEnabled,
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "kmodindex",
936 description = "Indexing kmod archive",
937 descriptionDone = "Kmod archive indexed",
938 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
939 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
940 env = MakeEnv(),
941 haltOnFailure = True,
942 doStepIf = IsKmodArchiveEnabled,
943 ))
944
945 # sign: pack the sha256sums and Packages indexes, sign them on the master, then fetch the signatures back
946 factory.addStep(MasterShellCommand(
947 name = "signprepare",
948 descriptionDone = "Temporary signing directory prepared",
949 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
950 haltOnFailure = True,
951 doStepIf = IsSignEnabled,
952
953 ))
954
955 factory.addStep(ShellCommand(
956 name = "signpack",
957 description = "Packing files to sign",
958 descriptionDone = "Files to sign packed",
959 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
960 haltOnFailure = True,
961 doStepIf = IsSignEnabled,
962 ))
963
964 factory.addStep(FileUpload(
965 workersrc = "sign.tar.gz",
966 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
967 haltOnFailure = True,
968 doStepIf = IsSignEnabled,
969 ))
970
971 factory.addStep(MasterShellCommand(
972 name = "signfiles",
973 description = "Signing files",
974 descriptionDone = "Files signed",
975 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
976 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
977 haltOnFailure = True,
978 doStepIf = IsSignEnabled,
979 ))
980
981 factory.addStep(FileDownload(
982 name = "dlsigntargz",
983 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
984 workerdest = "sign.tar.gz",
985 haltOnFailure = True,
986 doStepIf = IsSignEnabled,
987 ))
988
989 factory.addStep(ShellCommand(
990 name = "signunpack",
991 description = "Unpacking signed files",
992 descriptionDone = "Signed files unpacked",
993 command = ["tar", "-xzf", "sign.tar.gz"],
994 haltOnFailure = True,
995 doStepIf = IsSignEnabled,
996 ))
997
998 # upload
999 factory.addStep(ShellCommand(
1000 name = "dirprepare",
1001 descriptionDone = "Upload directory structure prepared",
1002 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1003 haltOnFailure = True,
1004 ))
1005
1006 factory.addStep(ShellCommand(
1007 name = "linkprepare",
1008 descriptionDone = "Repository symlink prepared",
1009 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1010 doStepIf = IsNoMasterBuild,
1011 haltOnFailure = True,
1012 ))
1013
1014 factory.addStep(ShellCommand(
1015 name = "kmoddirprepare",
1016 descriptionDone = "Kmod archive upload directory prepared",
1017 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1018 haltOnFailure = True,
1019 doStepIf = IsKmodArchiveEnabled,
1020 ))
1021
1022 factory.addStep(ShellCommand(
1023 name = "dirupload",
1024 description = "Uploading directory structure",
1025 descriptionDone = "Directory structure uploaded",
1026 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1027 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1028 haltOnFailure = True,
1029 logEnviron = False,
1030 locks = NetLockUl,
1031 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1032 ))
1033
1034 # download remote sha256sums to 'target-sha256sums'
1035 factory.addStep(ShellCommand(
1036 name = "target-sha256sums",
1037 description = "Fetching remote sha256sums for target",
1038 descriptionDone = "Remote sha256sums for target fetched",
1039 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1040 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1041 logEnviron = False,
1042 haltOnFailure = False,
1043 flunkOnFailure = False,
1044 warnOnFailure = False,
1045 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1046 ))
1047
1048 # build list of files to upload
1049 factory.addStep(FileDownload(
1050 name = "dlsha2rsyncpl",
1051 mastersrc = scripts_dir + '/sha2rsync.pl',
1052 workerdest = "../sha2rsync.pl",
1053 mode = 0o755,
1054 ))
1055
1056 factory.addStep(ShellCommand(
1057 name = "buildlist",
1058 description = "Building list of files to upload",
1059 descriptionDone = "List of files to upload built",
1060 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1061 haltOnFailure = True,
1062 ))
1063
1064 factory.addStep(FileDownload(
1065 name = "dlrsync.sh",
1066 mastersrc = scripts_dir + '/rsync.sh',
1067 workerdest = "../rsync.sh",
1068 mode = 0o755,
1069 ))
1070
1071 # upload new files and update existing ones
1072 factory.addStep(ShellCommand(
1073 name = "targetupload",
1074 description = "Uploading target files",
1075 descriptionDone = "Target files uploaded",
1076 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1077 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1078 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1079 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1080 haltOnFailure = True,
1081 logEnviron = False,
1082 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1083 ))
1084
1085 # delete files which don't exist locally
1086 factory.addStep(ShellCommand(
1087 name = "targetprune",
1088 description = "Pruning target files",
1089 descriptionDone = "Target files pruned",
1090 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1091 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1092 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1093 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1094 haltOnFailure = True,
1095 logEnviron = False,
1096 locks = NetLockUl,
1097 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1098 ))
1099
1100 factory.addStep(ShellCommand(
1101 name = "kmodupload",
1102 description = "Uploading kmod archive",
1103 descriptionDone = "Kmod archive uploaded",
1104 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1105 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1106 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1107 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1108 haltOnFailure = True,
1109 logEnviron = False,
1110 locks = NetLockUl,
1111 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
1112 ))
1113
1114 factory.addStep(ShellCommand(
1115 name = "sourcelist",
1116 description = "Finding source archives to upload",
1117 descriptionDone = "Source archives to upload found",
1118 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1119 haltOnFailure = True,
1120 ))
1121
1122 factory.addStep(ShellCommand(
1123 name = "sourceupload",
1124 description = "Uploading source archives",
1125 descriptionDone = "Source archives uploaded",
1126 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1127 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1128 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1129 haltOnFailure = True,
1130 logEnviron = False,
1131 locks = NetLockUl,
1132 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1133 ))
1134
1135 factory.addStep(ShellCommand(
1136 name = "df",
1137 description = "Reporting disk usage",
1138 command=["df", "-h", "."],
1139 env={'LC_ALL': 'C'},
1140 logEnviron = False,
1141 haltOnFailure = False,
1142 flunkOnFailure = False,
1143 warnOnFailure = False,
1144 alwaysRun = True,
1145 ))
1146
1147 factory.addStep(ShellCommand(
1148 name = "du",
1149 description = "Reporting estimated file space usage",
1150 command=["du", "-sh", "."],
1151 env={'LC_ALL': 'C'},
1152 logEnviron = False,
1153 haltOnFailure = False,
1154 flunkOnFailure = False,
1155 warnOnFailure = False,
1156 alwaysRun = True,
1157 ))
1158
1159 factory.addStep(ShellCommand(
1160 name = "ccachestat",
1161 description = "Reporting ccache stats",
1162 command=["ccache", "-s"],
1163 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1164 logEnviron = False,
1165 want_stderr = False,
1166 haltOnFailure = False,
1167 flunkOnFailure = False,
1168 warnOnFailure = False,
1169 hideStepIf = lambda r, s: r==results.FAILURE,
1170 ))
1171
1172 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1173
1174 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1175 force_factory.addStep(steps.Trigger(
1176 name = "trigger_%s" % target,
1177 description = "Triggering %s build" % target,
1178 schedulerNames = [ "trigger_%s" % target ],
1179 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
1180 set_properties = { "reason": Property("reason"), "tag": Property("tag"), },
1181 doStepIf = IsTargetSelected(target),
1182 ))
1183
1184
1185 ####### STATUS TARGETS
1186
1187 # Build results are published through the web UI configured in c['www'] below
1188 # and through the reporters added to c['services'], such as the optional
1189 # IRC bot.
1190
1191 if "status_bind" in inip1:
1192 c['www'] = {
1193 'port': inip1.get("status_bind"),
1194 'plugins': {
1195 'waterfall_view': True,
1196 'console_view': True,
1197 'grid_view': True
1198 }
1199 }
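# Illustrative config.ini values: status_bind may be a plain port ("8010") or a
# strports string such as "tcp:8010:interface=0.0.0.0"; status_user and
# status_password (both hypothetical) enable the admin login configured below.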
1200
1201 if "status_user" in inip1 and "status_password" in inip1:
1202 c['www']['auth'] = util.UserPasswordAuth([
1203 (inip1.get("status_user"), inip1.get("status_password"))
1204 ])
1205 c['www']['authz'] = util.Authz(
1206 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1207 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1208 )
1209
1210 c['services'] = []
1211 if ini.has_section("irc"):
1212 iniirc = ini['irc']
1213 irc_host = iniirc.get("host", None)
1214 irc_port = iniirc.getint("port", 6667)
1215 irc_chan = iniirc.get("channel", None)
1216 irc_nick = iniirc.get("nickname", None)
1217 irc_pass = iniirc.get("password", None)
1218
1219 if irc_host and irc_nick and irc_chan:
1220 irc = reporters.IRC(irc_host, irc_nick,
1221 port = irc_port,
1222 password = irc_pass,
1223 channels = [ irc_chan ],
1224 notify_events = [ 'exception', 'problem', 'recovery' ]
1225 )
1226
1227 c['services'].append(irc)
1228
1229 c['revlink'] = util.RevlinkMatch([
1230 r'https://git.openwrt.org/openwrt/(.*).git'
1231 ],
1232 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1233
1234 ####### DB URL
1235
1236 c['db'] = {
1237 # This specifies what database buildbot uses to store its state. You can leave
1238 # this at its default for all but the largest installations.
1239 'db_url' : "sqlite:///state.sqlite",
1240 }
1241
1242 c['buildbotNetUsageData'] = None