2 # ex: set syntax=python:
9 from buildbot import locks
# Load runtime settings from config.ini; NB: the relative path means the
# master must be started from its base directory.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# Externally visible URL of this buildmaster; also passed to the
# slave-side helper scripts (cleanup.sh) further below.
buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
25 # The 'slaves' list defines the set of recognized buildslaves. Each element is
26 # a BuildSlave object, specifying a unique slave name and password. The same
27 # slave name and password must be configured on the slave.
28 from buildbot.buildslave import BuildSlave
# Optional [general] tunables; the corresponding default assignments are on
# lines elided from this excerpt.
if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
	persistent = ini.getboolean("general", "persistent")

# Number of builds running outside this master that share the slaves' CPUs;
# used by GetNumJobs() when dividing up cores.
if ini.has_option("general", "other_builds"):
	other_builds = ini.getint("general", "other_builds")

# Build-tree expiry age in seconds; checked by expire.sh below when > 0.
if ini.has_option("general", "expire"):
	tree_expire = ini.getint("general", "expire")

# One "[slave <id>]" section per buildslave; name/password are mandatory,
# "builds" optionally caps concurrent builds on that slave (max_builds).
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
c['slavePortnum'] = slave_port

# Collapse queued build requests for the same builder into a single build.
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
# Absolute base directory of this master installation; holds the helper
# scripts, signing keys and the source.git checkout referenced below.
home_dir = os.path.abspath(ini.get("general", "homedir"))

# rsync destination and password for uploading built packages / faillogs.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for the shared source download (dl/) cache.
if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# Glob used to pick the SDK archive on the download server; overridable.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# GnuPG signing defaults; each may be overridden by the [gpg] section.
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
	gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
	gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
	gpg_passfile = ini.get("gpg", "passfile")
# Ask dumpinfo.pl inside the source tree for the list of package
# architectures; one builder per architecture is created further below.
findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

# NOTE(review): the read loop around the next three lines is elided from
# this excerpt; each output line is "archname target/subtarget ...".
line = findarches.stdout.readline()
at = line.strip().split()
archnames.append(at[0])

# Maps feed repository URL -> branch being polled; filled by
# parse_feed_entry() and consulted by branch_change_filter().
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
def parse_feed_entry(line):
	# Register a GitPoller for every "src-git" feed definition so commits
	# to a feed repository trigger package rebuilds, and remember which
	# branch is polled for each repository (used by branch_change_filter).
	# Feed lines look like: "src-git <name> <url>[;<branch>]".
	parts = line.strip().split()
	# Skip blank lines, comments and malformed entries; the previous code
	# raised IndexError on lines with fewer than three fields (e.g. the
	# blank lines present in feeds.conf.default).
	if len(parts) < 3 or parts[0] != "src-git":
		return
	url = parts[2].strip().split(';')
	branch = url[1] if len(url) > 1 else 'master'
	feedbranches[url[0]] = branch
	c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
# Ask the SDK's makefile for the base feed definition (val.BASE_FEED dumps
# the variable) and register a poller for it.
make = subprocess.Popen(['make', '--no-print-directory', '-C', home_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=home_dir+'/source.git'), stdout = subprocess.PIPE)

# NOTE(review): the read loop around the next two lines is elided from
# this excerpt.
line = make.stdout.readline()
parse_feed_entry(line)

# Register pollers for every default feed as well.
# NOTE(review): the "for line in f:" header is elided from this excerpt.
with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
	parse_feed_entry(line)
163 # Configure the Schedulers, which decide how to react to incoming changes. In this
164 # case, just kick off a 'basebuild' build
def branch_change_filter(change):
	# Accept a change only when it arrived on the branch that we poll for
	# its repository, as recorded in feedbranches by parse_feed_entry().
	tracked_branch = feedbranches[change.repository]
	return tracked_branch == change.branch
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter

# Kick off all architecture builders whenever a tracked feed branch
# changes.  NOTE(review): the name=/treeStableTimer= arguments of this
# call are on lines elided from this excerpt.
c['schedulers'].append(SingleBranchScheduler(
	change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
	builderNames=archnames))

# Allow manual (forced) builds of any architecture from the web UI.
c['schedulers'].append(ForceScheduler(
	builderNames=archnames))
185 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
186 # what steps, and which slaves can execute them. Note that any particular build will
187 # only take place on one slave.
189 from buildbot.process.factory import BuildFactory
190 from buildbot.steps.source import Git
191 from buildbot.steps.shell import ShellCommand
192 from buildbot.steps.shell import SetProperty
193 from buildbot.steps.transfer import FileUpload
194 from buildbot.steps.transfer import FileDownload
195 from buildbot.steps.transfer import StringDownload
196 from buildbot.steps.master import MasterShellCommand
197 from buildbot.process.properties import WithProperties
def GetDirectorySuffix(props):
	# Derive the "-MM.mm" release directory suffix from the SDK's reported
	# version number, e.g. "21.02.3" -> "-21.02", so packages land next to
	# the matching release images.  The pattern accepts "MM.mm.patch",
	# "MM.mm.patch-rcN" and "MM.mm-SNAPSHOT"; anything else (e.g. trunk
	# builds) yields no suffix.  Raw string avoids the invalid "\."
	# escape-sequence warning of the previous non-raw pattern.
	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if props.hasProperty("release_version"):
		m = verpat.match(props["release_version"])
		# Guard the non-matching case: without it m is None and
		# m.group() raises AttributeError during build configuration.
		if m is not None:
			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
	return ""
def GetNumJobs(props):
	# Compute the "make -jN" parallelism for a build: divide the slave's
	# CPU count by the number of builds that may run on it concurrently
	# (its max_builds cap plus any configured external other_builds) and
	# add one.  Integer division is intentional (Python 2 file).
	if props.hasProperty("slavename") and props.hasProperty("nproc"):
		return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
	else:
		# Fall back to a single job when the properties are unavailable;
		# returning None here would break WithProperties("-j%(jobs)d").
		return 1
# NOTE(review): fragment of a property-renderer (presumably GetCwd, used
# for CCACHE_BASEDIR below) whose def line is elided from this excerpt;
# it prefers the build directory over the work directory.
if props.hasProperty("builddir"):
	return props["builddir"]
elif props.hasProperty("workdir"):
	return props["workdir"]
# Per-slave lock serialising download steps on any one slave.
dlLock = locks.SlaveLock("slave_dl")

# Collect all configured slave names; every arch builder may run on any
# slave.  NOTE(review): the slaveNames initialisation is elided from this
# excerpt.
for slave in c['slaves']:
	slaveNames.append(slave.slavename)
# NOTE(review): everything from here to the BuilderConfig registration runs
# inside a per-architecture loop ("for arch in arches:") whose header is
# elided from this excerpt; arch[0] is the arch name, arch[1] the
# "target/subtarget" it was discovered on.
ts = arch[1].split('/')

factory = BuildFactory()

# find number of cores
factory.addStep(SetProperty(
	description = "Finding number of CPUs",
	command = ["nproc"]))

# Ship the slave-side cleanup helper (0755 octal literal => Python 2 file).
factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

# Full wipe of previous build state.  NOTE(review): the condition choosing
# between full and single cleanup, and the closing parentheses of both
# calls, are elided from this excerpt.
factory.addStep(ShellCommand(
	description = "Cleaning previous builds",
	command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
	haltOnFailure = True,

# Lighter cleanup of just this builder's work area.
factory.addStep(ShellCommand(
	description = "Cleaning work area",
	command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
	haltOnFailure = True,
# expire tree if needed
# NOTE(review): the leading "if" branch of this elif is elided from this
# excerpt; tree_expire comes from [general] expire in config.ini.
elif tree_expire > 0:
	factory.addStep(FileDownload(
		mastersrc = home_dir+"/expire.sh",
		slavedest = "../expire.sh",

	# expire.sh decides slave-side whether the tree is older than
	# tree_expire seconds and wipes it if so.
	factory.addStep(ShellCommand(
		description = "Checking for build tree expiry",
		command = ["./expire.sh", str(tree_expire)],
		haltOnFailure = True,

factory.addStep(ShellCommand(
	description = "Preparing SDK directory",
	command = ["mkdir", "-p", "sdk"],
	haltOnFailure = True))
# Fetch the per-target SDK archive over rsync; credentials are passed via
# the RSYNC_PASSWORD environment variable, never on the command line.
factory.addStep(ShellCommand(
	name = "downloadsdk",
	description = "Downloading SDK archive",
	command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
	env={'RSYNC_PASSWORD': rsync_sdk_key},
	haltOnFailure = True,

# Unpack into a staging directory first ...
factory.addStep(ShellCommand(
	description = "Unpacking SDK archive",
	command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
	haltOnFailure = True))

# ... then sync only changed files into sdk/ so unchanged files keep their
# timestamps (avoids needless rebuilds).
factory.addStep(ShellCommand(
	description = "Updating SDK",
	command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
	haltOnFailure = True))
# Drop a tiny makefile into the SDK that prints its VERSION_NUMBER.
factory.addStep(StringDownload(
	name = "writeversionmk",
	s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
	slavedest = "sdk/getversion.mk",

# Capture the SDK's release version as a build property; consumed by
# GetDirectorySuffix() to pick the upload directory.
factory.addStep(SetProperty(
	property = "release_version",
	description = "Finding SDK release version",
	workdir = "build/sdk",
	command = ["make", "-f", "getversion.mk"]))

# Install the package signing keypair (0600: private key, keep it private).
factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))
# Share one download cache across builds by symlinking sdk/dl to $HOME/dl.
factory.addStep(ShellCommand(
	description = "Preparing download directory",
	command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
	haltOnFailure = True))

# Start from a fresh default configuration for every run.
factory.addStep(ShellCommand(
	description = "Preparing SDK configuration",
	workdir = "build/sdk",
	command = ["sh", "-c", "rm -f .config && make defconfig"]))

# Slave-side ccache setup helper.  NOTE(review): the closing parentheses of
# this call are elided from this excerpt.
factory.addStep(FileDownload(
	mastersrc = home_dir+'/ccache.sh',
	slavedest = 'sdk/ccache.sh',

factory.addStep(ShellCommand(
	description = "Preparing ccache",
	workdir = "build/sdk",
	command = ["./ccache.sh"],
	haltOnFailure = True))

# Update and install all package feeds inside the SDK.
factory.addStep(ShellCommand(
	name = "updatefeeds",
	description = "Updating feeds",
	workdir = "build/sdk",
	command = ["./scripts/feeds", "update", "-f"],
	haltOnFailure = True))

factory.addStep(ShellCommand(
	name = "installfeeds",
	description = "Installing feeds",
	workdir = "build/sdk",
	command = ["./scripts/feeds", "install", "-a"],
	haltOnFailure = True))
# Remove stale failure logs so this run's faillogs reflect only this run.
factory.addStep(ShellCommand(
	description = "Clearing failure logs",
	workdir = "build/sdk",
	command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
	haltOnFailure = False

# The actual package build: -jN is computed per-slave by GetNumJobs;
# IGNORE_ERRORS keeps going past individual package failures; packages are
# signed in-tree (CONFIG_SIGNED_PACKAGES) with the key installed above.
factory.addStep(ShellCommand(
	description = "Building packages",
	workdir = "build/sdk",
	command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
	env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
	haltOnFailure = True))

# Record the exact feed revisions used, next to the built packages.
factory.addStep(ShellCommand(
	name = "mkfeedsconf",
	description = "Generating pinned feeds.conf",
	workdir = "build/sdk",
	command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
# GPG-sign the package indexes on the master (the slaves never see the GPG
# key): pack the Packages files, upload the tarball, sign it with
# signall.sh on the master, then download and unpack the signed result.
# NOTE(review): several closing parentheses in this section are elided
# from this excerpt.
if gpg_keyid is not None:
	factory.addStep(MasterShellCommand(
		name = "signprepare",
		description = "Preparing temporary signing directory",
		command = ["mkdir", "-p", "%s/signing" %(home_dir)],

	factory.addStep(ShellCommand(
		description = "Packing files to sign",
		workdir = "build/sdk",
		command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

	factory.addStep(FileUpload(
		slavesrc = "sdk/sign.tar.gz",
		masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),

	factory.addStep(MasterShellCommand(
		description = "Signing files",
		command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
		env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},

	factory.addStep(FileDownload(
		mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
		slavedest = "sdk/sign.tar.gz",

	# Overwrite the unsigned indexes in place with the signed ones.
	factory.addStep(ShellCommand(
		description = "Unpacking signed files",
		workdir = "build/sdk",
		command = ["tar", "-xzf", "sign.tar.gz"],
# Create only this arch's directory on the upload server (the include /
# exclude pair limits the transfer to the bare directory entry); the
# destination is suffixed per release via GetDirectorySuffix.
factory.addStep(ShellCommand(
	name = "uploadprepare",
	description = "Preparing package directory",
	workdir = "build/sdk",
	command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
	env={'RSYNC_PASSWORD': rsync_bin_key},
	haltOnFailure = True,

# Mirror the built packages; --delay-updates + per-arch --partial-dir keep
# the remote tree consistent while the transfer is in flight.
factory.addStep(ShellCommand(
	name = "packageupload",
	description = "Uploading package files",
	workdir = "build/sdk",
	command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
	env={'RSYNC_PASSWORD': rsync_bin_key},
	haltOnFailure = True,

# Same directory-only trick for the failure-log area.
factory.addStep(ShellCommand(
	description = "Preparing log directory",
	workdir = "build/sdk",
	command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
	env={'RSYNC_PASSWORD': rsync_bin_key},
	haltOnFailure = True,
# Extract the names of failed packages from error.txt and list their log
# files into logs.txt (xargs -r: do nothing when no package failed).
factory.addStep(ShellCommand(
	description = "Finding failure logs",
	workdir = "build/sdk/logs/package/feeds",
	command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
	haltOnFailure = False

# Copy just those logs into faillogs/ for upload.
factory.addStep(ShellCommand(
	description = "Collecting failure logs",
	workdir = "build/sdk",
	command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
	haltOnFailure = False

# Publish the failure logs; best-effort, so the build is not failed by a
# log-upload problem (haltOnFailure=False).
factory.addStep(ShellCommand(
	description = "Uploading failure logs",
	workdir = "build/sdk",
	command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
	env={'RSYNC_PASSWORD': rsync_bin_key},
	haltOnFailure = False,
# Optionally publish the source download cache (dl/); the partial-dir is
# made unique per arch AND per slave since multiple slaves may upload to
# the same destination concurrently.
if rsync_src_url is not None:
	factory.addStep(ShellCommand(
		name = "sourceupload",
		description = "Uploading source archives",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--checksum", "--delay-updates",
			WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
		env={'RSYNC_PASSWORD': rsync_src_key},
		haltOnFailure = False,
# Informational disk-usage report at the end of every build.
factory.addStep(ShellCommand(
	description = "Reporting disk usage",
	command=["df", "-h", "."],
	haltOnFailure = False,

from buildbot.config import BuilderConfig

# Register one builder per architecture; any configured slave may run it.
c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
####### STATUS TARGETS
# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
514 from buildbot.status import html
515 from buildbot.status.web import authz, auth
# Enable the web status only when [status] bind is configured; with
# user/password, destructive actions require HTTP basic auth.
if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	# NOTE(review): an else: branch is elided from this excerpt; the next
	# line is the unauthenticated fallback WebStatus.
	c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
535 ####### PROJECT IDENTITY
537 # the 'title' string will appear at the top of this buildbot
538 # installation's html.WebStatus home page (linked to the
539 # 'titleURL') and is embedded in the title of the waterfall HTML page.
# Title and home-page link shown by the WebStatus UI.
c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
c['buildbotURL'] = buildbot_url
# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): this line is an entry of the c['db'] dict literal whose
# surrounding braces are elided from this excerpt.
'db_url' : "sqlite:///state.sqlite",