2 # ex: set syntax=python:
9 from buildbot import locks
# Read master-wide settings from the INI file next to this master.cfg.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# Externally visible URL of this buildmaster; also handed to the slave-side
# cleanup.sh helper so it can identify which master a work area belongs to.
buildbot_url = ini.get("general", "buildbot_url")
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
25 # The 'slaves' list defines the set of recognized buildslaves. Each element is
26 # a BuildSlave object, specifying a unique slave name and password. The same
27 # slave name and password must be configured on the slave.
28 from buildbot.buildslave import BuildSlave
# Optional [general] tuning knobs.
# NOTE(review): the default assignments these override are on lines not
# visible in this chunk — confirm each name is bound before use.

# TCP port the master listens on for slave connections (see 'slavePortnum').
if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

# Whether slave work areas are kept between builds (skips the full cleanup).
if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

# Number of builds assumed to run on a slave besides this master's own;
# feeds into GetNumJobs() when dividing the slave's CPUs.
if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

# Build-tree expiry threshold passed to expire.sh
# (units presumably seconds — TODO confirm against expire.sh).
if ini.has_option("general", "expire"):
    tree_expire = ini.getint("general", "expire")
# Register one BuildSlave per "slave <x>" section of config.ini.
# Each section must provide "name" and "password"; the optional "builds"
# key caps the number of concurrent builds on that slave.
for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # Default to a single concurrent build; without this, slaves
            # whose section omits "builds" raised KeyError on the
            # max_builds[name] lookup below.
            max_builds[name] = 1
            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option).
c['slavePortnum'] = slave_port

# Coalesce compatible pending build requests into a single build.
c['mergeRequests'] = True

# Reduce amount of backlog data kept/displayed by the status targets.
c['buildHorizon'] = 30
# Absolute base directory of this master's helper files
# (dumpinfo.pl, source.git, signing keys, scripts).
home_dir = os.path.abspath(ini.get("general", "homedir"))

# rsync destination (and password) for uploading built packages.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for mirroring the dl/ source cache; when
# unset the sourceupload step is skipped (rsync_src_url is later tested
# against None — its default assignment is outside this chunk).
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")
# Where to fetch the prebuilt SDK archive from, and the glob used to
# select it inside the remote per-target directory.
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
# GnuPG settings for the master-side signall.sh step.
# NOTE(review): gpg_keyid's default is assigned outside this chunk;
# signing is skipped entirely while it is None.
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
    gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
    gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
    gpg_passfile = ini.get("gpg", "passfile")
# Ask the source tree which package architectures exist; one builder is
# created per architecture name further down.
findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

# NOTE(review): the read loop around the following lines (while header
# and termination test) is on lines not visible in this chunk; as shown
# they would process a single line only — confirm against the full file.
line = findarches.stdout.readline()
at = line.strip().split()
archnames.append(at[0])
# Maps feed repository URL -> branch name; filled by parse_feed_entry()
# and consulted by branch_change_filter() to ignore off-branch commits.
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
# One GitPoller per feed repository gets appended here.
c['change_source'] = []
def parse_feed_entry(line):
    """Register a GitPoller change source for one feeds.conf entry.

    Only "src-git" entries are handled. The repository URL may carry a
    ";branch" suffix (defaulting to 'master'); the branch is recorded in
    the module-level feedbranches map and a poller for the repository is
    appended to c['change_source'].
    """
    fields = line.strip().split()
    if fields[0] != "src-git":
        return
    repo = fields[2].strip().split(';')
    branch = repo[1] if len(repo) > 1 else 'master'
    feedbranches[repo[0]] = branch
    c['change_source'].append(GitPoller(repo[0], branch=branch,
        workdir='%s/%s.git' %(os.getcwd(), fields[1]), pollinterval=300))
# Feed the SDK's base feed definition, then the tree's default feed
# list, through parse_feed_entry().
make = subprocess.Popen(['make', '--no-print-directory', '-C', home_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=home_dir+'/source.git'), stdout = subprocess.PIPE)

# NOTE(review): the read loop around readline() is on lines not visible
# in this chunk.
line = make.stdout.readline()
parse_feed_entry(line)

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    # NOTE(review): the "for line in f:" iteration header is not visible
    # in this chunk; as shown this would re-parse the stale 'line' above.
    parse_feed_entry(line)
163 # Configure the Schedulers, which decide how to react to incoming changes. In this
164 # case, just kick off a 'basebuild' build
def branch_change_filter(change):
    """Accept a change only when it is on the branch configured for its feed."""
    wanted_branch = feedbranches[change.repository]
    return wanted_branch == change.branch
169 from buildbot.schedulers.basic import SingleBranchScheduler
170 from buildbot.schedulers.forcesched import ForceScheduler
171 from buildbot.changes import filter
# Kick off all architecture builders when a watched feed branch changes,
# and allow manual forcing via the web UI.
# NOTE(review): c['schedulers'] initialisation and the schedulers'
# name=/treeStableTimer= keywords are on lines not visible in this chunk.
c['schedulers'].append(SingleBranchScheduler(
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    builderNames=archnames))

c['schedulers'].append(ForceScheduler(
    builderNames=archnames))
185 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
186 # what steps, and which slaves can execute them. Note that any particular build will
187 # only take place on one slave.
189 from buildbot.process.factory import BuildFactory
190 from buildbot.steps.source import Git
191 from buildbot.steps.shell import ShellCommand
192 from buildbot.steps.shell import SetProperty
193 from buildbot.steps.transfer import FileUpload
194 from buildbot.steps.transfer import FileDownload
195 from buildbot.steps.master import MasterShellCommand
196 from buildbot.process.properties import WithProperties
def GetDirectorySuffix(props):
    """Derive an upload-directory suffix from the slave name.

    Slaves named "<prefix>-<major>.<minor>-<suffix>" build for a release
    branch; their uploads go to "packages-<major>.<minor>/...". All
    other slaves get an empty suffix.

    Fixes: the pattern is now a raw string (the old "\\." relied on an
    invalid string escape), and the no-match case explicitly returns ""
    instead of falling through to None, which rendered as the literal
    string "None" inside the rsync destinations built via WithProperties.
    """
    if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    return ""
def GetNumJobs(props):
    """Return the make -j parallelism for a build on this slave.

    Divides the slave's CPU count ("nproc" property) by the number of
    builds that may run there concurrently (this master's
    max_builds[slavename] plus the configured other_builds), plus one.

    Fixes: uses explicit floor division (//) so the result is always an
    integer (the old "/" only floored under Python 2), and returns a
    fallback of 1 instead of implicitly returning None when the
    slavename/nproc properties are missing — None broke the
    "-j%(jobs)d" formatting in the compile step.
    """
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        return ((int(props["nproc"]) // (max_builds[props["slavename"]] + other_builds)) + 1)
    return 1
    # NOTE(review): the enclosing "def GetCwd(props):" header (and its
    # fall-through default) are on lines not visible in this chunk.
    # These lines pick the slave-side working directory used as
    # CCACHE_BASEDIR in the compile step below.
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
# Per-slave lock; available for serialising download-heavy steps.
dlLock = locks.SlaveLock("slave_dl")

# Collect all configured slave names so every builder can run anywhere.
# NOTE(review): slaveNames initialisation is on a line outside this chunk.
for slave in c['slaves']:
    slaveNames.append(slave.slavename)
    # NOTE(review): everything from here to the BuilderConfig append runs
    # once per architecture; the enclosing "for arch in ..." header is on
    # a line not visible in this chunk. arch[0] is the architecture name,
    # arch[1] a "target/subtarget" path.
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores; exported as the "nproc" property consumed by
    # GetNumJobs() when computing make -j.
    factory.addStep(SetProperty(
        description = "Finding number of CPUs",
        command = ["nproc"]))
    # Ship the cleanup helper to the slave, then clean either the whole
    # previous build ("full") or just the work area ("single").
    # NOTE(review): the if/else selecting between the two variants
    # (driven by the 'persistent' option) is on lines not visible here.
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

    factory.addStep(ShellCommand(
        description = "Cleaning previous builds",
        command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Cleaning work area",
        command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
        haltOnFailure = True,
    # expire tree if needed: run expire.sh with the configured threshold
    # so stale persistent work areas get rebuilt from scratch.
    # NOTE(review): the "if" branch this elif pairs with is on lines not
    # visible in this chunk.
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = home_dir+"/expire.sh",
            slavedest = "../expire.sh",

        factory.addStep(ShellCommand(
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            haltOnFailure = True,
    # Fetch the prebuilt SDK over rsync, unpack it, and refresh ./sdk in
    # place; then install the package-signing keypair and share the
    # host-wide download cache via a dl symlink.
    factory.addStep(ShellCommand(
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    # --checksum leaves unchanged files untouched so state inside sdk/
    # (ccache, installed feeds) survives SDK refreshes.
    factory.addStep(ShellCommand(
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # usign keypair used by CONFIG_SIGNED_PACKAGES=y in the compile step.
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

    # Share one $HOME/dl source cache across all builds on this slave.
    factory.addStep(ShellCommand(
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))
    # Default SDK configuration, ccache setup, feed update/install, and
    # the package compile itself.
    factory.addStep(ShellCommand(
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = home_dir+'/ccache.sh',
        slavedest = 'sdk/ccache.sh',

    factory.addStep(ShellCommand(
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"]))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"]))

    # Start with a clean error list so the faillog steps below only see
    # failures from this run.
    factory.addStep(ShellCommand(
        description = "Clearing failure log list",
        workdir = "build/sdk",
        command = ["rm", "-f", "logs/package/error.txt"],
        haltOnFailure = False

    # IGNORE_ERRORS lets individual packages fail while the run as a
    # whole continues; BUILD_LOG=1 produces the per-package logs the
    # faillog steps upload later. CCACHE_BASEDIR is pinned to the build
    # directory so ccache hits are path-independent across slaves.
    factory.addStep(ShellCommand(
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))
    # Record the exact feed revisions used for this run into the
    # published package directory.
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # Optional signing round-trip: pack all Packages indexes, upload the
    # archive to the master, sign it there with GnuPG (signall.sh), then
    # download and unpack the signed result back into the tree.
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],

        factory.addStep(ShellCommand(
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),

        factory.addStep(MasterShellCommand(
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",

        factory.addStep(ShellCommand(
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
    # Create the remote per-arch directory first (rsync include/exclude
    # trick uploads only the empty /<arch>/ dir), then upload the
    # packages themselves.
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # --delay-updates plus a per-arch partial dir keeps the public tree
    # consistent while the transfer is in flight.
    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # Same directory-creation trick for the faillogs tree.
    factory.addStep(ShellCommand(
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
    # Collect the build logs of packages named in error.txt and publish
    # them; none of these steps fail the build.
    factory.addStep(ShellCommand(
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False

    factory.addStep(ShellCommand(
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False

    factory.addStep(ShellCommand(
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
    # Mirror the dl/ source cache to the shared archive, if source_url
    # was configured. The per-slave partial dir avoids collisions when
    # several slaves upload concurrently.
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
    from buildbot.config import BuilderConfig

    # One builder per architecture; any configured slave may run it.
    # NOTE(review): c['builders'] initialisation is outside this chunk.
    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
487 from buildbot.status import html
488 from buildbot.status.web import authz, auth
# Serve the WebStatus UI when [status] bind is configured; when a
# user/password pair is also present, destructive web actions require
# HTTP basic authentication.
# NOTE(review): c['status'] initialisation and the else branch that the
# final unauthenticated WebStatus belongs to are on lines not visible
# in this chunk.
if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))

    # NOTE(review): belongs to the missing else (no credentials configured).
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.
c['buildbotURL'] = buildbot_url
# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the enclosing "c['db'] = dict(" opener and its closing
# parenthesis are on lines not visible in this chunk.
    'db_url' : "sqlite:///state.sqlite",