2 # ex: set syntax=python:
# Read the master's settings from ./config.ini (relative to the master's
# working directory) and grab the externally visible buildbot URL, which is
# later passed to cleanup.sh on the slaves and used as c['buildbotURL'].
9 from buildbot import locks
11 ini = ConfigParser.ConfigParser()
# NOTE(review): ConfigParser.read() silently ignores a missing file; a missing
# config.ini only surfaces as NoSectionError on the first ini.get() below.
12 ini.read("./config.ini")
14 buildbot_url = ini.get("general", "buildbot_url")
16 # This is a sample buildmaster config file. It must be installed as
17 # 'master.cfg' in your buildmaster's base directory.
19 # This is the dictionary that the buildmaster pays attention to. We also use
20 # a shorter alias to save typing.
21 c = BuildmasterConfig = {}
25 # The 'slaves' list defines the set of recognized buildslaves. Each element is
26 # a BuildSlave object, specifying a unique slave name and password. The same
27 # slave name and password must be configured on the slave.
28 from buildbot.buildslave import BuildSlave
# Optional [general] settings; each assignment only runs when the option is
# present, overriding defaults assigned on lines elided from this view
# (slave_port / persistent / other_builds / tree_expire).
35 if ini.has_option("general", "port"):
36 slave_port = ini.getint("general", "port")
38 if ini.has_option("general", "persistent"):
39 persistent = ini.getboolean("general", "persistent")
# 'other_builds' is added to a slave's max_builds when computing -j in
# GetNumJobs() below.
41 if ini.has_option("general", "other_builds"):
42 other_builds = ini.getint("general", "other_builds")
# tree_expire > 0 enables the expire.sh build-tree expiry steps below.
44 if ini.has_option("general", "expire"):
45 tree_expire = ini.getint("general", "expire")
# Register one BuildSlave per "[slave ...]" ini section that provides both a
# name and a password; a per-slave 'builds' option caps concurrent builds.
50 for section in ini.sections():
51 if section.startswith("slave "):
52 if ini.has_option(section, "name") and ini.has_option(section, "password"):
53 name = ini.get(section, "name")
54 password = ini.get(section, "password")
# NOTE(review): the default max_builds[name] assignment is on a line elided
# from this view; confirm it exists, otherwise a section without 'builds'
# would raise KeyError on the append below.
56 if ini.has_option(section, "builds"):
57 max_builds[name] = ini.getint(section, "builds")
58 c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))
60 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
61 # This must match the value configured into the buildslaves (with their
63 c['slavePortnum'] = slave_port
# Collapse compatible pending build requests into a single build.
66 c['mergeRequests'] = True
68 # Reduce amount of backlog data
69 c['buildHorizon'] = 30
# Master-side working directory plus rsync endpoints/credentials:
#   binary_url/password - destination for built packages and faillogs,
#   source_url/password - optional destination for source (dl/) archives,
#   sdk_url/password    - where per-target SDK archives are fetched from.
74 home_dir = os.path.abspath(ini.get("general", "homedir"))
76 rsync_bin_url = ini.get("rsync", "binary_url")
77 rsync_bin_key = ini.get("rsync", "binary_password")
82 if ini.has_option("rsync", "source_url"):
83 rsync_src_url = ini.get("rsync", "source_url")
84 rsync_src_key = ini.get("rsync", "source_password")
# Default glob matched against the SDK directory on the rsync server; can be
# overridden via [rsync] sdk_pattern.
88 rsync_sdk_pat = "lede-sdk-*.tar.xz"
90 if ini.has_option("rsync", "sdk_url"):
91 rsync_sdk_url = ini.get("rsync", "sdk_url")
93 if ini.has_option("rsync", "sdk_password"):
94 rsync_sdk_key = ini.get("rsync", "sdk_password")
96 if ini.has_option("rsync", "sdk_pattern"):
97 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
101 gpg_comment = "Unattended build signature"
102 gpg_passfile = "/dev/null"
104 if ini.has_option("gpg", "home"):
105 gpg_home = ini.get("gpg", "home")
107 if ini.has_option("gpg", "keyid"):
108 gpg_keyid = ini.get("gpg", "keyid")
110 if ini.has_option("gpg", "comment"):
111 gpg_comment = ini.get("gpg", "comment")
113 if ini.has_option("gpg", "passfile"):
114 gpg_passfile = ini.get("gpg", "passfile")
# Discover the architectures to build by running dumpinfo.pl inside the
# checked-out source tree; each output line starts with the arch name,
# which becomes a builder name (archnames).
121 findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
122 stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
# NOTE(review): the surrounding read loop and the arches/archnames
# initialisations are on lines elided from this view.
125 line = findarches.stdout.readline()
128 at = line.strip().split()
130 archnames.append(at[0])
# Parse feeds.conf.default from the source tree and create one GitPoller per
# "src-git" feed. feedbranches maps repository URL -> branch name and is
# consulted by branch_change_filter() below.
135 feedbranches = dict()
137 from buildbot.changes.gitpoller import GitPoller
138 c['change_source'] = []
140 with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
142 parts = line.strip().split()
143 if parts[0] == "src-git":
# Feed URLs may carry a branch suffix after ';' (e.g. "url;branch");
# default to 'master' when none is given.
145 url = parts[2].strip().split(';')
146 branch = url[1] if len(url) > 1 else 'master'
147 feedbranches[url[0]] = branch
148 c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
153 # Configure the Schedulers, which decide how to react to incoming changes. In this
154 # case, just kick off a 'basebuild' build
def branch_change_filter(change):
	"""Accept a change only when it is on the branch polled for its repository.

	`feedbranches` (filled from feeds.conf.default above) maps repository
	URL -> branch name.  Using dict.get() instead of indexing means a change
	from a repository that is not in the map is simply filtered out, instead
	of raising KeyError inside the scheduler's change filter.
	"""
	return change.branch == feedbranches.get(change.repository)
159 from buildbot.schedulers.basic import SingleBranchScheduler
160 from buildbot.schedulers.forcesched import ForceScheduler
161 from buildbot.changes import filter
# Trigger all per-arch builders when a polled feed branch changes, and also
# allow manual (forced) builds of any builder from the web UI.
163 c['schedulers'].append(SingleBranchScheduler(
165 change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
167 builderNames=archnames))
169 c['schedulers'].append(ForceScheduler(
171 builderNames=archnames))
175 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
176 # what steps, and which slaves can execute them. Note that any particular build will
177 # only take place on one slave.
179 from buildbot.process.factory import BuildFactory
180 from buildbot.steps.source import Git
181 from buildbot.steps.shell import ShellCommand
182 from buildbot.steps.shell import SetProperty
183 from buildbot.steps.transfer import FileUpload
184 from buildbot.steps.transfer import FileDownload
185 from buildbot.steps.master import MasterShellCommand
186 from buildbot.process.properties import WithProperties
# Derive an upload-directory suffix from a versioned slave name: a slavename
# like "host-17.01-foo" (middle part matching N.N) yields "-17.01", so builds
# from release-branch slaves land in e.g. packages-17.01/.
189 def GetDirectorySuffix(props):
190 if props.hasProperty("slavename") and re.match("^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
191 return "-%s" % props["slavename"].split('-')[1]
# NOTE(review): the fallback return for non-matching slavenames is on a line
# elided from this view.
# Compute the make -jN value for a slave: share its CPU count ("nproc"
# property, set by the SetProperty step) across the builds it may run
# concurrently (max_builds) plus any external builds (other_builds).
# Python 2 integer division; the +1 keeps the result >= 1.
195 def GetNumJobs(props):
196 if props.hasProperty("slavename") and props.hasProperty("nproc"):
197 return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
# NOTE(review): the fallback return when the properties are missing is on a
# line elided from this view.
# Body of GetCwd (the def line is elided from this view): prefer the build's
# 'builddir' property, fall back to 'workdir'; used as CCACHE_BASEDIR below.
202 if props.hasProperty("builddir"):
203 return props["builddir"]
204 elif props.hasProperty("workdir"):
205 return props["workdir"]
# Per-slave lock serialising download-directory access, and the list of all
# slave names handed to every BuilderConfig below.
212 dlLock = locks.SlaveLock("slave_dl")
216 for slave in c['slaves']:
217 slaveNames.append(slave.slavename)
# Start of the per-architecture builder factory (the enclosing "for arch in
# arches:" header is elided from this view). arch[1] is a "target/subtarget"
# path; ts splits it for building the SDK rsync URL.
220 ts = arch[1].split('/')
222 factory = BuildFactory()
224 # find number of cores
225 factory.addStep(SetProperty(
228 description = "Finding number of CPUs",
229 command = ["nproc"]))
# Ship the cleanup helper to the slave (0755 = executable; Py2 octal).
232 factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))
# Clean up state from other builders ("full") and this builder's previous
# run ("single") before building; cleanup.sh receives the master URL plus
# slave/builder names so it can tell which directories are stale.
235 factory.addStep(ShellCommand(
237 description = "Cleaning previous builds",
238 command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
239 haltOnFailure = True,
242 factory.addStep(ShellCommand(
244 description = "Cleaning work area",
245 command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
246 haltOnFailure = True,
249 # expire tree if needed
# NOTE(review): the matching "if"/preceding branch of this elif is on lines
# elided from this view (presumably the non-persistent cleanup path).
250 elif tree_expire > 0:
251 factory.addStep(FileDownload(
252 mastersrc = home_dir+"/expire.sh",
253 slavedest = "../expire.sh",
256 factory.addStep(ShellCommand(
258 description = "Checking for build tree expiry",
259 command = ["./expire.sh", str(tree_expire)],
261 haltOnFailure = True,
# Fetch and unpack the per-target SDK: rsync the archive matching
# rsync_sdk_pat for this target/subtarget, untar it into sdk/ (stripping the
# top-level directory), then install the usign key pair used for package
# signing on the slave.
264 factory.addStep(ShellCommand(
266 description = "Preparing SDK directory",
267 command = ["mkdir", "-p", "sdk"],
268 haltOnFailure = True))
270 factory.addStep(ShellCommand(
271 name = "downloadsdk",
272 description = "Downloading SDK archive",
273 command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
274 env={'RSYNC_PASSWORD': rsync_sdk_key},
275 haltOnFailure = True,
278 factory.addStep(ShellCommand(
280 description = "Unpacking SDK archive",
# --keep-newer-files avoids clobbering a newer, already-unpacked tree.
281 command = ["tar", "--keep-newer-files", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
282 haltOnFailure = True))
# 0600: the private build key must not be world-readable.
284 factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
285 factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))
# Prepare the SDK working tree: share one download cache per slave by
# symlinking sdk/dl to $HOME/dl, regenerate a default .config, and set up
# ccache via the helper script shipped from the master.
287 factory.addStep(ShellCommand(
289 description = "Preparing download directory",
290 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))
292 factory.addStep(ShellCommand(
294 description = "Preparing SDK configuration",
295 workdir = "build/sdk",
296 command = ["sh", "-c", "rm -f .config && make defconfig"]))
298 factory.addStep(FileDownload(
299 mastersrc = home_dir+'/ccache.sh',
300 slavedest = 'sdk/ccache.sh',
303 factory.addStep(ShellCommand(
305 description = "Preparing ccache",
306 workdir = "build/sdk",
307 command = ["./ccache.sh"]))
# Update and install all package feeds, then build everything. -jN comes
# from GetNumJobs(); IGNORE_ERRORS lets individual package failures through
# so faillogs can be collected, and CCACHE_BASEDIR (via GetCwd) keeps ccache
# hits path-independent across build directories.
309 factory.addStep(ShellCommand(
310 name = "updatefeeds",
311 description = "Updating feeds",
312 workdir = "build/sdk",
313 command = ["./scripts/feeds", "update"]))
315 factory.addStep(ShellCommand(
316 name = "installfeeds",
317 description = "Installing feeds",
318 workdir = "build/sdk",
319 command = ["./scripts/feeds", "install", "-a"]))
321 factory.addStep(ShellCommand(
323 description = "Building packages",
324 workdir = "build/sdk",
326 command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
327 env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
328 haltOnFailure = True))
# Record the exact feed revisions used for this build into the published
# package tree (string command => run through the shell for the redirect).
330 factory.addStep(ShellCommand(
331 name = "mkfeedsconf",
332 description = "Generating pinned feeds.conf",
333 workdir = "build/sdk",
334 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
# Optional GPG signing round-trip (only when [gpg] keyid is configured):
# pack all Packages indexes on the slave, upload the tarball to the master,
# sign it there with signall.sh, download it back and unpack the signed
# files in place.
336 if gpg_keyid is not None:
337 factory.addStep(MasterShellCommand(
338 name = "signprepare",
339 description = "Preparing temporary signing directory",
340 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
344 factory.addStep(ShellCommand(
346 description = "Packing files to sign",
347 workdir = "build/sdk",
# Only the per-feed Packages index files are signed, not the .ipk payloads.
348 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
352 factory.addStep(FileUpload(
353 slavesrc = "sdk/sign.tar.gz",
354 masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
358 factory.addStep(MasterShellCommand(
360 description = "Signing files",
361 command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
362 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
366 factory.addStep(FileDownload(
367 mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
368 slavedest = "sdk/sign.tar.gz",
372 factory.addStep(ShellCommand(
374 description = "Unpacking signed files",
375 workdir = "build/sdk",
376 command = ["tar", "-xzf", "sign.tar.gz"],
# Publish built packages: first create (only) this arch's directory on the
# server via an include/exclude rsync of the empty structure, then upload
# the actual files. --partial-dir + --delay-updates keep partially uploaded
# trees invisible until complete; GetDirectorySuffix routes release-branch
# slaves to packages-<version>/.
380 factory.addStep(ShellCommand(
381 name = "uploadprepare",
382 description = "Preparing package directory",
383 workdir = "build/sdk",
384 command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
385 env={'RSYNC_PASSWORD': rsync_bin_key},
386 haltOnFailure = True,
390 factory.addStep(ShellCommand(
391 name = "packageupload",
392 description = "Uploading package files",
393 workdir = "build/sdk",
394 command = ["rsync", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
395 env={'RSYNC_PASSWORD': rsync_bin_key},
396 haltOnFailure = True,
# Same directory-structure trick for the failure-log area.
400 factory.addStep(ShellCommand(
402 description = "Preparing log directory",
403 workdir = "build/sdk",
404 command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
405 env={'RSYNC_PASSWORD': rsync_bin_key},
406 haltOnFailure = True,
# Collect and publish build-failure logs: extract failing package names from
# error.txt, list their log files into logs.txt, copy those logs into
# faillogs/ and rsync them to the server. All best-effort
# (haltOnFailure = False) — missing logs must not fail the build.
410 factory.addStep(ShellCommand(
412 description = "Finding failure logs",
413 workdir = "build/sdk/logs/package/feeds",
414 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
415 haltOnFailure = False
418 factory.addStep(ShellCommand(
420 description = "Collecting failure logs",
421 workdir = "build/sdk",
422 command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
423 haltOnFailure = False
426 factory.addStep(ShellCommand(
428 description = "Uploading failure logs",
429 workdir = "build/sdk",
430 command = ["rsync", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
431 env={'RSYNC_PASSWORD': rsync_bin_key},
432 haltOnFailure = False,
# Optionally mirror the shared download cache (dl/) to the source server,
# then register the finished factory as this architecture's builder, runnable
# on every configured slave.
436 if rsync_src_url is not None:
437 factory.addStep(ShellCommand(
438 name = "sourceupload",
439 description = "Uploading source archives",
440 workdir = "build/sdk",
441 command = ["rsync", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
442 env={'RSYNC_PASSWORD': rsync_src_key},
443 haltOnFailure = False,
447 from buildbot.config import BuilderConfig
449 c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
452 ####### STATUS TARGETS
454 # 'status' is a list of Status Targets. The results of each build will be
455 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
456 # including web pages, email senders, and IRC bots.
460 from buildbot.status import html
461 from buildbot.status.web import authz, auth
# Web status UI on the configured [status] bind port. When a user/password
# pair is configured, privileged actions require HTTP basic auth; otherwise
# an unauthenticated (read-only defaults) WebStatus is used.
463 if ini.has_option("status", "bind"):
464 if ini.has_option("status", "user") and ini.has_option("status", "password"):
465 authz_cfg=authz.Authz(
466 # change any of these to True to enable; see the manual for more
468 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
469 gracefulShutdown = 'auth',
470 forceBuild = 'auth', # use this to test your slave once it is set up
471 forceAllBuilds = 'auth',
474 stopAllBuilds = 'auth',
475 cancelPendingBuild = 'auth',
477 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
# NOTE(review): the else introducing this unauthenticated fallback is on a
# line elided from this view.
479 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
481 ####### PROJECT IDENTITY
483 # the 'title' string will appear at the top of this buildbot
484 # installation's html.WebStatus home page (linked to the
485 # 'titleURL') and is embedded in the title of the waterfall HTML page.
# Project identity shown by the web UI, taken from [general].
487 c['title'] = ini.get("general", "title")
488 c['titleURL'] = ini.get("general", "title_url")
490 # the 'buildbotURL' string should point to the location where the buildbot's
491 # internal web server (usually the html.WebStatus page) is visible. This
492 # typically uses the port number set in the Waterfall 'status' entry, but
493 # with an externally-visible host name which the buildbot cannot figure out
496 c['buildbotURL'] = buildbot_url
501 # This specifies what database buildbot uses to store its state. You can leave
502 # this at its default for all but the largest installations.
# NOTE(review): this entry belongs to the c['db'] = { ... } dictionary whose
# surrounding lines are elided from this view.
503 'db_url' : "sqlite:///state.sqlite",