Allow overriding GnuPG home directory
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

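# The option names below mirror what this file reads from config.ini further
# down; the values are placeholders (an illustrative sketch, not a shipped
# example config):
#
#   [general]
#   title = Example build farm
#   title_url = http://example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = .
#   port = 9990
#
#   [status]
#   bind = 8010
#   user = status-user
#   password = status-password
#
#   [slave 1]                      ; one section per slave, name must start with "slave "
#   name = example-slave-1
#   password = example-password
#   builds = 1
#
#   [gpg]
#   home = ~/.gnupg
#   keyid = <hex key id>
#   comment = Unattended build signature
#   passfile = /dev/null
#
#   [rsync]
#   binary_url = user@example.org::upload-packages
#   binary_password = example-password
#   source_url = user@example.org::upload-sources
#   source_password = example-password
#   sdk_url = downloads.example.org::sdk
#   sdk_password = example-password
#   sdk_pattern = lede-sdk-*.tar.xz
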
buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990

if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

c['slaves'] = []

for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "lede-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
	gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
	gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
	gpg_passfile = ini.get("gpg", "passfile")
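
# Note: if no [gpg] keyid is configured, gpg_keyid stays None and the signing
# steps in the build factory below are skipped entirely.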


# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.strip().split()
	arches.append(at)
	archnames.append(at[0])

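# Each line printed by dumpinfo.pl is split on whitespace; judging from how the
# fields are consumed below, field 0 is the package architecture name and
# field 1 is the matching "target/subtarget" pair (this description is inferred
# from the consumers in this file, not from dumpinfo.pl itself).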

# find feeds
feeds = []

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parts = line.strip().split()
		if len(parts) > 2 and parts[0] == "src-git":
			feeds.append(parts)
			url = parts[2].strip().split(';')
			branch = url[1] if len(url) > 1 else 'master'
			c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

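# A feeds.conf.default entry is expected to look roughly like this (an
# illustrative example, not a line taken from the source tree):
#
#   src-git packages https://git.example.org/feed/packages.git;branch-name
#
# The third field may carry an optional ";branch" suffix; without it the
# poller falls back to 'master'.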

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(branch='master'),
	treeStableTimer=60,
	builderNames=archnames))

c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties

c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
	slaveNames.append(slave.slavename)

for arch in arches:
	ts = arch[1].split('/')

	factory = BuildFactory()

	# find number of cores
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

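	# The 'nproc' property set above feeds the parallel build level further
	# down: WithProperties("-j%(nproc:~4)s") renders as -j<nproc>, falling
	# back to -j4 when the property is missing or empty.
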
	# prepare workspace
	factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

	factory.addStep(ShellCommand(
		name = "cleanold",
		description = "Cleaning previous builds",
		command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
		haltOnFailure = True,
		timeout = 2400))

	factory.addStep(ShellCommand(
		name = "cleanup",
		description = "Cleaning work area",
		command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
		haltOnFailure = True,
		timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "sdk"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = ["tar", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
		haltOnFailure = True))

	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

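	# The step above points sdk/dl at a persistent $HOME/dl on the slave, so
	# downloaded source tarballs are shared between builds instead of being
	# fetched again for every SDK checkout.
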
	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update"]))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"]))

	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		command = ["make", WithProperties("-j%(nproc:~4)s"), "V=s", "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y"]))

	if gpg_keyid is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(home_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			haltOnFailure = True
		))

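		# The master-side signall.sh helper (expected in home_dir; the script
		# itself is not part of this file) is handed the packed tarball, the
		# key id and the signature comment as arguments, with GNUPGHOME and
		# PASSFILE passed through the environment. This only documents the
		# call site below.
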
		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
			env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
			haltOnFailure = True,
			logEnviron = False
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			slavedest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

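	# The include/exclude combination in the two "prepare" steps below makes
	# rsync create just the per-arch directory on the remote side without
	# transferring its contents; the real payload follows in the subsequent
	# upload step.
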
	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/packages/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), "%s/packages/%s/" %(rsync_bin_url, arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	else:
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}
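
# After editing this file, the configuration can be sanity-checked from the
# master's base directory before reloading (assuming the buildbot command-line
# tool is installed):
#
#   buildbot checkconfig master.cfg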