# phase1, phase2: add unified ccache handling
# [buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser  # Python 2 stdlib name (renamed to configparser in Python 3)

from buildbot import locks

# All master-side settings are read from an INI file sitting next to this
# master.cfg in the buildmaster's base directory.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# Externally visible base URL of this buildmaster; also handed to the
# slave-side cleanup.sh helper further below.
buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
22
####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

# Tunables from the [general] section, each falling back to a default when
# the option is absent from config.ini.
slave_port = ini.getint("general", "port") if ini.has_option("general", "port") else 9990
persistent = ini.getboolean("general", "persistent") if ini.has_option("general", "persistent") else False
other_builds = ini.getint("general", "other_builds") if ini.has_option("general", "other_builds") else 0
tree_expire = ini.getint("general", "expire") if ini.has_option("general", "expire") else 0
46
c['slaves'] = []
max_builds = dict()

# Register one BuildSlave per "[slave ...]" section; the optional "builds"
# option caps how many builds may run concurrently on that slave.
for section in ini.sections():
    if not section.startswith("slave "):
        continue
    if not (ini.has_option(section, "name") and ini.has_option(section, "password")):
        continue
    name = ini.get(section, "name")
    password = ini.get(section, "password")
    builds = ini.getint(section, "builds") if ini.has_option(section, "builds") else 1
    max_builds[name] = builds
    c['slaves'].append(BuildSlave(name, password, max_builds = builds))
59
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds: merge queued compatible build requests into one build
c['mergeRequests'] = True

# Reduce amount of backlog data kept by the master
c['buildHorizon'] = 30
c['logHorizon'] = 20
71
####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

# rsync endpoints: the binary destination is mandatory, source and SDK
# locations are optional features.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = ini.get("rsync", "sdk_url") if ini.has_option("rsync", "sdk_url") else None
rsync_sdk_key = ini.get("rsync", "sdk_password") if ini.has_option("rsync", "sdk_password") else None
rsync_sdk_pat = ini.get("rsync", "sdk_pattern") if ini.has_option("rsync", "sdk_pattern") else "lede-sdk-*.tar.xz"

# GPG signing configuration; the signing steps below are only added to a
# builder when gpg_keyid ends up non-None.
gpg_home = ini.get("gpg", "home") if ini.has_option("gpg", "home") else "~/.gnupg"
gpg_keyid = ini.get("gpg", "keyid") if ini.has_option("gpg", "keyid") else None
gpg_comment = ini.get("gpg", "comment") if ini.has_option("gpg", "comment") else "Unattended build signature"
gpg_passfile = ini.get("gpg", "passfile") if ini.has_option("gpg", "passfile") else "/dev/null"
115
116
# find arches
#
# dumpinfo.pl emits one architecture per line; the first whitespace-separated
# token is the architecture name, the rest describes the target using it
# (arch[1] looks like "target/subtarget" — see the ts split below).
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

# Iterate the pipe directly instead of a manual readline() loop.
for line in findarches.stdout:
    at = line.strip().split()
    arches.append(at)
    archnames.append(at[0])

# Reap the helper so it does not linger as a zombie process.
findarches.wait()
131
132
# find feeds
#
# Parse feeds.conf.default from the checked-out source tree and create one
# GitPoller change source per "src-git" feed.
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parts = line.strip().split()
        # Skip blank lines and non-src-git entries; the previous code
        # raised IndexError (parts[0]) on an empty line.
        if not parts or parts[0] != "src-git":
            continue
        feeds.append(parts)
        # Feed URLs may carry a branch after ';', e.g. "url;branch".
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
149
150
151 ####### SCHEDULERS
152
153 # Configure the Schedulers, which decide how to react to incoming changes. In this
154 # case, just kick off a 'basebuild' build
155
def branch_change_filter(change):
    """Accept a change only when it is on the branch tracked for its repository."""
    tracked_branch = feedbranches[change.repository]
    return tracked_branch == change.branch
158
159 from buildbot.schedulers.basic import SingleBranchScheduler
160 from buildbot.schedulers.forcesched import ForceScheduler
161 from buildbot.changes import filter
c['schedulers'] = [
    # Kick off every architecture builder once a watched feed branch has
    # been quiet for a minute.
    SingleBranchScheduler(
        name="all",
        change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
        treeStableTimer=60,
        builderNames=archnames),
    # Manual trigger from the web UI.
    ForceScheduler(
        name="force",
        builderNames=archnames),
]
172
173 ####### BUILDERS
174
175 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
176 # what steps, and which slaves can execute them. Note that any particular build will
177 # only take place on one slave.
178
179 from buildbot.process.factory import BuildFactory
180 from buildbot.steps.source import Git
181 from buildbot.steps.shell import ShellCommand
182 from buildbot.steps.shell import SetProperty
183 from buildbot.steps.transfer import FileUpload
184 from buildbot.steps.transfer import FileDownload
185 from buildbot.steps.master import MasterShellCommand
186 from buildbot.process.properties import WithProperties
187
188
def GetDirectorySuffix(props):
    """Derive an upload-directory suffix from the slave name.

    Slaves named "<prefix>-<major>.<minor>-<suffix>" (e.g. "slave-17.01-a")
    build for a specific release; return "-<major>.<minor>" so their
    artifacts land in a versioned directory. Any other name yields "".
    """
    # Raw string for the regex: "\." in a plain string is an invalid escape
    # (DeprecationWarning in modern Python) and only worked by accident.
    if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    else:
        return ""
194
def GetNumJobs(props):
    """Compute the make -j level: share the slave's CPUs between the builds
    that may run there concurrently (max_builds plus other_builds)."""
    if not (props.hasProperty("slavename") and props.hasProperty("nproc")):
        return 1
    concurrent = max_builds[props["slavename"]] + other_builds
    return (int(props["nproc"]) / concurrent) + 1
200
201
c['builders'] = []

# NOTE(review): dlLock is defined but not claimed by any step below —
# presumably kept for parity with phase1; confirm before removing.
dlLock = locks.SlaveLock("slave_dl")

slaveNames = [slave.slavename for slave in c['slaves']]
210
211 for arch in arches:
212 ts = arch[1].split('/')
213
214 factory = BuildFactory()
215
216 # find number of cores
217 factory.addStep(SetProperty(
218 name = "nproc",
219 property = "nproc",
220 description = "Finding number of CPUs",
221 command = ["nproc"]))
222
223 # prepare workspace
224 factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))
225
226 if not persistent:
227 factory.addStep(ShellCommand(
228 name = "cleanold",
229 description = "Cleaning previous builds",
230 command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
231 haltOnFailure = True,
232 timeout = 2400))
233
234 factory.addStep(ShellCommand(
235 name = "cleanup",
236 description = "Cleaning work area",
237 command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
238 haltOnFailure = True,
239 timeout = 2400))
240
241 # expire tree if needed
242 elif tree_expire > 0:
243 factory.addStep(FileDownload(
244 mastersrc = home_dir+"/expire.sh",
245 slavedest = "../expire.sh",
246 mode = 0755))
247
248 factory.addStep(ShellCommand(
249 name = "expire",
250 description = "Checking for build tree expiry",
251 command = ["./expire.sh", str(tree_expire)],
252 workdir = ".",
253 haltOnFailure = True,
254 timeout = 2400))
255
256 factory.addStep(ShellCommand(
257 name = "mksdkdir",
258 description = "Preparing SDK directory",
259 command = ["mkdir", "-p", "sdk"],
260 haltOnFailure = True))
261
262 factory.addStep(ShellCommand(
263 name = "downloadsdk",
264 description = "Downloading SDK archive",
265 command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
266 env={'RSYNC_PASSWORD': rsync_sdk_key},
267 haltOnFailure = True,
268 logEnviron = False))
269
270 factory.addStep(ShellCommand(
271 name = "unpacksdk",
272 description = "Unpacking SDK archive",
273 command = ["tar", "--keep-newer-files", "--no-overwrite-dir", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
274 haltOnFailure = True))
275
276 factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
277 factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))
278
279 factory.addStep(ShellCommand(
280 name = "mkdldir",
281 description = "Preparing download directory",
282 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))
283
284 factory.addStep(ShellCommand(
285 name = "mkconf",
286 description = "Preparing SDK configuration",
287 workdir = "build/sdk",
288 command = ["sh", "-c", "rm -f .config && make defconfig"]))
289
290 factory.addStep(FileDownload(
291 mastersrc = home_dir+'/ccache.sh',
292 slavedest = 'sdk/ccache.sh',
293 mode = 0755))
294
295 factory.addStep(ShellCommand(
296 name = "prepccache",
297 description = "Preparing ccache",
298 workdir = "build/sdk",
299 command = ["./ccache.sh"]))
300
301 factory.addStep(ShellCommand(
302 name = "updatefeeds",
303 description = "Updating feeds",
304 workdir = "build/sdk",
305 command = ["./scripts/feeds", "update"]))
306
307 factory.addStep(ShellCommand(
308 name = "installfeeds",
309 description = "Installing feeds",
310 workdir = "build/sdk",
311 command = ["./scripts/feeds", "install", "-a"]))
312
313 factory.addStep(ShellCommand(
314 name = "compile",
315 description = "Building packages",
316 workdir = "build/sdk",
317 command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"]))
318
319 factory.addStep(ShellCommand(
320 name = "mkfeedsconf",
321 description = "Generating pinned feeds.conf",
322 workdir = "build/sdk",
323 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
324
325 if gpg_keyid is not None:
326 factory.addStep(MasterShellCommand(
327 name = "signprepare",
328 description = "Preparing temporary signing directory",
329 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
330 haltOnFailure = True
331 ))
332
333 factory.addStep(ShellCommand(
334 name = "signpack",
335 description = "Packing files to sign",
336 workdir = "build/sdk",
337 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
338 haltOnFailure = True
339 ))
340
341 factory.addStep(FileUpload(
342 slavesrc = "sdk/sign.tar.gz",
343 masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
344 haltOnFailure = True
345 ))
346
347 factory.addStep(MasterShellCommand(
348 name = "signfiles",
349 description = "Signing files",
350 command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
351 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
352 haltOnFailure = True
353 ))
354
355 factory.addStep(FileDownload(
356 mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
357 slavedest = "sdk/sign.tar.gz",
358 haltOnFailure = True
359 ))
360
361 factory.addStep(ShellCommand(
362 name = "signunpack",
363 description = "Unpacking signed files",
364 workdir = "build/sdk",
365 command = ["tar", "-xzf", "sign.tar.gz"],
366 haltOnFailure = True
367 ))
368
369 factory.addStep(ShellCommand(
370 name = "uploadprepare",
371 description = "Preparing package directory",
372 workdir = "build/sdk",
373 command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
374 env={'RSYNC_PASSWORD': rsync_bin_key},
375 haltOnFailure = True,
376 logEnviron = False
377 ))
378
379 factory.addStep(ShellCommand(
380 name = "packageupload",
381 description = "Uploading package files",
382 workdir = "build/sdk",
383 command = ["rsync", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
384 env={'RSYNC_PASSWORD': rsync_bin_key},
385 haltOnFailure = True,
386 logEnviron = False
387 ))
388
389 factory.addStep(ShellCommand(
390 name = "logprepare",
391 description = "Preparing log directory",
392 workdir = "build/sdk",
393 command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
394 env={'RSYNC_PASSWORD': rsync_bin_key},
395 haltOnFailure = True,
396 logEnviron = False
397 ))
398
399 factory.addStep(ShellCommand(
400 name = "logfind",
401 description = "Finding failure logs",
402 workdir = "build/sdk/logs/package/feeds",
403 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
404 haltOnFailure = False
405 ))
406
407 factory.addStep(ShellCommand(
408 name = "logcollect",
409 description = "Collecting failure logs",
410 workdir = "build/sdk",
411 command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
412 haltOnFailure = False
413 ))
414
415 factory.addStep(ShellCommand(
416 name = "logupload",
417 description = "Uploading failure logs",
418 workdir = "build/sdk",
419 command = ["rsync", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
420 env={'RSYNC_PASSWORD': rsync_bin_key},
421 haltOnFailure = False,
422 logEnviron = False
423 ))
424
425 if rsync_src_url is not None:
426 factory.addStep(ShellCommand(
427 name = "sourceupload",
428 description = "Uploading source archives",
429 workdir = "build/sdk",
430 command = ["rsync", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
431 env={'RSYNC_PASSWORD': rsync_src_key},
432 haltOnFailure = False,
433 logEnviron = False
434 ))
435
436 from buildbot.config import BuilderConfig
437
438 c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
439
440
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

# Web status is only enabled when [status] bind is configured; if user and
# password are also present, the dangerous actions are gated by BasicAuth,
# otherwise the web UI is effectively read-only.
if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
469
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}