phase2: set CCACHE_BASEDIR
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")
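# Illustrative config.ini sketch (the section/option names are the ones read
# throughout this file; the values below are placeholders only):
#
#   [general]
#   title = LEDE Project
#   title_url = http://lede-project.org/
#   buildbot_url = http://phase2.builds.example.org/
#   homedir = .
#   port = 9990
#   persistent = false
#   other_builds = 0
#   expire = 1209600
#
#   [rsync]
#   binary_url = user@example.org::packages
#   binary_password = example
#   sdk_url = rsync://example.org/snapshots/targets
#   sdk_password = example
#   sdk_pattern = lede-sdk-*.tar.xz
#
#   [gpg]
#   home = ~/.gnupg
#   keyid = <signing key id>
#   comment = Unattended build signature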

buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990
persistent = False
other_builds = 0
tree_expire = 0

if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
	persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
	other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "expire"):
	tree_expire = ini.getint("general", "expire")

c['slaves'] = []
max_builds = dict()

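# Each "slave NN" section of config.ini defines one buildslave; a minimal
# sketch (name and password are placeholders):
#
#   [slave 1]
#   name = example-slave-01
#   password = example
#   builds = 1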
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds[name] = 1
			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "lede-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
	gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
	gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
	gpg_passfile = ini.get("gpg", "passfile")


# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.strip().split()
	arches.append(at)
	archnames.append(at[0])
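# Each line emitted by dumpinfo.pl is assumed to be the package architecture
# followed by its "target/subtarget" pair (for example "mips_24kc ar71xx/generic",
# illustrative only); at[0] becomes the builder name and at[1] is split into
# target and subtarget when the SDK is fetched below.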


# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

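# feeds.conf.default entries are expected to look like
#   src-git <name> <repository-url>[;<branch>]
# e.g. "src-git packages https://git.example.org/feed/packages.git;master"
# (URL illustrative); when no ";branch" suffix is present, 'master' is assumed.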
with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parts = line.strip().split()
		if len(parts) > 2 and parts[0] == "src-git":
			feeds.append(parts)
			url = parts[2].strip().split(';')
			branch = url[1] if len(url) > 1 else 'master'
			feedbranches[url[0]] = branch
			c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))


####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, trigger a build on every architecture builder whenever its feed branch changes.

def branch_change_filter(change):
	return change.branch == feedbranches[change.repository]

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
	treeStableTimer=60,
	builderNames=archnames))

c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties


def GetDirectorySuffix(props):
	if props.hasProperty("slavename") and re.match("^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
		return "-%s" % props["slavename"].split('-')[1]
	else:
		return ""
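# Example: a slavename of the form "<host>-<release>-<id>", say "slave-17.01-a"
# (illustrative), yields the suffix "-17.01", which routes uploads into a
# per-release "packages-17.01" directory; any other naming yields no suffix.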

def GetNumJobs(props):
	if props.hasProperty("slavename") and props.hasProperty("nproc"):
		return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
	else:
		return 1
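# Example: with nproc=8, max_builds=2 for this slave and other_builds=1
# (values illustrative), the result is 8 / (2 + 1) + 1 = 3 jobs (integer
# division), so parallel builds on one slave do not oversubscribe its CPUs.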

def GetCwd(props):
	if props.hasProperty("builddir"):
		return props["builddir"]
	elif props.hasProperty("workdir"):
		return props["workdir"]
	else:
		return "/"


c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
	slaveNames.append(slave.slavename)

for arch in arches:
	ts = arch[1].split('/')

	factory = BuildFactory()

	# find number of cores
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

	if not persistent:
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
			haltOnFailure = True,
			timeout = 2400))

		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
			haltOnFailure = True,
			timeout = 2400))

	# expire tree if needed
	elif tree_expire > 0:
		factory.addStep(FileDownload(
			mastersrc = home_dir+"/expire.sh",
			slavedest = "../expire.sh",
			mode = 0755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))

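	# The SDK archive is fetched from <sdk_url>/<target>/<subtarget>/ using the
	# rsync_sdk_pat glob (default "lede-sdk-*.tar.xz"); ts[0] and ts[1] are the
	# target and subtarget split from this builder's arch entry above.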
	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = ["tar", "--keep-newer-files", "--no-overwrite-dir", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
		haltOnFailure = True))

	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(FileDownload(
		mastersrc = home_dir+'/ccache.sh',
		slavedest = 'sdk/ccache.sh',
		mode = 0755))

	factory.addStep(ShellCommand(
		name = "prepccache",
		description = "Preparing ccache",
		workdir = "build/sdk",
		command = ["./ccache.sh"]))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update"]))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"]))

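	# -j is computed per slave by GetNumJobs; CCACHE_BASEDIR is pointed at the
	# slave's build directory (via GetCwd) so ccache rewrites absolute paths
	# relative to it and cached objects stay valid across differing builddirs.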
	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
		env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)}))

	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

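	# If a GPG key id is configured, package indexes are signed on the master:
	# the Packages files are tarred up on the slave, uploaded to the master,
	# signed there by signall.sh, then downloaded and unpacked in place again.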
	if gpg_keyid is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(home_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
			env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			slavedest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

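	# Upload happens in two passes: first create only this arch's directory on
	# the mirror (everything else is excluded), then sync the freshly built
	# packages into it using checksums and delayed updates.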
	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

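	# Failure logs: extract the names of failed packages from
	# logs/package/error.txt, list their per-package log files into logs.txt,
	# stage those logs under faillogs/ and upload them next to the packages.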
	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

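# Illustrative [status] section of config.ini (values are placeholders);
# "user" and "password" are optional and enable the authenticated web actions:
#
#   [status]
#   bind = 8011
#   user = example
#   password = example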
if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	else:
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}