# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990
persistent = False
other_builds = 0
tree_expire = 0

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "expire"):
    tree_expire = ini.getint("general", "expire")

c['slaves'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds[name] = 1
            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))

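# Illustrative config.ini fragment matching the options read above; the
# section label and values are made-up examples, not part of this repository:
#
#   [general]
#   port = 9990
#   persistent = false
#   other_builds = 0
#   expire = 1209600
#
#   [slave example1]
#   name = example-slave-1
#   password = secret
#   builds = 2
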
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
    gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
    gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
    gpg_passfile = ini.get("gpg", "passfile")

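# Illustrative [rsync] and [gpg] config.ini fragments for the options read
# above; the URLs, rsync module names and key id are placeholders, not real
# endpoints:
#
#   [rsync]
#   binary_url = uploader@example.org::packages-upload
#   binary_password = secret
#   sdk_url = downloads.example.org::sdk
#   sdk_pattern = openwrt-sdk-*.tar.xz
#
#   [gpg]
#   keyid = 0xDEADBEEF
#   comment = Unattended build signature
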

# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.strip().split()
    arches.append(at)
    archnames.append(at[0])

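# Each line emitted by dumpinfo.pl is assumed to start with the package
# architecture followed by a target/subtarget pair, e.g. (hypothetical):
#   mips_24kc ar71xx/generic
# so arch[0] is the architecture name used for builders and upload paths,
# and arch[1] is the target/subtarget used to locate the matching SDK below.
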

# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

def parse_feed_entry(line):
    parts = line.strip().split()
    if parts and parts[0] == "src-git":
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

make = subprocess.Popen(['make', '--no-print-directory', '-C', home_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=home_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(line)

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)

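# parse_feed_entry() expects lines in the usual feeds.conf format, e.g.
# (illustrative URL):
#   src-git packages https://git.example.org/feed/packages.git;branch-name
# Each such line registers a GitPoller on that URL and branch and records the
# branch in feedbranches for the change filter below.
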

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes.
# In this setup, a change on any watched feed branch triggers a build on
# every architecture builder.

def branch_change_filter(change):
    return change.branch == feedbranches[change.repository]

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties


def GetDirectorySuffix(props):
    if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    else:
        return ""

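# Example (hypothetical slave name): "builder-18.06-a" matches the pattern
# above and yields the suffix "-18.06", so that slave's uploads land in a
# versioned packages-18.06/ tree; any other name yields an empty suffix.
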
def GetNumJobs(props):
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
    else:
        return 1

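# Example: with nproc=8, builds=1 for the slave and other_builds=0 this
# returns 8/1 + 1 = 9 make jobs; with builds=2 and other_builds=1 it returns
# 8/3 + 1 = 3 (integer division), so concurrent builds do not oversubscribe
# the machine.
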
def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"


c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
    slaveNames.append(slave.slavename)

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = home_dir+"/expire.sh",
            slavedest = "../expire.sh",
            mode = 0755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = home_dir+'/ccache.sh',
        slavedest = 'sdk/ccache.sh',
        mode = 0755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"]))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"]))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure log list",
        workdir = "build/sdk",
        command = ["rm", "-f", "logs/package/error.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

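    # Signing round trip (only when a gpg keyid is configured): the per-feed
    # Packages indexes are tarred up on the slave, uploaded to the master,
    # signed there by signall.sh, then downloaded again and unpacked back into
    # bin/packages/ so the signatures ship with the uploaded files.
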
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

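    # Upload in two passes: "uploadprepare" rsyncs only the bin/packages/<arch>/
    # directory entry (contents excluded) so the remote tree exists, then
    # "packageupload" syncs its contents into packages<suffix>/<arch>/ with
    # --delete and delayed/partial updates to keep the mirror consistent.
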
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

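# Illustrative [status] config.ini fragment for the web status above; the
# bind string and credentials are placeholders:
#
#   [status]
#   bind = tcp:8010:interface=127.0.0.1
#   user = admin
#   password = secret
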
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}