b1d3860627e5aef400e0fed9e87904d7178fa627
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser  # Python 2 module name (renamed to "configparser" in Python 3)

from buildbot import locks

# All deployment-specific settings (slaves, rsync targets, signing key, web
# status credentials, ...) live in config.ini next to this master.cfg.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# Externally visible URL of this buildmaster; also handed to the slaves'
# cleanup script so it can identify which master a work area belongs to.
buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
# NOTE: BuildmasterConfig is the name buildbot itself looks up after
# exec'ing this file.
c = BuildmasterConfig = {}
22
####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

# Defaults; each may be overridden from the [general] section of config.ini.
slave_port = 9990	# TCP port the master listens on for slave connections
persistent = False	# keep build trees between builds instead of wiping them
other_builds = 0	# concurrent builds a slave runs outside this master

if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
	persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
	other_builds = ini.getint("general", "other_builds")

c['slaves'] = []
max_builds = dict()	# slave name -> maximum parallel builds on that slave

# Register one BuildSlave for every "[slave ...]" section that carries both
# a name and a password; sections missing credentials are silently ignored.
for section in ini.sections():
	if not section.startswith("slave "):
		continue
	if not ini.has_option(section, "name") or not ini.has_option(section, "password"):
		continue
	slave_name = ini.get(section, "name")
	slave_password = ini.get(section, "password")
	builds = ini.getint(section, "builds") if ini.has_option(section, "builds") else 1
	max_builds[slave_name] = builds
	c['slaves'].append(BuildSlave(slave_name, slave_password, max_builds = builds))
55
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds: merge queued requests for the same builder into one build
c['mergeRequests'] = True

# Reduce amount of backlog data the master keeps
c['buildHorizon'] = 30	# number of builds retained per builder
c['logHorizon'] = 20	# number of builds whose logs are retained
67
####### CHANGESOURCES

def _ini_get(section, option, default = None):
	# Helper: return the option's value, or `default` when the option is
	# absent, replacing the repeated has_option()/get() boilerplate below.
	if ini.has_option(section, option):
		return ini.get(section, option)
	return default

# Working directory of the master; contains source.git, dumpinfo.pl,
# the build signing keys and the temporary signing directory.
home_dir = os.path.abspath(ini.get("general", "homedir"))

# rsync destination for built packages (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for downloaded source archives.  Note that the
# password is read unconditionally once a source URL is configured: a
# missing source_password is a configuration error and should fail loudly.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# Where to fetch the prebuilt SDK archives used to compile the packages.
rsync_sdk_url = _ini_get("rsync", "sdk_url")
rsync_sdk_key = _ini_get("rsync", "sdk_password")
rsync_sdk_pat = _ini_get("rsync", "sdk_pattern", "lede-sdk-*.tar.xz")

# GPG settings for signing the generated package indexes; signing is only
# performed when a key id is configured (see the builder factory below).
gpg_home = _ini_get("gpg", "home", "~/.gnupg")
gpg_keyid = _ini_get("gpg", "keyid")
gpg_comment = _ini_get("gpg", "comment", "Unattended build signature")
gpg_passfile = _ini_get("gpg", "passfile", "/dev/null")
111
112
# find arches: ask the source tree's dumpinfo.pl helper which target
# architectures exist; one builder is created per architecture below.
arches = [ ]	# full records: [arch_name, "target/subtarget", ...]
archnames = [ ]	# architecture names only, used as builder names

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

# Iterate the pipe directly instead of a manual readline()/break loop.
for line in findarches.stdout:
	at = line.strip().split()
	if not at:
		continue	# tolerate blank lines in the helper output
	arches.append(at)
	archnames.append(at[0])

# Reap the child so it does not linger as a zombie for the master's lifetime.
findarches.wait()
127
128
# find feeds: parse the source tree's default feeds list and create one
# GitPoller per git-based feed.  Each relevant line has the form:
#   src-git <name> <url>[;<branch>]
feeds = []
feedbranches = dict()	# repository URL -> branch name we poll

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parts = line.strip().split()
		# Skip blank lines (which previously crashed on parts[0]),
		# comments and non-git feed methods such as src-link.
		if not parts or parts[0] != "src-git":
			continue
		feeds.append(parts)
		url = parts[2].strip().split(';')
		branch = url[1] if len(url) > 1 else 'master'
		feedbranches[url[0]] = branch
		c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
145
146
147 ####### SCHEDULERS
148
149 # Configure the Schedulers, which decide how to react to incoming changes. In this
150 # case, just kick off a 'basebuild' build
151
def branch_change_filter(change):
	"""Accept a change only if it is on the branch we poll for its feed."""
	expected_branch = feedbranches[change.repository]
	return expected_branch == change.branch
154
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []

# Rebuild every architecture once a polled feed branch has been quiet for
# 60 seconds; branch_change_filter drops changes on unpolled branches.
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
	treeStableTimer=60,
	builderNames=archnames))

# Allow builds of any architecture to be triggered manually from the web UI.
c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=archnames))
168
169 ####### BUILDERS
170
171 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
172 # what steps, and which slaves can execute them. Note that any particular build will
173 # only take place on one slave.
174
175 from buildbot.process.factory import BuildFactory
176 from buildbot.steps.source import Git
177 from buildbot.steps.shell import ShellCommand
178 from buildbot.steps.shell import SetProperty
179 from buildbot.steps.transfer import FileUpload
180 from buildbot.steps.transfer import FileDownload
181 from buildbot.steps.master import MasterShellCommand
182 from buildbot.process.properties import WithProperties
183
184
def GetDirectorySuffix(props):
	"""Derive an upload-directory suffix from the slave's name.

	Slaves named "<prefix>-<major>.<minor>-<rest>" build for a release
	branch; the "-<major>.<minor>" part is returned so their uploads land
	in a versioned directory.  Returns "" when the name does not follow
	that pattern or the slavename property is missing.
	"""
	# Raw string so "\." is a literal-dot regex escape rather than a
	# (deprecated) string escape sequence.
	if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
		return "-%s" % props["slavename"].split('-')[1]
	else:
		return ""
190
def GetNumJobs(props):
	"""Compute the make -j level for the slave running this build.

	Divides the slave's CPU count (the "nproc" build property) by the
	total number of builds it may run concurrently, so parallel builds
	share the machine fairly.  Falls back to 1 when either property is
	missing.
	"""
	if props.hasProperty("slavename") and props.hasProperty("nproc"):
		# Floor division: "/" only floored here under Python 2 int
		# semantics; "//" keeps the result an integer everywhere.
		return ((int(props["nproc"]) // (max_builds[props["slavename"]] + other_builds)) + 1)
	else:
		return 1
196
197
c['builders'] = []

# Slave-scoped lock for download steps.
# NOTE(review): no step in this part of the file acquires dlLock — confirm
# it is used elsewhere before removing.
dlLock = locks.SlaveLock("slave_dl")

# Every architecture builder may run on any configured slave.
slaveNames = [slave.slavename for slave in c['slaves']]
206
# One builder per target architecture.  Each runs the same pipeline:
# fetch and unpack the SDK for its target, build all feed packages with it,
# optionally sign the package indexes on the master, then rsync packages,
# failure logs and (optionally) source archives to the download server.
for arch in arches:
	# arch[0] is the architecture name; arch[1] is "<target>/<subtarget>",
	# split here to form the SDK download path.
	ts = arch[1].split('/')

	factory = BuildFactory()

	# find number of cores; stored in the "nproc" property and consumed
	# by GetNumJobs() for the make -j level
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

	# Non-persistent slaves get their work areas wiped before each build.
	# NOTE(review): cleanup.sh modes appear to be "full" = remove all of
	# this slave's builder directories, "single" = only this builder's —
	# confirm against cleanup.sh itself.
	if not persistent:
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
			haltOnFailure = True,
			timeout = 2400))

		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
			haltOnFailure = True,
			timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))

	# Fetch the prebuilt SDK archive for this target from the rsync mirror.
	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))  # keep RSYNC_PASSWORD out of the build log

	# --strip-components=1 drops the archive's versioned top-level
	# directory so the tree lands directly in sdk/.
	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = ["tar", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
		haltOnFailure = True))

	# Install the package-signing keypair into the SDK (private key kept 0600).
	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

	# Share one download cache across builds by symlinking sdk/dl to $HOME/dl.
	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

	# Start from a fresh default configuration every build.
	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update"]))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"]))

	# Build every package; IGNORE_ERRORS lets individual package failures
	# pass through so the remaining packages still get built, and
	# BUILD_LOG=1 preserves per-package logs for the faillog upload below.
	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "V=s", "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y"]))

	# Record the exact feed revisions used, next to the built packages.
	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

	# GPG signing happens on the master: the Packages indexes are packed,
	# uploaded, signed by signall.sh, then downloaded and unpacked in place.
	if gpg_keyid is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(home_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
			env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			slavedest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		# Unpacking over bin/packages/ replaces the indexes with their
		# signed counterparts.
		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

	# Create only this architecture's directory on the server (the
	# include/exclude pair transfers the directory itself but no files);
	# GetDirectorySuffix routes release-branch slaves to a versioned path.
	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# --delay-updates + --partial-dir keep the server consistent while the
	# transfer is in flight; --delete removes packages gone from this build.
	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# Same directory-only trick as uploadprepare, for the faillogs tree.
	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# Extract the names of failed packages from the build's error.txt and
	# list their log files; the result lands in build/sdk/logs.txt.
	# Failure here is non-fatal — there may simply be no failures.
	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))

	# Mirror the shared download cache to the source server, if configured.
	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	# NOTE(review): this import re-executes every loop iteration; Python
	# caches modules so it is harmless, but it could be hoisted to the top.
	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
409
410
####### STATUS TARGETS

# 'status' is a list of Status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

# Only start the web status when a bind address is configured; when
# credentials are also present, gate the dangerous actions behind basic auth.
if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	else:
		# No credentials: read-only web status without an authz policy.
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
439
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}