# global: calculate suitable number of jobs
# [buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser  # Python 2 stdlib name (renamed configparser in Python 3)

from buildbot import locks

# Master-side settings live in config.ini next to this master.cfg in the
# buildmaster's base directory.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# Externally visible URL of this buildbot; also handed to cleanup.sh below.
buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
22
####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

# Defaults, each overridable from the [general] section of config.ini.
slave_port = 9990    # TCP port the master listens on for slave connections
persistent = False   # when True, skip the "cleanold" full wipe between builds
other_builds = 0     # extra concurrent builds per host, counted in GetNumJobs

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

c['slaves'] = []
# Maps slave name -> max concurrent builds on it; read later by GetNumJobs.
max_builds = dict()

# Every "slave <x>" ini section with both name and password registers one
# buildslave; an optional "builds" option raises its concurrency above 1.
for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds[name] = 1
            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True
63
####### CHANGESOURCES

def _ini_get(section, option, fallback=None):
    """Return ini option value, or *fallback* when the option is absent."""
    if ini.has_option(section, option):
        return ini.get(section, option)
    return fallback

# Base directory holding source.git, signall.sh, signing keys, etc.
home_dir = os.path.abspath(ini.get("general", "homedir"))

# Mandatory rsync target for built packages (URL + password).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync target for source archives (dl/); password is only read
# when a source URL is configured, matching the original behavior.
rsync_src_url = _ini_get("rsync", "source_url")
rsync_src_key = ini.get("rsync", "source_password") if rsync_src_url is not None else None

# Where to fetch the SDK archive from, and the filename pattern to request.
rsync_sdk_url = _ini_get("rsync", "sdk_url")
rsync_sdk_key = _ini_get("rsync", "sdk_password")
rsync_sdk_pat = _ini_get("rsync", "sdk_pattern", "lede-sdk-*.tar.xz")

# GnuPG signing settings; signing steps are only added when a keyid is set.
gpg_home = _ini_get("gpg", "home", "~/.gnupg")
gpg_keyid = _ini_get("gpg", "keyid")
gpg_comment = _ini_get("gpg", "comment", "Unattended build signature")
gpg_passfile = _ini_get("gpg", "passfile", "/dev/null")
107

# find arches
# arches collects [arch-name, target/subtarget, ...] rows printed by
# dumpinfo.pl; archnames keeps just the arch names for builder/scheduler
# registration.
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

for line in findarches.stdout:
    at = line.strip().split()
    # Skip blank output lines instead of crashing on at[0].
    if not at:
        continue
    arches.append(at)
    archnames.append(at[0])

# Reap the child so it does not linger as a zombie process.
findarches.wait()
123

# find feeds
feeds = []
# Maps feed repository URL -> branch name; consulted by the change filter so
# only changes on the polled branch trigger builds.
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parts = line.strip().split()
        # Skip blank lines (previously an IndexError) and any non src-git
        # entries such as comments or src-link feeds.
        if not parts or parts[0] != "src-git":
            continue
        feeds.append(parts)
        # Feed URLs may carry a branch after ';', e.g. "url;branch".
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
141
142
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

def branch_change_filter(change):
    """Accept a change only if it arrived on the branch polled for its feed."""
    return feedbranches[change.repository] == change.branch
150
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
# Automatic scheduler: any change on a polled feed branch rebuilds every
# architecture, after a 60s quiet period to coalesce bursts of commits.
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

# Manual trigger from the web UI for any architecture builder.
c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))
164
165 ####### BUILDERS
166
167 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
168 # what steps, and which slaves can execute them. Note that any particular build will
169 # only take place on one slave.
170
171 from buildbot.process.factory import BuildFactory
172 from buildbot.steps.source import Git
173 from buildbot.steps.shell import ShellCommand
174 from buildbot.steps.shell import SetProperty
175 from buildbot.steps.transfer import FileUpload
176 from buildbot.steps.transfer import FileDownload
177 from buildbot.steps.master import MasterShellCommand
178 from buildbot.process.properties import WithProperties
179
180
def GetDirectorySuffix(props):
    """Return an upload-directory suffix derived from the slave name.

    Slaves named "<name>-<major>.<minor>-<suffix>" build for a release
    branch; the middle version component becomes "-<major>.<minor>" and is
    appended to the upload path. Any other name yields "" (trunk layout).
    """
    # Raw string so the regex escape '\.' is not a stray string escape.
    if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    else:
        return ""
186
def GetNumJobs(props):
    """Return a suitable "make -j" value for the slave running this build.

    Divides the slave's CPU count (the "nproc" build property) by the number
    of builds that may run concurrently on that host (its configured builder
    slots plus the global other_builds setting), plus one. Falls back to 1
    when the CPU count or slave name is unknown.
    """
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        # '//' makes the intended floor division explicit (and keeps the
        # result an int under Python 3 semantics as well).
        return ((int(props["nproc"]) // (max_builds[props["slavename"]] + other_builds)) + 1)
    else:
        return 1
192
193
c['builders'] = []

# Per-slave download lock.
# NOTE(review): no step below appears to claim this lock — confirm whether
# it is still needed.
dlLock = locks.SlaveLock("slave_dl")

# Every builder may run on any configured slave.
slaveNames = [slave.slavename for slave in c['slaves']]
202
# One builder per architecture: each build fetches the matching SDK, builds
# all feed packages with it, optionally signs the package indexes on the
# master, then rsyncs packages, failure logs and sources to their targets.
for arch in arches:
    # arch[0] = arch name, arch[1] = "target/subtarget" (split into ts).
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

    if not persistent:
        # Non-persistent slaves: first wipe leftovers of other builders on
        # this slave ("full"), then this builder's own work area ("single").
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    # Fetch the SDK archive for this target/subtarget from the rsync mirror.
    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    # --strip-components=1 drops the versioned top-level directory so the
    # SDK contents land directly in sdk/.
    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = ["tar", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
        haltOnFailure = True))

    # Install the usign key pair so the SDK can sign package indexes.
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

    # Share one download cache across builds via a symlinked $HOME/dl.
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update"]))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"]))

    # Build everything; -j is computed per slave by GetNumJobs from the
    # "nproc" property set above. IGNORE_ERRORS lets individual package
    # failures continue the build; BUILD_LOG captures per-package logs.
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "V=s", "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y"]))

    # Optional master-side signing: pack all Packages indexes, upload them
    # to the master, sign there with the configured GPG key, download the
    # signed tarball back and unpack it in place.
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # Create (only) this arch's directory on the upload target; the
    # include/exclude dance restricts the transfer to the bare directory.
    # GetDirectorySuffix routes release-branch slaves to versioned dirs.
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # --delay-updates + --partial-dir keep the public tree consistent while
    # the transfer is in flight.
    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Failure-log handling is best-effort: haltOnFailure=False throughout.
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Extract failed package names from the make error output and list their
    # log files into logs.txt for the collect step below.
    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    # Mirror the shared download cache to the source mirror, if configured.
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    # NOTE(review): importing inside the loop re-executes each iteration;
    # harmless (modules are cached) but conventionally belongs at file top.
    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
399
400
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

# Serve the web status only when a bind port is configured; when credentials
# are also present, gate the mutating actions behind HTTP basic auth.
if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        # No credentials configured: read-only web status, no authz object.
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
429
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}