ce406449b2d212a42caabd2885c261d34491267f
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import subprocess
7 import ConfigParser
8
9 from buildbot import locks
10
# Read the master's configuration from the INI file sitting next to this
# script; everything below is driven by its sections.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# Externally visible URL of this buildmaster; also handed to build steps.
buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
22
####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

# Defaults; each may be overridden from the [general] section of config.ini.
slave_port = 9990    # TCP port the master listens on for slave connections
persistent = False   # keep slave work areas between builds?
other_builds = 0     # builds that may run beside ours (used for job count)

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")
42
c['slaves'] = []
max_builds = dict()

# Each "[slave <x>]" section declares one buildslave; the optional 'builds'
# key caps how many builds may run on it concurrently (default 1).
for section in ini.sections():
    if not section.startswith("slave "):
        continue
    if not (ini.has_option(section, "name") and ini.has_option(section, "password")):
        continue
    name = ini.get(section, "name")
    password = ini.get(section, "password")
    max_builds[name] = ini.getint(section, "builds") if ini.has_option(section, "builds") else 1
    c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))
55
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# Coalesce compatible queued build requests into a single build.
c['mergeRequests'] = True
63
####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

def ini_get(section, option, fallback = None):
    # Helper: return the option's value if present, else the fallback.
    # Replaces the repeated has_option()/get() boilerplate below.
    if ini.has_option(section, option):
        return ini.get(section, option)
    return fallback

# rsync target for built packages (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync target for source archives; the password is only read (and
# is then mandatory, raising if absent) when a source URL is configured.
rsync_src_url = ini_get("rsync", "source_url")
rsync_src_key = ini.get("rsync", "source_password") if rsync_src_url is not None else None

# Where to fetch the SDK used for building packages, and which archive name
# pattern to download.
rsync_sdk_url = ini_get("rsync", "sdk_url")
rsync_sdk_key = ini_get("rsync", "sdk_password")
rsync_sdk_pat = ini_get("rsync", "sdk_pattern", "lede-sdk-*.tar.xz")

# GnuPG signing setup; signing steps are only added when a key id is set.
gpg_home = ini_get("gpg", "home", "~/.gnupg")
gpg_keyid = ini_get("gpg", "keyid")
gpg_comment = ini_get("gpg", "comment", "Unattended build signature")
gpg_passfile = ini_get("gpg", "passfile", "/dev/null")
107
108
# find arches
arches = [ ]
archnames = [ ]

# Ask the checked-out source tree which package architectures exist.
# dumpinfo.pl prints one arch per line: "<archname> <target>/<subtarget> ...";
# we keep the whole token list plus the bare name for builder naming.
findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

for line in findarches.stdout:
    at = line.strip().split()
    # Guard against blank output lines: at[0] would raise IndexError.
    if not at:
        continue
    arches.append(at)
    archnames.append(at[0])

# Reap the helper so it does not linger as a zombie process.
findarches.wait()
123
# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

# Parse feeds.conf.default: active feeds look like
#   src-git <name> <url>[;<branch>]
with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parts = line.strip().split()
        # Skip blank lines (parts[0] would raise IndexError) and anything
        # that is not an active src-git feed ('#...' comments, src-link, ...).
        if not parts or parts[0] != "src-git":
            continue
        feeds.append(parts)
        # The URL may carry a branch after ';'; default to 'master'.
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
141
142
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

def branch_change_filter(change):
    # Only react to commits that land on the branch we poll for that feed.
    wanted = feedbranches[change.repository]
    return wanted == change.branch
150
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter

c['schedulers'] = []

# Kick off every arch builder once a polled feed branch has been quiet
# for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

# Allow builds to be triggered manually from the web UI.
c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))
164
165 ####### BUILDERS
166
167 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
168 # what steps, and which slaves can execute them. Note that any particular build will
169 # only take place on one slave.
170
171 from buildbot.process.factory import BuildFactory
172 from buildbot.steps.source import Git
173 from buildbot.steps.shell import ShellCommand
174 from buildbot.steps.shell import SetProperty
175 from buildbot.steps.transfer import FileUpload
176 from buildbot.steps.transfer import FileDownload
177 from buildbot.steps.master import MasterShellCommand
178 from buildbot.process.properties import WithProperties
179
180
def GetDirectorySuffix(props):
    """Derive the "-<version>" upload directory suffix from the slave name.

    Slaves named "<prefix>-<major>.<minor>-<rest>" build for a release
    branch, so their uploads go to e.g. "packages-17.01/"; any other
    slave name (trunk builders) yields no suffix.
    """
    # Raw string so the regex escape '\.' is not subject to Python's
    # string-escape processing.
    if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    else:
        return ""
186
def GetNumJobs(props):
    """Compute the number of parallel make jobs for a build on this slave.

    Divides the slave's CPU count by the builds that may run beside this
    one (this master's per-slave slots plus the configured 'other_builds'),
    plus one so the result is never zero. Falls back to 1 when the CPU
    count or slave name is unknown.
    """
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        # Floor division: identical to '/' on Python 2 ints, but stays an
        # int (no float) if this config is ever run under Python 3.
        return ((int(props["nproc"]) // (max_builds[props["slavename"]] + other_builds)) + 1)
    else:
        return 1
192
193
c['builders'] = []

# Per-slave lock for download-heavy steps.
# NOTE(review): dlLock does not appear to be referenced below — confirm
# before removing.
dlLock = locks.SlaveLock("slave_dl")

# Every builder may run on any configured slave.
slaveNames = [ slave.slavename for slave in c['slaves'] ]
202
# Hoisted out of the loop: no point re-executing the import per arch.
from buildbot.config import BuilderConfig

# One builder per package architecture. Each build downloads the matching
# SDK, compiles all feed packages in it, optionally signs the package
# indexes on the master, and rsyncs packages/logs/sources to the mirrors.
for arch in arches:
    # arch is [archname, target/subtarget, ...]; split the target tuple.
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    # (0o755 == 0755: modern octal literal, valid on Python 2.6+ and 3)
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = ["tar", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
        haltOnFailure = True))

    # Package signing keypair used by the SDK build.
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0o600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0o600))

    # Share one download cache across builds via $HOME/dl.
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update"]))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"]))

    # Job count is derived from the slave's nproc property at build time.
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "V=s", "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y"]))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # Signing round-trips the package indexes through the master, where the
    # GnuPG key lives: pack -> upload -> sign -> download -> unpack.
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # Create the arch directory on the mirror without touching siblings.
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Failure logs are best-effort: none of the steps below halt the build.
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
405
406
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
# (The word "targets" had been mangled to "arches" by a global substitution.)

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        # Credentials configured: protect the destructive web-UI actions
        # behind HTTP basic auth.
        authz_cfg = authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        # No credentials configured: expose a read-only web status.
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
435
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}