phase2: adjust change filter
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990
persistent = False

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

c['slaves'] = []

for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds))

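# For reference, a config.ini matching the options read above might look like
# the following. All values here are illustrative placeholders; only the
# section and option names come from the ini lookups in this file, and further
# [general] options (homedir, title, title_url) are read later on:
#
#   [general]
#   buildbot_url = http://buildbot.example.org/
#   port = 9990
#   persistent = false
#
#   [slave 1]
#   name = example-slave-1
#   password = example-password
#   builds = 1
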
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "lede-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
    gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
    gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
    gpg_passfile = ini.get("gpg", "passfile")

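# An [rsync] and [gpg] configuration matching the lookups above might look
# like this (URLs, module names and values are illustrative placeholders):
#
#   [rsync]
#   binary_url = user@rsync.example.org::packages-upload
#   binary_password = secret
#   source_url = user@rsync.example.org::sources-upload
#   source_password = secret
#   sdk_url = downloads.example.org::releases/targets
#   sdk_password = secret
#   sdk_pattern = lede-sdk-*.tar.xz
#
#   [gpg]
#   home = ~/.gnupg
#   keyid = 0123ABCD
#   comment = Unattended build signature
#   passfile = ~/.gnupg/passphrase
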

# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.strip().split()
    arches.append(at)
    archnames.append(at[0])

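# Each line printed by dumpinfo.pl is split on whitespace. The rest of this
# config only relies on the first field (the architecture name, used as the
# builder name and as the bin/packages/ subdirectory) and the second field
# (a target/subtarget pair that is later split on '/'), so a line is assumed
# to look roughly like:
#
#   x86_64 x86/64
#
# The exact values depend on the source tree being inspected.
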

# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parts = line.strip().split()
        # ignore blank lines and anything that is not a src-git feed
        if parts and parts[0] == "src-git":
            feeds.append(parts)
            url = parts[2].strip().split(';')
            branch = url[1] if len(url) > 1 else 'master'
            feedbranches[url[0]] = branch
            c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

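# Feed definitions are expected in the usual OpenWrt/LEDE format, i.e.
# "src-git <name> <url>[;<branch>]", for example (URL and branch below are
# illustrative only):
#
#   src-git packages https://git.example.org/feed/packages.git;branch-name
#
# One GitPoller is created per src-git feed, and incoming changes are later
# filtered against the branch recorded in feedbranches.
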

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, kick off a build on every architecture builder when a feed branch changes.

def branch_change_filter(change):
    return change.branch == feedbranches[change.repository]

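# Only changes whose branch matches the branch configured for that feed's
# repository (as recorded in feedbranches above) trigger a build; changes on
# other branches of the polled repositories are ignored.
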
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties


def GetDirectorySuffix(props):
    if props.hasProperty("slavename") and re.match(r"^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    else:
        return ""

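# For example, with the naming scheme assumed by the pattern above, a
# slavename such as "builder-17.01-01" yields the suffix "-17.01" (so its
# uploads land in "packages-17.01/"), while any other slavename yields no
# suffix at all.
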

c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
    slaveNames.append(slave.slavename)

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

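    # cleanup.sh (downloaded from the master above) is assumed to take the
    # buildbot URL, the slave name, the builder name and a mode argument:
    # "full" apparently removes leftovers from all previous builds, while
    # "single" only cleans the current builder's work area (inferred from how
    # it is invoked below).
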
    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = ["tar", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
        haltOnFailure = True))

    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update"]))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"]))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        command = ["make", WithProperties("-j%(nproc:~4)s"), "V=s", "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y"]))

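    # When a GPG key id is configured, the generated Packages index files are
    # packed up, uploaded to the master, signed there by signall.sh with the
    # configured key, downloaded back and unpacked over the build output, so
    # the signing key never has to live on the slaves.
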
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

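    # The remaining steps collect the build logs of packages that failed
    # (as listed in logs/package/error.txt) and upload them to the faillogs/
    # area next to the package tree; failures here do not abort the build.
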
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg = authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

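# A matching [status] section might look like this (values are illustrative
# placeholders; the bind value is handed to html.WebStatus as its http_port):
#
#   [status]
#   bind = tcp:8010
#   user = admin
#   password = secret
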
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}