Remove unsupported logEnviron from master command calls
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

buildbot_url = ini.get("general", "buildbot_url")
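
# Illustrative sketch of the [general] section this file reads from config.ini;
# only the option names are taken from the code in this file, all values are
# placeholders:
#
#   [general]
#   buildbot_url = http://buildbot.example.org/
#   port = 9990
#   homedir = /home/buildbot/phase2
#   title = Example package builds
#   title_url = http://example.org/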

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990

if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")

c['slaves'] = []

for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds))

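# Hypothetical slave section in config.ini matched by the loop above; the
# section name only needs to start with "slave ", and "builds" is optional
# (defaults to one concurrent build):
#
#   [slave 1]
#   name = slave-1
#   password = secret
#   builds = 2
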
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "lede-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
	gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
	gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
	gpg_passfile = ini.get("gpg", "passfile")

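# Hypothetical [rsync] and [gpg] sections for the options read above;
# binary_url and binary_password are required, the rest are optional, and
# every value shown is a placeholder:
#
#   [rsync]
#   binary_url = upload.example.org::packages
#   binary_password = secret
#   sdk_url = downloads.example.org::sdk
#   sdk_pattern = lede-sdk-*.tar.xz
#
#   [gpg]
#   home = ~/.gnupg
#   keyid = 0xDEADBEEF
#   comment = Unattended build signature
#   passfile = /etc/buildbot/gpg-passphrase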

# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.strip().split()
	arches.append(at)
	archnames.append(at[0])

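# Assumed output of `dumpinfo.pl architectures`: one architecture per line, the
# first field the package architecture (used as the builder name), the second
# field presumably target/subtarget (used for the SDK download path below).
# Hypothetical example lines:
#
#   mips_24kc ar71xx/generic
#   arm_cortex-a9 imx6/generic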

# find feeds
feeds = []

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parts = line.strip().split()
		if len(parts) > 2 and parts[0] == "src-git":
			feeds.append(parts)
			url = parts[2].strip().split(';')
			branch = url[1] if len(url) > 1 else 'master'
			c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
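
# Hypothetical feeds.conf.default entry matched by the loop above; an optional
# ";branch" suffix on the URL selects the branch to poll (default "master"):
#
#   src-git packages https://git.example.org/feed/packages.git;lede-17.01
#
# This would add a GitPoller for that URL on branch "lede-17.01", with its work
# directory at ./packages.git under the master's current working directory.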


####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, a change on the master branch kicks off a build on every architecture builder.

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name="all",
	change_filter=filter.ChangeFilter(branch='master'),
	treeStableTimer=60,
	builderNames=archnames))

c['schedulers'].append(ForceScheduler(
	name="force",
	builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties

c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
	slaveNames.append(slave.slavename)

for arch in arches:
	ts = arch[1].split('/')

	factory = BuildFactory()

	# find number of cores
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

	factory.addStep(ShellCommand(
		name = "cleanold",
		description = "Cleaning previous builds",
		command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
		haltOnFailure = True,
		timeout = 2400))

	factory.addStep(ShellCommand(
		name = "cleanup",
		description = "Cleaning work area",
		command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
		haltOnFailure = True,
		timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "sdk"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = ["tar", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
		haltOnFailure = True))

	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
	factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))

	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update"]))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"]))

	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		command = ["make", WithProperties("-j%(nproc:~4)s"), "V=s", "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y"]))

	if gpg_keyid is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(home_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			slavesrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
			env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
			slavedest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
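
	# The steps above round-trip the Packages index files through the master:
	# pack them on the slave, upload, sign with signall.sh on the master, then
	# download and unpack the signed copies over the originals. For a
	# hypothetical arch "mips_24kc" with home_dir "/home/buildbot/phase2", the
	# master-side signing command would be roughly:
	#
	#   GNUPGHOME=~/.gnupg PASSFILE=/dev/null \
	#       /home/buildbot/phase2/signall.sh /home/buildbot/phase2/signing/mips_24kc.tar.gz <gpg_keyid> <gpg_comment>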

	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/packages/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))
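
	# The include/exclude filters above transfer only the bare "/<arch>/" directory
	# entry from bin/packages/ (its contents and all other top-level entries are
	# excluded), so this step just pre-creates the per-arch directory on the remote
	# before the full upload in the next step. For arch "mips_24kc" the shell
	# equivalent would be roughly:
	#
	#   rsync -av --include '/mips_24kc/' --exclude '/*' --exclude '/mips_24kc/*' \
	#       bin/packages/ <rsync_bin_url>/packages/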

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), "%s/packages/%s/" %(rsync_bin_url, arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	from buildbot.config import BuilderConfig

	c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
	if ini.has_option("status", "user") and ini.has_option("status", "password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopBuild = 'auth',
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		)
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
	else:
		c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
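
# Hypothetical [status] section for the block above; "bind" is passed to
# html.WebStatus as http_port (a port number or Twisted strports string), and
# user/password enable the authenticated web actions:
#
#   [status]
#   bind = tcp:8010
#   user = admin
#   password = secret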

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}