phase2: add version suffix to faillogs directory
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990
persistent = False
other_builds = 0
tree_expire = 0
git_ssh = False

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "expire"):
    tree_expire = ini.getint("general", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

c['slaves'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds[name] = 1
            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
    gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
    gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
    gpg_passfile = ini.get("gpg", "passfile")
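
# For reference, a minimal config.ini sketch -- illustrative only, the values
# are placeholders, but the section and option names are the ones queried via
# ini.get()/ini.has_option() in this file:
#
#   [general]
#   title = Package builds
#   title_url = http://buildbot.example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = /home/buildbot/phase2
#   port = 9990
#
#   [rsync]
#   binary_url = user@upload.example.org::packages
#   binary_password = secret
#
#   [gpg]
#   keyid = <key id>
#   comment = Unattended build signature
#
#   [slave 1]
#   name = slave-01
#   password = secret
#   builds = 2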


# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.strip().split()
    arches.append(at)
    archnames.append(at[0])
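
# Each line emitted by "dumpinfo.pl architectures" is split into a list; the
# way it is used below assumes the first field is the package architecture
# (used as the builder name and bin/packages/ subdirectory) and the second
# field is the "target/subtarget" pair used to locate the SDK on the rsync
# mirror, e.g. a line might look like (hypothetical values):
#
#   mips_24kc ath79/generic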


# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0] == "src-git":
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

make = subprocess.Popen(['make', '--no-print-directory', '-C', home_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=home_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(line)

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)
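
# parse_feed_entry() only reacts to "src-git" entries in the usual feeds.conf
# syntax, where the third field is the repository URL with an optional
# ";branch" suffix (defaulting to 'master'), for example (illustrative line):
#
#   src-git packages https://git.openwrt.org/feed/packages.git;openwrt-18.06
#
# One GitPoller per feed is registered so that commits on the matching branch
# trigger builds.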


####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, kick off a build on every architecture builder whenever its feed branch changes.

def branch_change_filter(change):
    return change.branch == feedbranches[change.repository]

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import StringDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties


def GetDirectorySuffix(props):
    verpat = re.compile('^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""
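
# Worked examples for GetDirectorySuffix(): release versions such as
# "18.06.2", "18.06.0-rc1" or "18.06-SNAPSHOT" (the version numbers here are
# only examples) all map to the suffix "-18.06", so uploads land in
# per-release packages-XX.YY/ and faillogs-XX.YY/ directories; anything that
# does not match the pattern, including a missing release_version property,
# yields an empty suffix and therefore the unversioned directories.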

def GetNumJobs(props):
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
    else:
        return 1
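
# GetNumJobs() spreads the slave's CPUs across everything that may build there
# at the same time: nproc is divided by the slave's build slots plus the
# configured other_builds, then one is added. For example (hypothetical
# slave), 16 cores with builds = 2 and other_builds = 2 gives -j5; if the
# nproc or slavename property is missing, the build falls back to -j1.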

def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"


c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
    slaveNames.append(slave.slavename)

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = home_dir+"/expire.sh",
            slavedest = "../expire.sh",
            mode = 0755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        slavedest = "sdk/getversion.mk",
        mode = 0755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = home_dir+'/ccache.sh',
        slavedest = 'sdk/ccache.sh',
        mode = 0755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(FileDownload(
            name = "dlgitclonekey",
            mastersrc = home_dir+"/git-clone.key",
            slavedest = "../git-clone.key",
            mode = 0600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf",
            workdir = "build/sdk",
            command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))
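
    # Taken together, the optional signing steps above round-trip the package
    # indexes through the master: the per-feed Packages files are packed into
    # sign.tar.gz on the slave, uploaded to the master, signed there by
    # signall.sh with the configured GnuPG key, then downloaded again and
    # unpacked over bin/packages/ before the upload steps below run.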

    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))
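
    # Note that the failure logs are uploaded into a faillogs%(suffix)s/<arch>/
    # hierarchy carrying the same per-release version suffix, computed by
    # GetDirectorySuffix(), as the packages%(suffix)s/ tree above.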

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(slavename)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        alwaysRun = True
    ))

    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}