# -*- python -*-
# ex: set syntax=python:
# vim:ft=python

# This is a buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

import datetime
# NOTE: ifilter/imap/izip exist only on Python 2; this config targets
# buildbot 0.8.x, which is Python 2 only.
from itertools import ifilter, imap, izip
import json
import os
import re
import subprocess

from buildbot.plugins import buildslave, changes, schedulers, status, steps, util
# In 0.8.12, WebStatus can't be used as a plugin because it doesn't
# actually implement the IStatusReceiver interface, as it claims to.
from buildbot.status.web.baseweb import WebStatus
from twisted.internet import defer


####### HELPER FUNCTIONS #######

def _path(name):
    """Return *name* resolved relative to the directory of this file."""
    return os.path.join(os.path.dirname(__file__), name)


def merge_dicts(*dicts):
    """Merge the given dicts left-to-right; later keys win. Returns a new dict."""
    res = {}
    for d in dicts:
        res.update(d)
    return res


def port_from_path(path, sep='/'):
    """Return the port (directory) name for a path inside the ports tree.

    Paths are expected to look like ``category/portdir/Portfile`` or
    ``category/portdir/files/...``; anything under ``_resources`` or with
    fewer than three components yields None.
    """
    components = path.split(sep)
    try:
        if (components[0] != '_resources' and
                components[2] in ('Portfile', 'files')):
            return components[1]
    except IndexError:
        pass
    # Might be better to throw a custom exception here?
    return None


# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

# Default settings; any key may be overridden via the external JSON file
# named by 'configfile' below.
config = {
    # Production or development
    'production': False,

    # Connections
    'slaveport': 9989,
    'httpport': 8010,

    # External configuration. Use absolute paths when overriding these.
    'configfile': _path('config.json'),
    'workersfile': _path('slaves.json'),
    'htpasswdfile': _path('htpasswd'),

    # GitHub. Repository URLs must have the ".git" suffix.
    'githubsecretfile': _path('github.secret'),
    'baseurl': 'https://github.com/macports/macports-base.git',
    'mpbburl': 'https://github.com/macports/mpbb.git',
    'portsurl': 'https://github.com/macports/macports-ports.git',
    'wwwurl': 'https://github.com/macports/macports-www.git',
    'guideurl': 'https://github.com/macports/macports-guide.git',
    'infraurl': 'https://github.com/macports/macports-infrastructure.git',

    # Tooling
    'slaveprefix': '/opt/local',
    'toolsprefix': '/opt/mports',

    # Deployment
    'archivesite': 'https://packages.macports.org',
    'privkey': '',
    'deploy': {},

    # Site definitions
    # (http://docs.buildbot.net/0.8.12/manual/cfg-global.html#site-definitions)
    'title': 'MacPorts',
    'titleurl': 'https://www.macports.org/',
    'buildboturl': 'http://localhost:8010/',

    # Database
    # (http://docs.buildbot.net/0.8.12/manual/cfg-global.html#database-specification)
    'db': {'db_url': 'sqlite:///state.sqlite'},

    # Data lifetime
    # (http://docs.buildbot.net/0.8.12/manual/cfg-global.html#data-lifetime)
    'buildcachesize': 600,
    'buildhorizon': 10000,
    'eventhorizon': 2000,
    'loghorizon': 5000
}

# Override defaults with external settings.
# A missing config file is fine (defaults apply); a malformed one will
# deliberately raise so the error is noticed.
try:
    with open(config['configfile']) as f:
        extconfig = json.load(f)
except IOError:
    extconfig = {}
config.update(extconfig)

# PATH values used by the build steps below.
path_base = '/usr/bin:/bin:/usr/sbin:/sbin'
path_ports = os.path.join(config['toolsprefix'], 'bin') + ':' + path_base
path_jobs = path_ports

# Allow spaces and tabs in property values
c['validation'] = {'property_value': re.compile(r'^[ \t\w./~:-]*$')}


####### BUILDSLAVES #######

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
# Load the slave (worker) roster from the external JSON file. The file has
# two keys: 'build_platforms' (list of platform identifiers) and 'slaves'
# (mapping of slave name -> password).
with open(config['workersfile']) as f:
    slavedata = json.load(f)

# convert unicode to byte strings
build_platforms = [s.encode('utf-8') for s in slavedata['build_platforms']]

c['slaves'] = [buildslave.BuildSlave(name, pwd)
               for name, pwd in slavedata['slaves'].iteritems()]

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = config['slaveport']


####### WEB STATUS AND CHANGE HOOKS #######

# WebStatus [1] runs a web server that serves the web interface. It can
# also accept HTTP requests at a hook endpoint [2] and translate them
# into Changes. We're using the GitHub hook [3] to accept webhook
# payloads [4].
#
# [1]: http://docs.buildbot.net/0.8.12/manual/cfg-statustargets.html#webstatus
# [2]: http://docs.buildbot.net/0.8.12/manual/cfg-statustargets.html#change-hooks
# [3]: http://docs.buildbot.net/0.8.12/manual/cfg-statustargets.html#github-hook
# [4]: https://developer.github.com/webhooks

if config['production']:
    # The shared secret lets the hook verify that payloads really come
    # from GitHub ('strict' rejects unsigned requests).
    with open(config['githubsecretfile']) as f:
        githubsecret = f.readline().rstrip('\n')
    change_hook_kwargs = {
        'change_hook_dialects': {
            'github': {
                'secret': githubsecret,
                'strict': True
            }
        }
    }
else:
    # TODO Add alternate change source, probably a GitPoller.
    change_hook_kwargs = {}

c['status'] = [
    WebStatus(
        http_port=config['httpport'],
        authz=util.Authz(
            auth=util.HTPasswdAprAuth(config['htpasswdfile']),
            gracefulShutdown='auth',
            forceBuild='auth',
            forceAllBuilds='auth',
            pingBuilder='auth',
            stopBuild='auth',
            stopAllBuilds='auth',
            cancelPendingBuild='auth'),
        **change_hook_kwargs)
]


####### SCHEDULERS #######

# Platform subsets: 'base' is not built on legacy/10.6_i386 slaves, ports
# are not built on linux or 10.5_ppc.
base_platforms = [plat for plat in build_platforms
                  if 'legacy' not in plat and '10.6_i386' not in plat]
port_platforms = [plat for plat in build_platforms
                  if 'linux' not in plat and '10.5_ppc' != plat]

base_buildernames = map('base-{}'.format, base_platforms)
portwatcher_buildernames = map('ports-{}-watcher'.format, port_platforms)
portbuilder_buildernames = map('ports-{}-builder'.format, port_platforms)
portbuilder_triggerables = map('ports-{}-trigger'.format, port_platforms)

# The ChangeFilters assume that Git URLs end with ".git".
c['schedulers'] = [
    schedulers.SingleBranchScheduler(
        name='base',
        treeStableTimer=5,
        change_filter=util.ChangeFilter(
            repository=config['baseurl'][:-4],
            branch='master'),
        builderNames=base_buildernames),
    schedulers.SingleBranchScheduler(
        name='ports',
        # Don't start a separate build for every pushed commit.
        treeStableTimer=5,
        change_filter=util.ChangeFilter(
            repository=config['portsurl'][:-4],
            branch='master',
            # Should actually skip changes to files/ only, but only if
            # we know the last build of the port succeeded.
            filter_fn=lambda change: any(port_from_path(f)
                                         for f in change.files)),
        builderNames=portwatcher_buildernames),
    schedulers.ForceScheduler(
        name='base_force',
        builderNames=base_buildernames),
    # schedulers.ForceScheduler(
    #     name='portbuilder_force',
    #     builderNames=portbuilder_buildernames,
    #     properties=[util.StringParameter(
    #         name='portname',
    #         label='Port name:',
    #         default='',
    #         required=True)
    #     ]),
    schedulers.ForceScheduler(
        name='portwatcher_force',
        builderNames=portwatcher_buildernames,
        properties=[util.StringParameter(
            name='portlist',
            label='Port list:',
            default='',
            size=30,
            required=True)])
]

if 'www' in config['deploy']:
    c['schedulers'].extend((
        schedulers.SingleBranchScheduler(
            name='www',
            treeStableTimer=300,
            change_filter=util.ChangeFilter(
                repository=config['wwwurl'][:-4],
                branch='master'),
            builderNames=['jobs-www']),
        schedulers.ForceScheduler(
            name='www_force',
            builderNames=['jobs-www'])
    ))

if 'guide' in config['deploy']:
    c['schedulers'].extend((
        schedulers.SingleBranchScheduler(
            name='guide',
            treeStableTimer=300,
            change_filter=util.ChangeFilter(
                repository=config['guideurl'][:-4],
                branch='master'),
            builderNames=['jobs-guide']),
        schedulers.ForceScheduler(
            name='guide_force',
            # BUG FIX: this previously read ['jobs-www'] (copy-paste from
            # the www section above), so forcing a guide build would
            # actually run the www builder.
            builderNames=['jobs-guide'])
    ))

if 'portindex' in config['deploy']:
    c['schedulers'].extend((
        schedulers.SingleBranchScheduler(
            name='portindex',
            treeStableTimer=300,
            change_filter=util.ChangeFilter(
                repository=config['portsurl'][:-4],
                branch='master'),
            builderNames=['jobs-portindex']),
        schedulers.ForceScheduler(
            name='portindex_force',
            builderNames=['jobs-portindex'])
    ))

# One Triggerable per port platform; the portwatcher triggers the matching
# portbuilder through these.
portbuilders = izip(portbuilder_triggerables, portbuilder_buildernames)
c['schedulers'].extend(schedulers.Triggerable(name=t, builderNames=[b])
                       for t, b in portbuilders)


####### BUILDERS #######

# WARNING: mergeRequests has to be False or Triggerable builds will not be
# scheduled correctly!
c['mergeRequests'] = False

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

# -- Base --
base_factory = util.BuildFactory()
base_factory.workdir = '../build'
# Set progress=True on Git steps to prevent timeouts on slow fetches.
base_factory.addStep(steps.Git(
    repourl=config['baseurl'],
    progress=True,
    mode='full',
    method='copy',
    env={'PATH': path_ports}))
# Configure base into an install tree inside the workdir; the install
# user/group are whatever the slave runs as.
base_factory.addStep(steps.Configure(command=util.WithProperties("""
env PATH=/usr/bin:/bin:/usr/sbin:/sbin ./configure --enable-readline \
--prefix=%(workdir)s/opt/local \
--with-applications-dir=%(workdir)s/opt/local/Applications \
--with-install-user=`id -un` \
--with-install-group=`id -gn` \
"""), logfiles={'config.log': 'config.log'}))
base_factory.addStep(steps.Compile(command='make -j`sysctl -n hw.activecpu`'))
base_factory.addStep(steps.ShellCommand(
    command='make install',
    name='install',
    description=['installing'],
    descriptionDone=['install']))
base_factory.addStep(steps.ShellCommand(
    command='make test',
    name='test',
    description=['testing'],
    descriptionDone=['test']))
base_factory.addStep(steps.ShellCommand(
    command=util.WithProperties('make distclean; rm -rf %(workdir)s/opt/local'),
    name='clean',
    description=['cleaning'],
    descriptionDone=['clean']))


# custom class to make the file list available on the slave...
class SetPropertyFromCommandWithPortlist(steps.SetPropertyFromCommand):

    def setBuild(self, build):
        # Collect the full set of ports affected by this build into the
        # 'fullportlist' property, from the forced-build 'portlist'
        # property plus the changed files of the triggering changes.
        super(SetPropertyFromCommandWithPortlist, self).setBuild(build)

        # support forced build properties
        ports = set(self.getProperty('portlist', default='').split())

        # paths should be category/portdir(/...)
        ports.update(ifilter(None, imap(port_from_path, self.build.allFiles())))

        self.setProperty('fullportlist', ' '.join(ports))

    def getText(self, cmd, results):
        if self.hasProperty('subportlist'):
            return ['Port list: {}'.format(self.getProperty('subportlist'))]
        # let ShellCommand describe
        return steps.ShellCommand.getText(self, cmd, results)


# can't run with prefix inside the workdir in production,
# because archives must be built with prefix=/opt/local
if config['production']:
    prefix = '/opt/local'
    dlhost = 'packages@packages-origin.macports.org'
    dlpath = '/var/www/html/packages'
else:
    prefix = config['slaveprefix']
    dlhost = ''
    dlpath = './deployed_archives'

# Staging directory for built archives; made unique per builder on the
# master so concurrent uploads don't collide.
ulpath = 'archive_staging'
ulpath_unique = ulpath + '-%(buildername)s'


@util.renderer
def make_build_url(props):
    # Render the waterfall URL of the running build from its properties.
    buildername = props.getProperty('buildername')
    buildnumber = props.getProperty('buildnumber')
    url = c['buildbotURL']
    if not url.endswith('/'):
        url += '/'
    url += 'builders/%s/builds/%s' % (buildername, buildnumber)
    return url


class TriggerWithPortlist(steps.Trigger):
    # Trigger one build per subport rather than a single build for the
    # whole list: fan out the 'subportlist' property as per-build
    # 'portname' properties.
    def getSchedulersAndProperties(self):
        sp = []
        for scheduler in self.schedulerNames:
            for port in self.build.getProperty('subportlist').split():
                props = self.set_properties.copy()
                props['portname'] = port
                sp.append([scheduler, props])
        return sp


# -- Port Watcher --

def make_portwatcher_factory(triggerable):
    """Build the factory for a ports watcher that fans out to *triggerable*."""
    portwatcher_factory = util.BuildFactory()
    portwatcher_factory.useProgress = False
    portwatcher_factory.workdir = '../build'
    # get mpbb; we'll do the checkout of base and dports via these scripts
    portwatcher_factory.addStep(steps.Git(
        repourl=config['mpbburl'],
        progress=True,
        env={'PATH': path_ports},
        alwaysUseLatest=True,
        workdir=os.path.join(portwatcher_factory.workdir, 'mpbb'),
        haltOnFailure=True))
    portwatcher_factory.addStep(steps.ShellCommand(
        command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix),
                 'cleanup'],
        name='cleanup',
        description=['cleaning'],
        descriptionDone=['clean']))
    portwatcher_factory.addStep(steps.ShellCommand(
        command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix),
                 'selfupdate'],
        name='selfupdate',
        description=['updating', 'MacPorts'],
        descriptionDone=['update', 'MacPorts'],
        haltOnFailure=True))
    portwatcher_factory.addStep(steps.ShellCommand(
        command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix),
                 'checkout', '--ports-url', config['portsurl'],
                 '--ports-commit', util.Property('revision', default='FETCH_HEAD')],
        timeout=3600,
        name='checkout',
        description=['syncing', 'ports'],
        descriptionDone=['sync', 'ports'],
        haltOnFailure=True))

    def extract_subportlist(rc, stdout, stderr):
        """
        Extract function for SetPropertyFromCommand().

        Buildbot did not get the capability to ignore or distinguish
        stderr output before 0.9.x, but extract_fn always had the option
        to deal with them separately, so do that.

        This is called by SetPropertyFromCommand with the return value of
        the command and strings containing stdout and stderr. The return
        value should be a dictionary of new properties to be set.
        """
        if rc != 0:
            # Set an empty subport list on error
            return {'subportlist': ''}
        subports = [x.strip() for x in stdout.splitlines()]
        return {'subportlist': ' '.join(sorted(subports))}

    portwatcher_factory.addStep(SetPropertyFromCommandWithPortlist(
        command=util.WithProperties('./mpbb/mpbb list-subports %(fullportlist)s'),
        extract_fn=extract_subportlist,
        name='subports',
        description=['listing', 'subports']))

    portwatcher_factory.addStep(TriggerWithPortlist(
        schedulerNames=[triggerable],
        set_properties={'triggered_by': make_build_url},
        waitForFinish=True,
        updateSourceStamp=True))

    # make a logfile summarising the success/failure status for each port
    # (Current approach is not so useful as it is not incremental;
    # ideally this would already be displayed during the Trigger step.)
    # NOTE(review): 'logdir' is the module-level name defined in the
    # Port Builder section below; it exists by the time this factory runs.
    portwatcher_factory.addStep(steps.ShellCommand(
        command=['cat', os.path.join(logdir, 'ports-progress.txt')],
        name='summary',
        description=['summary']))

    return portwatcher_factory


# -- Port Builder --

portbuilder_factory = util.BuildFactory()
portbuilder_factory.useProgress = False
portbuilder_factory.workdir = '../build'
logdir = os.path.join(portbuilder_factory.workdir, 'logs')

portbuilder_factory.addStep(steps.Compile(
    command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix),
             'install-dependencies', util.WithProperties('%(portname)s')],
    name='install-dependencies',
    description=['installing', 'dependencies', 'of', util.WithProperties('%(portname)s')],
    descriptionDone=['install', 'dependencies', 'of', util.WithProperties('%(portname)s')],
    logfiles={'dependencies': os.path.join(logdir, 'dependencies-progress.txt')},
    haltOnFailure=True))
portbuilder_factory.addStep(steps.Compile(
    command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix),
             'install-port', util.WithProperties('%(portname)s')],
    name='install-port',
    description=['installing', util.WithProperties('%(portname)s')],
    descriptionDone=['install', util.WithProperties('%(portname)s')],
    logfiles={'files': os.path.join(logdir, 'port-contents.txt'),
              'statistics': os.path.join(logdir, 'port-statistics.txt'),
              'main.log': os.path.join(logdir, 'main.log')},
    haltOnFailure=True))
portbuilder_factory.addStep(steps.ShellCommand(
    command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix),
             'gather-archives', '--archive-site', config['archivesite'],
             '--staging-dir', ulpath],
    name='gather-archives',
    description=['gathering', 'archives'],
    descriptionDone=['gather', 'archives'],
    haltOnFailure=True))

# upload archives from build slave to master
portbuilder_factory.addStep(steps.DirectoryUpload(
    slavesrc=ulpath,
    masterdest=util.WithProperties(ulpath_unique)))

# XXX: move deploy_archives.sh functionality to mp-buildbot
# sign generated binaries and sync to download server (if distributable)
if config['production']:
    portbuilder_factory.addStep(steps.MasterShellCommand(
        command=['./deploy_archives.sh', util.WithProperties(ulpath_unique)],
        name='deploy-archives',
        description=['deploying', 'archives'],
        descriptionDone=['deploy', 'archives'],
        env={'PRIVKEY': config['privkey'],
             'DLHOST': dlhost,
             'DLPATH': dlpath}))

# TODO: do we want to upload the individual logs so maintainers can review them?
portbuilder_factory.addStep(steps.ShellCommand(
    command=['./mpbb/mpbb', '--prefix', util.WithProperties(prefix), 'cleanup'],
    name='cleanup',
    description=['cleaning'],
    descriptionDone=['clean'],
    alwaysRun=True))


def make_rsync_deploy_steps(host, user, sshkeyfile, sshknownhostsfile, srcpath, destpath):
    """Return the steps that rsync *srcpath* on the slave to user@host:destpath.

    The SSH key and known_hosts file are first copied from the master to
    the slave, then used by rsync via RSYNC_RSH.
    """
    return (
        steps.FileDownload(
            name='ssh key',
            description='transferring',
            descriptionDone='transfer',
            mastersrc=sshkeyfile,
            slavedest='ssh_key',
            mode=0600),
        steps.FileDownload(
            name='ssh known_hosts',
            description='transferring',
            descriptionDone='transfer',
            mastersrc=sshknownhostsfile,
            slavedest='ssh_known_hosts',
            mode=0600),
        steps.ShellCommand(
            name='rsync',
            description='deploying',
            descriptionDone='deploy',
            command='rsync -avzhC --delay-updates --delete-delay %s/ %s@%s:%s/' % (srcpath, user, host, destpath),
            env={'RSYNC_RSH': 'ssh -i ssh_key -oUserKnownHostsFile=ssh_known_hosts'})
    )


if 'www' in config['deploy']:
    jobs_www_factory = util.BuildFactory()
    # TODO: incremental mode with cleanup?
    jobs_www_factory.addStep(steps.Git(
        repourl=config['wwwurl'],
        progress=True,
        mode='full',
        method='copy',
        workdir='www'))
    # TODO: validate/lint files
    jobs_www_factory.addSteps(
        make_rsync_deploy_steps(
            host=config['deploy']['www']['host'],
            user=config['deploy']['www']['user'],
            sshkeyfile=config['deploy']['www']['sshkeyfile'],
            sshknownhostsfile=config['deploy']['www']['sshknownhostsfile'],
            srcpath='www',
            destpath=config['deploy']['www']['destpath']))

if 'portindex' in config['deploy']:
    jobs_portindex_factory = util.BuildFactory()
    # TODO: incremental mode with cleanup?
    jobs_portindex_factory.addStep(steps.Git(
        name='git infrastructure',
        repourl=config['infraurl'],
        progress=True,
        alwaysUseLatest=True,
        mode='full',
        method='copy',
        workdir='build/infrastructure'))
    jobs_portindex_factory.addStep(steps.Git(
        name='git ports',
        repourl=config['portsurl'],
        progress=True,
        alwaysUseLatest=True,
        mode='incremental',
        workdir='build/ports'))
    jobs_portindex_factory.addStep(steps.ShellCommand(
        command='portindex',
        name='portindex',
        description=['generating'],
        descriptionDone=['done'],
        workdir='build/ports'))
    jobs_portindex_factory.addStep(steps.ShellCommand(
        command='port-tclsh infrastructure/jobs/portindex2postgres.tcl',
        name='portindex2postgres',
        description=['generating'],
        descriptionDone=['done']))
    jobs_portindex_factory.addSteps(
        make_rsync_deploy_steps(
            host=config['deploy']['portindex']['host'],
            user=config['deploy']['portindex']['user'],
            sshkeyfile=config['deploy']['portindex']['sshkeyfile'],
            sshknownhostsfile=config['deploy']['portindex']['sshknownhostsfile'],
            srcpath='PortIndex.sql',
            destpath=config['deploy']['portindex']['destpath']))

if 'guide' in config['deploy']:
    jobs_guide_factory = util.BuildFactory()
    # TODO: incremental mode with cleanup?
    jobs_guide_factory.addStep(steps.Git(
        repourl=config['guideurl'],
        progress=True,
        mode='full',
        method='copy',
        workdir='guide'))
    # TODO: check for existence of tools in toolsprefix
    jobs_guide_factory.addStep(steps.Compile(
        name='validate',
        description='validating',
        descriptionDone='validate',
        command='make validate',
        workdir='guide'))
    jobs_guide_factory.addStep(steps.Compile(
        command='make all',
        workdir='guide'))
    jobs_guide_factory.addSteps(
        make_rsync_deploy_steps(
            host=config['deploy']['guide']['host'],
            user=config['deploy']['guide']['user'],
            sshkeyfile=config['deploy']['guide']['sshkeyfile'],
            sshknownhostsfile=config['deploy']['guide']['sshknownhostsfile'],
            srcpath='guide',
            destpath=config['deploy']['guide']['destpath']))


####### BUILDER CONFIGURATION #######

# XXX: slavenames assignment should be automatic and more generic
portsslaves = {}
baseslaves = {}
slavenames = slavedata['slaves'].keys()
for plat in build_platforms:
    baseslaves[plat] = filter(lambda x: x.endswith(plat + '-base'), slavenames)
    portsslaves[plat] = filter(lambda x: x.endswith(plat + '-ports'), slavenames)

# Exported to every build's environment so mpbb/scripts can link back to
# the build that produced an artifact.
env_buildinfo = {
    'BUILDBOT_BUILDERNAME': util.WithProperties('%(buildername)s'),
    'BUILDBOT_BUILDNUMBER': util.WithProperties('%(buildnumber)s'),
    'BUILDBOT_BUILDURL': make_build_url
}

c['builders'] = []
# Derive a "10.x" tag from the platform string; fall back to the platform
# name itself when it doesn't contain an OS version.
extract_os = re.compile(r'10\.\d+')
for plat in build_platforms:
    os_match = extract_os.search(plat)
    os_version = os_match.group(0) if os_match else plat
    if 'legacy' not in plat and '10.6_i386' not in plat:
        c['builders'].append(
            util.BuilderConfig(
                name='base-' + plat,
                slavenames=['base-' + plat],
                factory=base_factory,
                tags=['base', os_version],
                env=merge_dicts(env_buildinfo, {'PATH': path_base})))
    if 'linux' not in plat and '10.5_ppc' != plat:
        c['builders'].extend((
            util.BuilderConfig(
                name='ports-' + plat + '-watcher',
                slavenames=['ports-' + plat],
                factory=make_portwatcher_factory('ports-' + plat + '-trigger'),
                tags=['portwatcher', os_version],
                env=merge_dicts(env_buildinfo, {'PATH':
                                                path_ports})),
            util.BuilderConfig(
                name='ports-' + plat + '-builder',
                slavenames=['ports-' + plat],
                factory=portbuilder_factory,
                tags=['portbuilder', os_version],
                env=merge_dicts(env_buildinfo, {'PATH': path_ports}))
        ))

if 'www' in config['deploy']:
    c['builders'].append(
        util.BuilderConfig(
            name='jobs-www',
            slavenames=['jobs'],
            factory=jobs_www_factory,
            tags=['jobs', 'docs', 'www'],
            env=merge_dicts(env_buildinfo, {'PATH': path_jobs})))
if 'portindex' in config['deploy']:
    c['builders'].append(
        util.BuilderConfig(
            name='jobs-portindex',
            slavenames=['jobs'],
            factory=jobs_portindex_factory,
            tags=['jobs', 'portindex', 'www'],
            env=merge_dicts(env_buildinfo, {'PATH': path_jobs})))
if 'guide' in config['deploy']:
    c['builders'].append(
        util.BuilderConfig(
            name='jobs-guide',
            slavenames=['jobs'],
            factory=jobs_guide_factory,
            tags=['jobs', 'docs', 'guide'],
            env=merge_dicts(env_buildinfo, {'PATH': path_jobs})))


####### MAIL NOTIFIERS #######

# TODO: This is the old mail notifier;
# - useful functionality could be copied
# - then the code should be removed
#
# notifier that sends mail to last committers and maintainers of failed ports
class OldPortsMailNotifier(status.MailNotifier):
    # would make more sense to override getInterestedUsers() in BuildStatus,
    # but it seems almost impossible to tell a builder to use a different
    # class for status in its Build objects
    def useLookup(self, build):
        failedPorts = set()
        interestedUsers = set()

        # XXX: needs to be rewritten for the new steps of mp-buildbot
        statusStep = [x for x in build.getSteps() if x.getName() == 'status'][0]
        statusLog = [x for x in statusStep.getLogs() if x.getName() == 'portstatus'][0]
        for line in statusLog.getText().splitlines():
            halves = line.split()
            if halves[0] == '[FAIL]':
                failedPorts.add(halves[1])

        # Placeholder maintainer addresses that should never be mailed.
        fakeAddresses = {'nomaintainer', 'nomaintainer@macports.org',
                         'openmaintainer', 'openmaintainer@macports.org'}
        for p in failedPorts:
            # Ask the local port(1) for the maintainers of each failed port.
            output = subprocess.Popen(['/opt/local/bin/port', 'info',
                                       '--index', '--maintainers',
                                       '--line', p],
                                      stdout=subprocess.PIPE).communicate()[0].strip()
            for m in output.split(','):
                if m not in fakeAddresses:
                    interestedUsers.add(m)

        # Also notify the committers whose changes touched a failed port.
        ss = build.getSourceStamp()
        if ss:
            for c in ss.changes:
                interesting = False
                for f in c.files:
                    comps = f.split('/')
                    if len(comps) >= 3 and comps[2] in failedPorts and comps[0] == 'dports' and comps[1] != '_resources':
                        interesting = True
                        break
                if interesting:
                    interestedUsers.add(c.who)

        dl = []
        for u in interestedUsers:
            d = defer.maybeDeferred(self.lookup.getAddress, u)
            dl.append(d)
        return defer.gatherResults(dl)


class PortsMailNotifier(status.MailNotifier, object):
    # MailNotifier variant whose message formatter also decides which
    # extra users (port maintainers) should receive the mail.
    def __init__(self, fromaddr, *args, **kwargs):
        self.interested_users = set()
        self.portMessageFormatter = kwargs.pop('portMessageFormatter')
        super(PortsMailNotifier, self).__init__(fromaddr=fromaddr, *args, **kwargs)

    # same as original, but calls portMessageFormatter with access to interested_users
    def buildMessageDict(self, name, build, results):
        msgdict, self.interested_users = self.portMessageFormatter(
            self.mode, name, build, results, self.master_status)
        return msgdict

    def useLookup(self, build):
        # Initialize with additional recipients.
        dl = [defer.maybeDeferred(self.lookup.getAddress, user)
              for user in self.interested_users]
        # original list of recipients
        # for u in build.getResponsibleUsers() + build.getInterestedUsers():
        #     d = defer.maybeDeferred(self.lookup.getAddress, u)
        #     dl.append(d)
        return defer.gatherResults(dl)


def portWatcherMessageFormatter(mode, name, build, results, master_status):
    """Build the mail body for a portwatcher build.

    Returns a (message dict, interested_users) tuple; interested_users is
    the set of maintainer addresses extracted from the build summary.
    """
    interested_users = set()
    result = util.Results[results]

    subject = 'Build {}'.format(result.title())
    text = list()
    text.append('Status: {}'.format(result.title()))
    text.append('Build slave: {}'.format(build.getSlavename()))
    build_url = master_status.getURLForThing(build)
    if build_url:
        text.append('Full logs: {}'.format(build_url))
    text.append('Build reason: {}'.format(build.getReason()))
    text.append('Port list: {}'.format(build.getProperty('fullportlist')))
    text.append('Subport list:\n\t- {}'.format(build.getProperty('subportlist').replace(' ', '\n\t- ')))
    text.append('Variants: {}'.format(build.getProperty('variants')))
    text.append('Revision: {}'.format(build.getProperty('revision')))
    text.append('Build time: {}'.format(datetime.timedelta(seconds=int(round(build.getTimes()[1] - build.getTimes()[0])))))
    text.append(u'Committer: {}'.format(','.join(build.getResponsibleUsers())))

    text.append('\nLog from failed builds:')
    summary_step = [x for x in build.getSteps() if x.getName() == 'summary'][0]
    summary_log = [x for x in summary_step.getLogs() if x.getName() == 'stdio'][0]

    failed_ports = set()
    maintainers_to_notify = set()
    # Parses mpbb's per-port summary lines; named groups are consumed by
    # match.group('port', 'dependency', 'maintainers') below.
    pattern = re.compile(r"^Building '(?P<port>.*?)'.*?(\(failed to install dependency '(?P<dependency>.*?)'\))?( maintainers: (?P<maintainers>.*?)[.])?$")

    # iterate through all the ports being built
    for line in summary_log.getText().splitlines():
        if 'ERROR' not in line:
            continue
        # in case of a build error, print the error and add the broken port(s) to the list
        line = line.replace(';', '@')
        text.append('\t' + line.replace(' maintainers:', '\n\t> maintainers:'))
        match = pattern.match(line)
        if match:
            port, dependency, maintainers = match.group('port', 'dependency', 'maintainers')
            failed_ports.add(port)
            if dependency:
                failed_ports.add(dependency)
            if maintainers:
                maintainers_to_notify.update(maintainers.split(','))

    if failed_ports:
        text.append('\nBroken ports:\n\t- {}'.format('\n\t- '.join(sorted(failed_ports))))
    if maintainers_to_notify:
        text.append('\nResponsible maintainers:\n\t- {}'.format('\n\t- '.join(sorted(maintainers_to_notify))))
        interested_users.update(maintainers_to_notify)

    # links to individual builds
    text.append('\nLinks to individual build jobs:')
    trigger_step = [x for x in build.getSteps() if x.getName() == 'trigger'][0]
    # FIXME Sorting is lexicographic and won't work properly for
    # - ports-10.11-x86_64-builder #99
    # - ports-10.11-x86_64-builder #100
    for label, url in sorted(trigger_step.getURLs().iteritems()):
        text.append('- {}\n {}'.format(label, url))

    text.append('\n-- \nBest regards,\nMacPorts Buildbot\n{}'.format(c['buildbotURL']))

    if failed_ports:
        subject += ': ' + ', '.join(sorted(failed_ports)[:10])
        if len(failed_ports) > 10:
            subject += ', and {} more'.format(len(failed_ports) - 10)

    return ({'body': '\n'.join(text), 'type': 'plain', 'subject': subject}, interested_users)


if config['production']:
    c['status'].extend((
        # send mail about base failures to users on the blamelist
        status.MailNotifier(
            fromaddr='buildbot@macports.org',
            extraHeaders={'Reply-To': 'noreply@macports.org'},
            # unless lookup is defined, users have to be configured locally
            # maybe a smarter function is needed, but lookup='' does it for now
            lookup='',
            mode=('problem'),
            builders=base_buildernames,
            #extraRecipients=['...'],
            #smtpPort=25,
            #relayhost='localhost',
            sendToInterestedUsers=True),
        PortsMailNotifier(
            fromaddr='buildbot@macports.org',
            extraHeaders={'Reply-To': 'noreply@macports.org'},
            lookup='',
            mode=('failing'),
            builders=portwatcher_buildernames,
            #extraRecipients=['...'],
            #smtpPort=25,
            #relayhost='localhost',
            sendToInterestedUsers=True,
            portMessageFormatter=portWatcherMessageFormatter),
        # notifications about exceptions
        status.MailNotifier(
            fromaddr='buildbot@macports.org',
            extraHeaders={'Reply-To': 'noreply@macports.org'},
            mode=('exception'),
            extraRecipients=['admin@macports.org'],
            sendToInterestedUsers=False)
    ))


####### PROJECT IDENTITY #######

# the 'title' string will appear at the top of this buildbot
# installation's WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
c['title'] = config['title']
c['titleURL'] = config['titleurl']
c['buildbotURL'] = config['buildboturl']


####### DATABASE #######

# This specifies what database buildbot uses to store its state. You can
# leave this at its default for all but the largest installations.
c['db'] = config['db']


####### DATA LIFETIME #######

c['buildHorizon'] = config['buildhorizon']
c['logHorizon'] = config['loghorizon']
c['eventHorizon'] = config['eventhorizon']
c['buildCacheSize'] = config['buildcachesize']