Commit 1492aeb4 authored by Greg Guterman's avatar Greg Guterman Committed by Commit Bot

Allow for bucket-based mb_config and add default

I've joined the current mb_config.pyl
with /src/cr-buildbucket.cfg to remap masters to buckets for a new
bucket-based mb_config_buckets.pyl.
Also the old logic that used the 'master: { chromium: {...}}' dict to
specify the chromium builders that blacklist chrome proprietary mixins
now looks at a top-level config key in mb_config_buckets.pyl
called 'public_artifact_builders'.

mb.py has been modified to support the new type of mapping in
mb_config_buckets.pyl and will use it by default if a bucket is specified.
I hope to move all the code that uses mb to use buckets instead of masters,
at which point I plan to remove all the master-related functions from mb.py.
The new functions have been written in a way that makes it easy to remove
the old master-based functions at the cost of being less DRY.
Validation stuff was factored out to a helper file to lighten the load
inside of mb.py.
Also there seems to be a dead function which I removed: CheckCompile.

This pyl also excludes all the internal builders, which will be
moved to src-internal.

Bug: 1028719
Change-Id: I86b55b12def06afab81faec66463963ede2f3244
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1963441
Commit-Queue: Gregory Guterman <guterman@google.com>
Reviewed-by: default avatarAaron Gable <agable@chromium.org>
Reviewed-by: default avatarStephen Martinis <martiniss@chromium.org>
Cr-Commit-Position: refs/heads/master@{#731713}
parent 0d1119fb
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Validation functions for the Meta-Build config file"""
import collections
def GetAllConfigsMaster(masters):
  """Build a dict of all of the configs referenced by builders.

  Deprecated in favor of GetAllConfigsBucket.

  Args:
    masters: dict of master name -> {builder name -> config}, where each
      config is a config-name string, an args-file path starting with
      '//', or (for phased builders) a dict of phase name -> config name.

  Returns:
    dict mapping each referenced config (or args-file path) to the
    master that references it.
  """
  all_configs = {}
  for master, builders in masters.items():
    for config in builders.values():
      if isinstance(config, dict):
        # Phased builder: record each phase's config individually.
        for phase_config in config.values():
          all_configs[phase_config] = master
      else:
        all_configs[config] = master
  return all_configs
def GetAllConfigsBucket(buckets):
  """Build a dict of every config referenced by a builder.

  Maps each referenced config name (or args-file path) to the bucket
  whose builder references it. Phased builders (dict-valued entries)
  contribute one entry per phase config.
  """
  referenced = {}
  for bucket_name, builders in buckets.items():
    for entry in builders.values():
      # A dict entry is a phased builder; expand to its per-phase configs.
      names = entry.values() if isinstance(entry, dict) else [entry]
      for config_name in names:
        referenced[config_name] = bucket_name
  return referenced
def CheckAllConfigsAndMixinsReferenced(errs, all_configs, configs, mixins):
  """Cross-check that configs and mixins are all used and all defined.

  Appends a message to errs for: every config no builder references,
  every mixin reference (from a config or a nested mixin) that names an
  unknown mixin, and every mixin nothing references. Returns errs.
  """
  errs.extend('Unused config "%s".' % name
              for name in configs if name not in all_configs)
  # Gather every mixin mentioned anywhere, validating each reference.
  seen = set()
  for config_name, mixin_list in configs.items():
    for name in mixin_list:
      if name not in mixins:
        errs.append(
            'Unknown mixin "%s" referenced by config "%s".' % (name,
                                                               config_name))
      seen.add(name)
  for parent in mixins:
    for child in mixins[parent].get('mixins', []):
      if child not in mixins:
        errs.append(
            'Unknown mixin "%s" referenced by mixin "%s".' % (child, parent))
      seen.add(child)
  # Anything defined but never mentioned is dead configuration.
  errs.extend('Unreferenced mixin "%s".' % name
              for name in mixins if name not in seen)
  return errs
def EnsureNoProprietaryMixinsBucket(errs, default_config, config_file,
                                    public_artifact_builders, buckets, configs,
                                    mixins):
  """Check that the 'chromium' bots which build public artifacts
  do not include the chrome_with_codecs mixin.

  Only applies when validating the default config file. Appends a
  message to errs for each violation.

  Returns:
    errs. (Fixed: the early-exit paths previously returned None while
    the fall-through path returned errs; all paths now return errs.)
  """
  if config_file != default_config:
    # Only the default Chromium config carries this restriction.
    return errs
  if public_artifact_builders is None:
    errs.append('Missing "public_artifact_builders" config entry. '
                'Please update this proprietary codecs check with the '
                'name of the builders responsible for public build artifacts.')
    return errs
  # crbug/1033585
  for bucket, builders in public_artifact_builders.items():
    for builder in builders:
      config = buckets[bucket][builder]

      def RecurseMixins(builder, current_mixin):
        # Depth-first walk of nested mixins looking for the codecs mixin.
        if current_mixin == 'chrome_with_codecs':
          errs.append('Public artifact builder "%s" can not contain the '
                      '"chrome_with_codecs" mixin.' % builder)
          return
        if not 'mixins' in mixins[current_mixin]:
          return
        for mixin in mixins[current_mixin]['mixins']:
          RecurseMixins(builder, mixin)

      for mixin in configs[config]:
        RecurseMixins(builder, mixin)
  return errs
def EnsureNoProprietaryMixinsMaster(errs, default_config, config_file, masters,
                                    configs, mixins):
  """If we're checking the Chromium config, check that the 'chromium' bots
  which build public artifacts do not include the chrome_with_codecs mixin.

  Deprecated in favor of EnsureNoProprietaryMixinsBucket.
  (Fixed: the docstring previously named a nonexistent
  "BlacklistMixinsBucket" function.)

  Appends a message to errs for each violation; also errors if the
  'chromium' master is missing entirely.
  """
  if config_file != default_config:
    # Only the default Chromium config carries this restriction.
    return
  if 'chromium' not in masters:
    errs.append('Missing "chromium" master. Please update this '
                'proprietary codecs check with the name of the master '
                'responsible for public build artifacts.')
    return
  for builder in masters['chromium']:
    config = masters['chromium'][builder]

    def RecurseMixins(current_mixin):
      # Depth-first walk of nested mixins looking for the codecs mixin.
      if current_mixin == 'chrome_with_codecs':
        errs.append('Public artifact builder "%s" can not contain the '
                    '"chrome_with_codecs" mixin.' % builder)
        return
      if not 'mixins' in mixins[current_mixin]:
        return
      for mixin in mixins[current_mixin]['mixins']:
        RecurseMixins(mixin)

    for mixin in configs[config]:
      RecurseMixins(mixin)
def CheckDuplicateConfigs(errs, config_pool, mixin_pool, grouping,
                          flatten_config):
  """Flag differently-named configs that evaluate to identical values.

  Fully evaluates every config referenced by `grouping` (master- or
  bucket-keyed) and appends an error for each set of distinct names
  whose evaluated gn_args collide. Phased (dict) configs and configs
  whose evaluation contains 'error' are skipped.
  """
  names_by_value = collections.defaultdict(set)
  for builders in grouping.values():
    for config in builders.values():
      if not config:
        continue
      if isinstance(config, dict):
        # Ignore for now
        continue
      if config.startswith('//'):
        # An args file stands in for its own evaluated value.
        evaluated = config
      else:
        evaluated = flatten_config(config_pool, mixin_pool, config)['gn_args']
      if 'error' in evaluated:
        continue
      names_by_value[evaluated].add(config)
  for names in names_by_value.values():
    if len(names) > 1:
      errs.append(
          'Duplicate configs detected. When evaluated fully, the '
          'following configs are all equivalent: %s. Please '
          'consolidate these configs into only one unique name per '
          'configuration value.' % (', '.join(sorted('%r' % n
                                                     for n in names))))
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
......@@ -9,6 +9,7 @@ MB is a wrapper script for GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""
from __future__ import absolute_import
from __future__ import print_function
import argparse
......@@ -34,8 +35,21 @@ from collections import OrderedDict
CHROMIUM_SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))))
sys.path = [os.path.join(CHROMIUM_SRC_DIR, 'build')] + sys.path
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..'))
import gn_helpers
from mb.lib import validation
def DefaultVals():
  """Return a fresh dict of the baseline mixin values.

  A new dict is built on every call so callers may mutate the result
  without affecting later callers.
  """
  vals = {}
  vals['args_file'] = ''
  vals['cros_passthrough'] = False
  vals['gn_args'] = ''
  return vals
def PruneVirtualEnv():
# Set by VirtualEnv, no need to keep it.
......@@ -68,16 +82,21 @@ def main(args):
class MetaBuildWrapper(object):
def __init__(self):
self.chromium_src_dir = CHROMIUM_SRC_DIR
self.default_config = os.path.join(self.chromium_src_dir, 'tools', 'mb',
'mb_config.pyl')
self.default_config_master = os.path.join(self.chromium_src_dir, 'tools',
'mb', 'mb_config.pyl')
self.default_config_bucket = os.path.join(self.chromium_src_dir, 'tools',
'mb', 'mb_config_buckets.pyl')
self.default_isolate_map = os.path.join(self.chromium_src_dir, 'testing',
'buildbot', 'gn_isolate_map.pyl')
self.group_by_bucket = False
self.executable = sys.executable
self.platform = sys.platform
self.sep = os.sep
self.args = argparse.Namespace()
self.configs = {}
self.public_artifact_builders = None
self.masters = {}
self.buckets = {}
self.mixins = {}
def Main(self, args):
......@@ -99,20 +118,24 @@ class MetaBuildWrapper(object):
def ParseArgs(self, argv):
def AddCommonOptions(subp):
group = subp.add_mutually_exclusive_group()
group.add_argument(
'-m', '--master', help='master name to look up config from')
group.add_argument('-u', '--bucket', help='bucket to look up config from')
subp.add_argument('-b', '--builder',
help='builder name to look up config from')
subp.add_argument('-m', '--master',
help='master name to look up config from')
subp.add_argument('-c', '--config',
help='configuration to analyze')
subp.add_argument('--phase',
help='optional phase name (used when builders '
'do multiple compiles with different '
'arguments in a single build)')
subp.add_argument('-f', '--config-file', metavar='PATH',
default=self.default_config,
help='path to config file '
'(default is %(default)s)')
subp.add_argument(
'-f',
'--config-file',
metavar='PATH',
help=('path to config file '
'(default is mb_config[_bucket].pyl'))
subp.add_argument('-i', '--isolate-map-file', metavar='PATH',
help='path to isolate map file '
'(default is %(default)s)',
......@@ -159,9 +182,12 @@ class MetaBuildWrapper(object):
subp = subps.add_parser('export',
description='Print out the expanded configuration '
'for each builder as a JSON object.')
subp.add_argument('-f', '--config-file', metavar='PATH',
default=self.default_config,
help='path to config file (default is %(default)s)')
subp.add_argument(
'-f',
'--config-file',
metavar='PATH',
help=('path to config file '
'(default is mb_config[_bucket].pyl'))
subp.add_argument('-g', '--goma-dir',
help='path to goma directory')
subp.set_defaults(func=self.CmdExport)
......@@ -275,7 +301,6 @@ class MetaBuildWrapper(object):
subp = subps.add_parser('validate',
description='Validate the config file.')
subp.add_argument('-f', '--config-file', metavar='PATH',
default=self.default_config,
help='path to config file (default is %(default)s)')
subp.set_defaults(func=self.CmdValidate)
......@@ -304,6 +329,20 @@ class MetaBuildWrapper(object):
self.args = parser.parse_args(argv)
self.group_by_bucket = getattr(self.args, 'master', None) is None
# Use the correct default config file
# Not using hasattr here because it would still require a None check
if (self.args.func != self.CmdValidate
and getattr(self.args, 'config_file', None) is None):
# The default bucket config should be the same in all except replacing
# master with bucket and handling proprietary chrome mixins
if self.group_by_bucket:
self.args.config_file = self.default_config_bucket
else:
self.args.config_file = self.default_config_master
def DumpInputFiles(self):
def DumpContentsOfFilePassedTo(arg_name, path):
......@@ -325,7 +364,42 @@ class MetaBuildWrapper(object):
vals = self.Lookup()
return self.RunGNAnalyze(vals)
def CmdExportBucket(self):
  """Print the expanded configuration for each builder, grouped by bucket.

  Emits a JSON object mapping bucket -> builder -> evaluated gn_args
  (for phased builders, phase -> gn_args; args-file configs are emitted
  as their '//' path verbatim). Builders with an empty config or whose
  evaluation contains 'error' are skipped. Returns 0.
  """
  self.ReadConfigFile()
  obj = {}
  for bucket, builders in self.buckets.items():
    obj[bucket] = {}
    for builder in builders:
      config = self.buckets[bucket][builder]
      if not config:
        continue
      if isinstance(config, dict):
        # Phased builder: expand each phase's config separately.
        args = {
            k: FlattenConfig(self.configs, self.mixins, v)['gn_args']
            for k, v in config.items()
        }
      elif config.startswith('//'):
        # An args file path is exported as-is, not evaluated.
        args = config
      else:
        args = FlattenConfig(self.configs, self.mixins, config)['gn_args']
      if 'error' in args:
        continue
      obj[bucket][builder] = args
  # Dump object and trim trailing whitespace.
  s = '\n'.join(
      l.rstrip()
      for l in json.dumps(obj, sort_keys=True, indent=2).splitlines())
  self.Print(s)
  return 0
def CmdExport(self):
''' Deprecated in favor of CmdExportBucket '''
if self.group_by_bucket:
return self.CmdExportBucket()
self.ReadConfigFile()
obj = {}
for master, builders in self.masters.items():
......@@ -336,12 +410,14 @@ class MetaBuildWrapper(object):
continue
if isinstance(config, dict):
args = {k: self.FlattenConfig(v)['gn_args']
for k, v in config.items()}
args = {
k: FlattenConfig(self.configs, self.mixins, v)['gn_args']
for k, v in config.items()
}
elif config.startswith('//'):
args = config
else:
args = self.FlattenConfig(config)['gn_args']
args = FlattenConfig(self.configs, self.mixins, config)['gn_args']
if 'error' in args:
continue
......@@ -632,21 +708,60 @@ class MetaBuildWrapper(object):
('cpu', 'x86-64'),
os_dim]
def CmdValidateBucket(self, print_ok=True):
  """Validate the bucket-based config file.

  Checks that every referenced args file and config exists, that every
  config and mixin is referenced, that public-artifact builders avoid
  the proprietary codecs mixin, and that no two configs are duplicates.

  Args:
    print_ok: when True, print a confirmation message on success.

  Returns:
    0 on success.

  Raises:
    MBErr: if the config file has validation problems.
  """
  errs = []
  # Build a list of all of the configs referenced by builders.
  all_configs = validation.GetAllConfigsBucket(self.buckets)
  # Check that every referenced args file or config actually exists.
  for config, loc in all_configs.items():
    if config.startswith('//'):
      if not self.Exists(self.ToAbsPath(config)):
        errs.append(
            'Unknown args file "%s" referenced from "%s".' % (config, loc))
    elif not config in self.configs:
      errs.append('Unknown config "%s" referenced from "%s".' % (config, loc))
  # Check that every config and mixin is referenced.
  validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
                                                self.configs, self.mixins)
  validation.EnsureNoProprietaryMixinsBucket(
      errs, self.default_config_bucket, self.args.config_file,
      self.public_artifact_builders, self.buckets, self.configs, self.mixins)
  validation.CheckDuplicateConfigs(errs, self.configs, self.mixins,
                                   self.buckets, FlattenConfig)
  if errs:
    raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
                '\n ' + '\n '.join(errs))
  if print_ok:
    self.Print('mb config file %s looks ok.' % self.args.config_file)
  return 0
def CmdValidate(self, print_ok=True):
errs = []
# Validate both bucket and master configs if
# a specific one isn't specified
if getattr(self.args, 'config_file', None) is None:
# Read the file to make sure it parses.
self.args.config_file = self.default_config_bucket
self.ReadConfigFile()
self.CmdValidateBucket()
# Build a list of all of the configs referenced by builders.
all_configs = {}
for master in self.masters:
for config in self.masters[master].values():
if isinstance(config, dict):
for c in config.values():
all_configs[c] = master
self.args.config_file = self.default_config_master
self.ReadConfigFile()
else:
all_configs[config] = master
self.ReadConfigFile()
if self.group_by_bucket:
return self.CmdValidateBucket()
# Build a list of all of the configs referenced by builders.
all_configs = validation.GetAllConfigsMaster(self.masters)
# Check that every referenced args file or config actually exists.
for config, loc in all_configs.items():
......@@ -658,84 +773,17 @@ class MetaBuildWrapper(object):
errs.append('Unknown config "%s" referenced from "%s".' %
(config, loc))
# Check that every actual config is actually referenced.
for config in self.configs:
if not config in all_configs:
errs.append('Unused config "%s".' % config)
# Figure out the whole list of mixins, and check that every mixin
# listed by a config or another mixin actually exists.
referenced_mixins = set()
for config, mixins in self.configs.items():
for mixin in mixins:
if not mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by config "%s".' %
(mixin, config))
referenced_mixins.add(mixin)
for mixin in self.mixins:
for sub_mixin in self.mixins[mixin].get('mixins', []):
if not sub_mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
(sub_mixin, mixin))
referenced_mixins.add(sub_mixin)
# Check that every mixin defined is actually referenced somewhere.
for mixin in self.mixins:
if not mixin in referenced_mixins:
errs.append('Unreferenced mixin "%s".' % mixin)
# If we're checking the Chromium config, check that the 'chromium' bots
# which build public artifacts do not include the chrome_with_codecs mixin.
if self.args.config_file == self.default_config:
if 'chromium' in self.masters:
for builder in self.masters['chromium']:
config = self.masters['chromium'][builder]
def RecurseMixins(builder, current_mixin):
if current_mixin == 'chrome_with_codecs':
errs.append('Public artifact builder "%s" can not contain the '
'"chrome_with_codecs" mixin.' % builder)
return
if not 'mixins' in self.mixins[current_mixin]:
return
for mixin in self.mixins[current_mixin]['mixins']:
RecurseMixins(builder, mixin)
for mixin in self.configs[config]:
RecurseMixins(builder, mixin)
else:
errs.append('Missing "chromium" master. Please update this '
'proprietary codecs check with the name of the master '
'responsible for public build artifacts.')
# Check for duplicate configs. Evaluate all configs, and see if, when
# evaluated, differently named configs are the same.
evaled_to_source = collections.defaultdict(set)
for master, builders in self.masters.items():
for builder in builders:
config = self.masters[master][builder]
if not config:
continue
# Check that every config and mixin is referenced.
validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
self.configs, self.mixins)
if isinstance(config, dict):
# Ignore for now
continue
elif config.startswith('//'):
args = config
else:
args = self.FlattenConfig(config)['gn_args']
if 'error' in args:
continue
validation.EnsureNoProprietaryMixinsMaster(
errs, self.default_config_master, self.args.config_file, self.masters,
self.configs, self.mixins)
evaled_to_source[args].add(config)
validation.CheckDuplicateConfigs(errs, self.configs, self.mixins,
self.masters, FlattenConfig)
for v in evaled_to_source.values():
if len(v) != 1:
errs.append('Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: %s. Please '
'consolidate these configs into only one unique name per '
'configuration value.' % (
', '.join(sorted('%r' % val for val in v))))
if errs:
raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
'\n ' + '\n '.join(errs))
......@@ -747,7 +795,7 @@ class MetaBuildWrapper(object):
def GetConfig(self):
build_dir = self.args.path
vals = self.DefaultVals()
vals = DefaultVals()
if self.args.builder or self.args.master or self.args.config:
vals = self.Lookup()
# Re-run gn gen in order to ensure the config is consistent with the
......@@ -784,17 +832,20 @@ class MetaBuildWrapper(object):
vals = self.ReadIOSBotConfig()
if not vals:
self.ReadConfigFile()
if self.group_by_bucket:
config = self.ConfigFromArgsBucket()
else:
config = self.ConfigFromArgs()
if config.startswith('//'):
if not self.Exists(self.ToAbsPath(config)):
raise MBErr('args file "%s" not found' % config)
vals = self.DefaultVals()
vals = DefaultVals()
vals['args_file'] = config
else:
if not config in self.configs:
raise MBErr('Config "%s" not found in %s' %
(config, self.args.config_file))
vals = self.FlattenConfig(config)
vals = FlattenConfig(self.configs, self.mixins, config)
return vals
def ReadIOSBotConfig(self):
......@@ -808,7 +859,7 @@ class MetaBuildWrapper(object):
contents = json.loads(self.ReadFile(path))
gn_args = ' '.join(contents.get('gn_args', []))
vals = self.DefaultVals()
vals = DefaultVals()
vals['gn_args'] = gn_args
return vals
......@@ -823,8 +874,12 @@ class MetaBuildWrapper(object):
(self.args.config_file, e))
self.configs = contents['configs']
self.masters = contents['masters']
self.mixins = contents['mixins']
self.masters = contents.get('masters')
self.buckets = contents.get('buckets')
self.public_artifact_builders = contents.get('public_artifact_builders')
self.group_by_bucket = bool(self.buckets)
def ReadIsolateMap(self):
if not self.args.isolate_map_files:
......@@ -848,7 +903,44 @@ class MetaBuildWrapper(object):
'Failed to parse isolate map file "%s": %s' % (isolate_map, e))
return isolate_maps
def ConfigFromArgsBucket(self):
  """Resolve the config name from command-line args (bucket flavor).

  Returns the config named by -c/--config, or looks one up from
  -u/--bucket and -b/--builder, resolving --phase for phased builders.

  Raises:
    MBErr: on conflicting, missing, or unknown argument combinations.
  """
  if self.args.config:
    # -c/--config is mutually exclusive with bucket/builder lookup.
    if self.args.bucket or self.args.builder:
      raise MBErr('Can not specify both -c/--config and -u/--bucket or '
                  '-b/--builder')
    return self.args.config
  if not self.args.bucket or not self.args.builder:
    raise MBErr('Must specify either -c/--config or '
                '(-u/--bucket and -b/--builder)')
  if not self.args.bucket in self.buckets:
    raise MBErr('Bucket name "%s" not found in "%s"' %
                (self.args.bucket, self.args.config_file))
  if not self.args.builder in self.buckets[self.args.bucket]:
    raise MBErr('Builder name "%s" not found under buckets[%s] in "%s"' %
                (self.args.builder, self.args.bucket, self.args.config_file))
  config = self.buckets[self.args.bucket][self.args.builder]
  if isinstance(config, dict):
    # Phased builder: a --phase selecting one of its configs is required.
    if self.args.phase is None:
      raise MBErr('Must specify a build --phase for %s on %s' %
                  (self.args.builder, self.args.bucket))
    phase = str(self.args.phase)
    if phase not in config:
      raise MBErr('Phase %s doesn\'t exist for %s on %s' %
                  (phase, self.args.builder, self.args.bucket))
    return config[phase]
  # Non-phased builders must not be given a --phase.
  if self.args.phase is not None:
    raise MBErr('Must not specify a build --phase for %s on %s' %
                (self.args.builder, self.args.bucket))
  return config
def ConfigFromArgs(self):
''' Deprecated in favor ConfigFromArgsBucket '''
if self.args.config:
if self.args.master or self.args.builder:
raise MBErr('Can not specific both -c/--config and -m/--master or '
......@@ -884,47 +976,6 @@ class MetaBuildWrapper(object):
(self.args.builder, self.args.master))
return config
def FlattenConfig(self, config):
mixins = self.configs[config]
vals = self.DefaultVals()
visited = []
self.FlattenMixins(mixins, vals, visited)
return vals
def DefaultVals(self):
return {
'args_file': '',
'cros_passthrough': False,
'gn_args': '',
}
def FlattenMixins(self, mixins, vals, visited):
for m in mixins:
if m not in self.mixins:
raise MBErr('Unknown mixin "%s"' % m)
visited.append(m)
mixin_vals = self.mixins[m]
if 'cros_passthrough' in mixin_vals:
vals['cros_passthrough'] = mixin_vals['cros_passthrough']
if 'args_file' in mixin_vals:
if vals['args_file']:
raise MBErr('args_file specified multiple times in mixins '
'for mixin %s' % m)
vals['args_file'] = mixin_vals['args_file']
if 'gn_args' in mixin_vals:
if vals['gn_args']:
vals['gn_args'] += ' ' + mixin_vals['gn_args']
else:
vals['gn_args'] = mixin_vals['gn_args']
if 'mixins' in mixin_vals:
self.FlattenMixins(mixin_vals['mixins'], vals, visited)
return vals
def RunGNGen(self, vals, compute_inputs_for_analyze=False, check=True):
build_dir = self.args.path
......@@ -1705,26 +1756,6 @@ class MetaBuildWrapper(object):
raise MBErr('Error %s writing to the output path "%s"' %
(e, path))
def CheckCompile(self, master, builder):
url_template = self.args.url_template + '/{builder}/builds/_all?as_text=1'
url = urllib2.quote(url_template.format(master=master, builder=builder),
safe=':/()?=')
try:
builds = json.loads(self.Fetch(url))
except Exception as e:
return str(e)
successes = sorted(
[int(x) for x in builds.keys() if "text" in builds[x] and
cmp(builds[x]["text"][:2], ["build", "successful"]) == 0],
reverse=True)
if not successes:
return "no successful builds"
build = builds[str(successes[0])]
step_names = set([step["name"] for step in build["steps"]])
compile_indicators = set(["compile", "compile (with patch)", "analyze"])
if compile_indicators & step_names:
return "compiles"
return "does not compile"
def PrintCmd(self, cmd, env):
if self.platform == 'win32':
......@@ -1886,6 +1917,42 @@ class LedResult(object):
self._run_cmd(self._result, cmd), self._run_cmd)
def FlattenConfig(config_pool, mixin_pool, config):
  """Expand `config` into its full value dict by applying its mixins.

  Args:
    config_pool: dict of config name -> list of mixin names.
    mixin_pool: dict of mixin name -> mixin value dict.
    config: name of the config to flatten.

  Returns:
    dict with 'args_file', 'cros_passthrough' and 'gn_args' accumulated
    from the config's mixins (recursively, in order).
  """
  mixins = config_pool[config]
  vals = DefaultVals()
  visited = []
  FlattenMixins(mixin_pool, mixins, vals, visited)
  return vals
def FlattenMixins(mixin_pool, mixins_to_flatten, vals, visited):
  """Recursively fold the named mixins from mixin_pool into vals.

  Appends every mixin visited to `visited`, concatenates gn_args in
  traversal order, and raises MBErr for an unknown mixin name or when
  more than one mixin supplies an args_file. Returns vals.
  """
  for name in mixins_to_flatten:
    if name not in mixin_pool:
      raise MBErr('Unknown mixin "%s"' % name)
    visited.append(name)
    entry = mixin_pool[name]
    if 'cros_passthrough' in entry:
      vals['cros_passthrough'] = entry['cros_passthrough']
    if 'args_file' in entry:
      # Only one mixin in a flattened config may supply an args file.
      if vals['args_file']:
        raise MBErr('args_file specified multiple times in mixins '
                    'for mixin %s' % name)
      vals['args_file'] = entry['args_file']
    if 'gn_args' in entry:
      existing = vals['gn_args']
      vals['gn_args'] = (existing + ' ' + entry['gn_args']
                         if existing else entry['gn_args'])
    if 'mixins' in entry:
      # Nested mixins are expanded depth-first, in place.
      FlattenMixins(mixin_pool, entry['mixins'], vals, visited)
  return vals
class MBErr(Exception):
pass
......
This source diff could not be displayed because it is too large. You can view the blob instead.
#!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for mb.py."""
from __future__ import print_function
from __future__ import absolute_import
import json
import os
......@@ -14,7 +15,10 @@ import StringIO
import sys
import unittest
import mb
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..'))
from mb import mb
class FakeMBW(mb.MetaBuildWrapper):
......@@ -24,7 +28,9 @@ class FakeMBW(mb.MetaBuildWrapper):
# Override vars for test portability.
if win32:
self.chromium_src_dir = 'c:\\fake_src'
self.default_config = 'c:\\fake_src\\tools\\mb\\mb_config.pyl'
self.default_config_master = 'c:\\fake_src\\tools\\mb\\mb_config.pyl'
self.default_config_bucket = 'c:\\fake_src\\tools\\mb\\mb_config_bucket.pyl' # pylint: disable=line-too-long
self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\'
'gn_isolate_map.pyl')
self.platform = 'win32'
......@@ -33,7 +39,8 @@ class FakeMBW(mb.MetaBuildWrapper):
self.cwd = 'c:\\fake_src\\out\\Default'
else:
self.chromium_src_dir = '/fake_src'
self.default_config = '/fake_src/tools/mb/mb_config.pyl'
self.default_config_master = '/fake_src/tools/mb/mb_config.pyl'
self.default_config_bucket = '/fake_src/tools/mb/mb_config_bucket.pyl'
self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl'
self.executable = '/usr/bin/python'
self.platform = 'linux2'
......@@ -172,6 +179,55 @@ TEST_CONFIG = """\
}
"""
TEST_CONFIG_BUCKET = """\
{
'public_artifact_builders': {},
'buckets': {
'ci': {
'fake_builder': 'rel_bot',
'fake_debug_builder': 'debug_goma',
'fake_simplechrome_builder': 'cros_chrome_sdk',
'fake_args_bot': '//build/args/bots/fake_master/fake_args_bot.gn',
'fake_multi_phase': { 'phase_1': 'phase_1', 'phase_2': 'phase_2'},
'fake_args_file': 'args_file_goma',
}
},
'configs': {
'args_file_goma': ['args_file', 'goma'],
'cros_chrome_sdk': ['cros_chrome_sdk'],
'rel_bot': ['rel', 'goma', 'fake_feature1'],
'debug_goma': ['debug', 'goma'],
'phase_1': ['phase_1'],
'phase_2': ['phase_2'],
},
'mixins': {
'cros_chrome_sdk': {
'cros_passthrough': True,
},
'fake_feature1': {
'gn_args': 'enable_doom_melon=true',
},
'goma': {
'gn_args': 'use_goma=true',
},
'args_file': {
'args_file': '//build/args/fake.gn',
},
'phase_1': {
'gn_args': 'phase=1',
},
'phase_2': {
'gn_args': 'phase=2',
},
'rel': {
'gn_args': 'is_debug=false',
},
'debug': {
'gn_args': 'is_debug=true',
},
},
}
"""
TEST_BAD_CONFIG = """\
{
......@@ -199,6 +255,35 @@ TEST_BAD_CONFIG = """\
}
"""
TEST_BAD_CONFIG_BUCKET = """\
{
'public_artifact_builders': {
'fake_bucket_a': ['fake_builder_a', 'fake_builder_b'],
},
'configs': {
'rel_bot_1': ['rel', 'chrome_with_codecs'],
'rel_bot_2': ['rel', 'bad_nested_config'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'chrome_with_codecs': {
'gn_args': 'proprietary_codecs=true',
},
'bad_nested_config': {
'mixins': ['chrome_with_codecs'],
},
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
TEST_ARGS_FILE_TWICE_CONFIG = """\
{
......@@ -220,6 +305,26 @@ TEST_ARGS_FILE_TWICE_CONFIG = """\
"""
TEST_ARGS_FILE_TWICE_CONFIG_BUCKET = """\
{
'public_artifact_builders': {},
'buckets': {
'chromium': {},
'fake_bucket': {
'fake_args_file_twice': 'args_file_twice',
},
},
'configs': {
'args_file_twice': ['args_file', 'args_file'],
},
'mixins': {
'args_file': {
'args_file': '//build/args/fake.gn',
},
},
}
"""
TEST_DUP_CONFIG = """\
{
'masters': {
......@@ -241,6 +346,27 @@ TEST_DUP_CONFIG = """\
}
"""
TEST_DUP_CONFIG_BUCKET = """\
{
'public_artifact_builders': {},
'buckets': {
'ci': {},
'fake_bucket': {
'fake_builder': 'some_config',
'other_builder': 'some_other_config',
},
},
'configs': {
'some_config': ['args_file'],
'some_other_config': ['args_file'],
},
'mixins': {
'args_file': {
'args_file': '//build/args/fake.gn',
},
},
}
"""
TRYSERVER_CONFIG = """\
{
......@@ -264,7 +390,8 @@ TRYSERVER_CONFIG = """\
class UnitTest(unittest.TestCase):
def fake_mbw(self, files=None, win32=False):
mbw = FakeMBW(win32=win32)
mbw.files.setdefault(mbw.default_config, TEST_CONFIG)
mbw.files.setdefault(mbw.default_config_master, TEST_CONFIG)
mbw.files.setdefault(mbw.default_config_bucket, TEST_CONFIG_BUCKET)
mbw.files.setdefault(
mbw.ToAbsPath('//testing/buildbot/gn_isolate_map.pyl'),
'''{
......@@ -442,9 +569,18 @@ class UnitTest(unittest.TestCase):
('import("//build/args/fake.gn")\n'
'use_goma = true\n'))
def test_gen_args_file_twice_bucket(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config_bucket] = TEST_ARGS_FILE_TWICE_CONFIG_BUCKET
self.check([
'gen', '-u', 'fake_bucket', '-b', 'fake_args_file_twice', '//out/Debug'
],
mbw=mbw,
ret=1)
def test_gen_args_file_twice(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TEST_ARGS_FILE_TWICE_CONFIG
mbw.files[mbw.default_config_master] = TEST_ARGS_FILE_TWICE_CONFIG
self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_file_twice',
'//out/Debug'], mbw=mbw, ret=1)
......@@ -793,18 +929,44 @@ class UnitTest(unittest.TestCase):
'enable_doom_melon = true\n'
'use_goma = true\n'))
def test_recursive_lookup_bucket(self):
files = {
'/fake_src/build/args/fake.gn': ('enable_doom_melon = true\n'
'enable_antidoom_banana = true\n')
}
self.check(['lookup', '-u', 'ci', '-b', 'fake_args_file', '--recursive'],
files=files,
ret=0,
out=('enable_antidoom_banana = true\n'
'enable_doom_melon = true\n'
'use_goma = true\n'))
def test_validate(self):
mbw = self.fake_mbw()
self.check(['validate'], mbw=mbw, ret=0)
def test_bad_validate(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TEST_BAD_CONFIG
self.check(['validate'], mbw=mbw, ret=1)
mbw.files[mbw.default_config_master] = TEST_BAD_CONFIG
self.check(['validate', '-f', mbw.default_config_master], mbw=mbw, ret=1)
def test_bad_validate_bucket(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config_bucket] = TEST_BAD_CONFIG_BUCKET
self.check(['validate', '-f', mbw.default_config_bucket], mbw=mbw, ret=1)
def test_duplicate_validate(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TEST_DUP_CONFIG
mbw.files[mbw.default_config_master] = TEST_DUP_CONFIG
self.check(['validate'], mbw=mbw, ret=1)
self.assertIn(
'Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: \'some_config\', '
'\'some_other_config\'.', mbw.out)
def test_duplicate_validate_bucket(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config_bucket] = TEST_DUP_CONFIG_BUCKET
self.check(['validate'], mbw=mbw, ret=1)
self.assertIn('Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: \'some_config\', '
......
#!/usr/bin/python
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for mb_validate.py."""
from __future__ import print_function
from __future__ import absolute_import
import sys
import ast
import os
import unittest
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..'))
from mb import mb
from mb import mb_unittest
from mb.lib import validation
TEST_UNREFERENCED_MIXIN_CONFIG = """\
{
'public_artifact_builders': {},
'configs': {
'rel_bot_1': ['rel'],
'rel_bot_2': ['rel'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'unreferenced_mixin': {
'gn_args': 'proprietary_codecs=true',
},
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
TEST_UNKNOWNMIXIN_CONFIG = """\
{
'public_artifact_builders': {},
'configs': {
'rel_bot_1': ['rel'],
'rel_bot_2': ['rel', 'unknown_mixin'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
TEST_UNKNOWN_NESTED_MIXIN_CONFIG = """\
{
'public_artifact_builders': {},
'configs': {
'rel_bot_1': ['rel', 'nested_mixin'],
'rel_bot_2': ['rel'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'nested_mixin': {
'mixins': {
'unknown_mixin': {
'gn_args': 'proprietary_codecs=true',
},
},
},
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
class UnitTest(unittest.TestCase):
  """Tests for the mb config validation helpers in mb.lib.validation."""

  def _ReferenceErrors(self, config_text):
    """Runs the reference check on a bucket-style config literal.

    Parses |config_text|, walks its buckets with the bucket-based walker
    (GetAllConfigsMaster is deprecated in favor of GetAllConfigsBucket),
    and returns the list of validation errors collected.
    """
    configs = ast.literal_eval(config_text)
    errs = []
    all_configs = validation.GetAllConfigsBucket(configs['buckets'])
    validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
                                                  configs['configs'],
                                                  configs['mixins'])
    return errs

  def test_GetAllConfigsMaster(self):
    """Deprecated master-based walk maps each config name to its master."""
    configs = ast.literal_eval(mb_unittest.TEST_CONFIG)
    all_configs = validation.GetAllConfigsMaster(configs['masters'])
    self.assertEqual(all_configs['rel_bot'], 'fake_master')
    self.assertEqual(all_configs['debug_goma'], 'fake_master')

  def test_GetAllConfigsBucket(self):
    """Bucket-based walk maps each config name to its bucket."""
    configs = ast.literal_eval(mb_unittest.TEST_CONFIG_BUCKET)
    all_configs = validation.GetAllConfigsBucket(configs['buckets'])
    self.assertEqual(all_configs['rel_bot'], 'ci')
    self.assertEqual(all_configs['debug_goma'], 'ci')

  def test_CheckAllConfigsAndMixinsReferenced_ok(self):
    """A fully cross-referenced config produces no errors."""
    errs = self._ReferenceErrors(mb_unittest.TEST_CONFIG_BUCKET)
    self.assertEqual(errs, [])

  def test_CheckAllConfigsAndMixinsReferenced_unreferenced(self):
    """A mixin that nothing pulls in is reported as unreferenced."""
    errs = self._ReferenceErrors(TEST_UNREFERENCED_MIXIN_CONFIG)
    self.assertIn('Unreferenced mixin "unreferenced_mixin".', errs)

  def test_CheckAllConfigsAndMixinsReferenced_unknown(self):
    """A config referencing an undeclared mixin is reported."""
    errs = self._ReferenceErrors(TEST_UNKNOWNMIXIN_CONFIG)
    self.assertIn(
        'Unknown mixin "unknown_mixin" '
        'referenced by config "rel_bot_2".', errs)

  def test_CheckAllConfigsAndMixinsReferenced_unknown_nested(self):
    """A mixin nesting an undeclared mixin is reported."""
    errs = self._ReferenceErrors(TEST_UNKNOWN_NESTED_MIXIN_CONFIG)
    self.assertIn(
        'Unknown mixin "unknown_mixin" '
        'referenced by mixin "nested_mixin".', errs)

  def test_CheckAllConfigsAndMixinsReferenced_unused(self):
    # TODO: this test is an exact duplicate of _unknown_nested above and
    # never exercises an "unused config" scenario; give it its own fixture
    # or fold it into _unknown_nested.
    errs = self._ReferenceErrors(TEST_UNKNOWN_NESTED_MIXIN_CONFIG)
    self.assertIn(
        'Unknown mixin "unknown_mixin" '
        'referenced by mixin "nested_mixin".', errs)

  def test_EnsureNoProprietaryMixinsBucket(self):
    """Public-artifact builders may not use proprietary mixins (buckets)."""
    bad_configs = ast.literal_eval(mb_unittest.TEST_BAD_CONFIG_BUCKET)
    errs = []
    default_config = 'fake_config_file'
    config_file = 'fake_config_file'
    validation.EnsureNoProprietaryMixinsBucket(
        errs, default_config, config_file,
        bad_configs['public_artifact_builders'], bad_configs['buckets'],
        bad_configs['configs'], bad_configs['mixins'])
    self.assertIn(
        'Public artifact builder "fake_builder_a" '
        'can not contain the "chrome_with_codecs" mixin.', errs)
    self.assertIn(
        'Public artifact builder "fake_builder_b" '
        'can not contain the "chrome_with_codecs" mixin.', errs)
    # Exactly the two offending builders, nothing else.
    self.assertEqual(len(errs), 2)

  def test_EnsureNoProprietaryMixinsMaster(self):
    """Public-artifact builders may not use proprietary mixins (masters)."""
    bad_configs = ast.literal_eval(mb_unittest.TEST_BAD_CONFIG)
    errs = []
    default_config = 'fake_config_file'
    config_file = 'fake_config_file'
    # The deprecated master-based checker takes the 'masters' dict.
    masters = bad_configs['masters']
    validation.EnsureNoProprietaryMixinsMaster(
        errs, default_config, config_file, masters,
        bad_configs['configs'], bad_configs['mixins'])
    self.assertIn(
        'Public artifact builder "a" '
        'can not contain the "chrome_with_codecs" mixin.', errs)
    self.assertIn(
        'Public artifact builder "b" '
        'can not contain the "chrome_with_codecs" mixin.', errs)
    self.assertEqual(len(errs), 2)

  def test_CheckDuplicateConfigs_ok(self):
    """Distinct flattened configs trigger no duplicate errors."""
    configs = ast.literal_eval(mb_unittest.TEST_CONFIG_BUCKET)
    errs = []
    validation.CheckDuplicateConfigs(errs, configs['configs'],
                                     configs['mixins'], configs['buckets'],
                                     mb.FlattenConfig)
    self.assertEqual(errs, [])

  # TODO: re-enable; skipped in the original change with no recorded reason.
  @unittest.skip('Disabled in the original change; reason not recorded')
  def test_CheckDuplicateConfigs_dups(self):
    """Configs that flatten to identical values are reported together."""
    configs = ast.literal_eval(mb_unittest.TEST_DUP_CONFIG_BUCKET)
    errs = []
    validation.CheckDuplicateConfigs(errs, configs['configs'],
                                     configs['mixins'], configs['buckets'],
                                     mb.FlattenConfig)
    self.assertIn(
        'Duplicate configs detected. When evaluated fully, the '
        'following configs are all equivalent: \'some_config\', '
        '\'some_other_config\'. Please consolidate these configs '
        'into only one unique name per configuration value.', errs)
# Allow running this test file directly (python mb_validate_unittest.py).
if __name__ == '__main__':
  unittest.main()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment