Commit 1492aeb4 authored by Greg Guterman, committed by Commit Bot

Allow for bucket-based mb_config and add default

I've joined the current mb_config.pyl with /src/cr-buildbucket.cfg, remapping
masters to buckets, to produce a new bucket-based mb_config_buckets.pyl.
The old logic, which used the 'masters': { 'chromium': {...} } dict to
identify the chromium builders that blacklist the Chrome proprietary mixins,
now reads a top-level key in mb_config_buckets.pyl
called 'public_artifact_builders'.
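
For illustration, a minimal sketch of the new layout, modeled on the test
config in this change (bucket, builder, and config names are placeholders):

  {
    # Builders whose artifacts ship publicly and therefore must not pull in
    # proprietary mixins such as 'chrome_with_codecs'.
    'public_artifact_builders': {
      'some_bucket': ['some_public_builder'],
    },
    # Builders are grouped by buildbucket bucket instead of by master.
    'buckets': {
      'some_bucket': {
        'some_public_builder': 'rel_bot',
      },
    },
    'configs': {
      'rel_bot': ['rel', 'goma'],
    },
    'mixins': {
      'rel': {'gn_args': 'is_debug=false'},
      'goma': {'gn_args': 'use_goma=true'},
    },
  }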

mb.py has been modified to support the new mapping in mb_config_buckets.pyl,
and it uses that file by default whenever a bucket is specified (see the
example invocations after this paragraph).
I hope to move all callers of mb over to buckets instead of masters, at which
point I plan to remove the master-related functions from mb.py.
The new functions are written so that the old master-based functions are easy
to delete later, at the cost of being less DRY.
Validation logic was factored out into a helper file to lighten mb.py.
I also removed CheckCompile, which appears to be dead code.
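
As a rough sketch of the command-line difference (flag spellings are taken
from the unit tests in this change; master, bucket, builder, and path names
are illustrative):

  # Old, master-based invocation:
  tools/mb/mb.py gen -m fake_master -b fake_debug_builder //out/Debug

  # New, bucket-based invocation; -u names the bucket, and the bucket-based
  # config file is used by default:
  tools/mb/mb.py gen -u ci -b fake_debug_builder //out/Debug

  # An explicit config file can still be passed, e.g. for validation:
  tools/mb/mb.py validate -f tools/mb/mb_config_buckets.pyl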

The new .pyl also excludes all the internal builders, which will be
moved to src-internal.

Bug: 1028719
Change-Id: I86b55b12def06afab81faec66463963ede2f3244
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1963441
Commit-Queue: Gregory Guterman <guterman@google.com>
Reviewed-by: Aaron Gable <agable@chromium.org>
Reviewed-by: Stephen Martinis <martiniss@chromium.org>
Cr-Commit-Position: refs/heads/master@{#731713}
parent 0d1119fb
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Validation functions for the Meta-Build config file"""
import collections
def GetAllConfigsMaster(masters):
"""Build a list of all of the configs referenced by builders.
Deprecated in favor or GetAllConfigsBucket
"""
all_configs = {}
for master in masters:
for config in masters[master].values():
if isinstance(config, dict):
for c in config.values():
all_configs[c] = master
else:
all_configs[config] = master
return all_configs
def GetAllConfigsBucket(buckets):
"""Build a list of all of the configs referenced by builders."""
all_configs = {}
for bucket in buckets:
for config in buckets[bucket].values():
if isinstance(config, dict):
for c in config.values():
all_configs[c] = bucket
else:
all_configs[config] = bucket
return all_configs
def CheckAllConfigsAndMixinsReferenced(errs, all_configs, configs, mixins):
"""Check that every actual config is actually referenced."""
for config in configs:
if not config in all_configs:
errs.append('Unused config "%s".' % config)
# Figure out the whole list of mixins, and check that every mixin
# listed by a config or another mixin actually exists.
referenced_mixins = set()
for config, mixin_names in configs.items():
for mixin in mixin_names:
if not mixin in mixins:
errs.append(
'Unknown mixin "%s" referenced by config "%s".' % (mixin, config))
referenced_mixins.add(mixin)
for mixin in mixins:
for sub_mixin in mixins[mixin].get('mixins', []):
if not sub_mixin in mixins:
errs.append(
'Unknown mixin "%s" referenced by mixin "%s".' % (sub_mixin, mixin))
referenced_mixins.add(sub_mixin)
# Check that every mixin defined is actually referenced somewhere.
for mixin in mixins:
if not mixin in referenced_mixins:
errs.append('Unreferenced mixin "%s".' % mixin)
return errs
def EnsureNoProprietaryMixinsBucket(errs, default_config, config_file,
public_artifact_builders, buckets, configs,
mixins):
"""Check that the 'chromium' bots which build public artifacts
do not include the chrome_with_codecs mixin.
"""
if config_file != default_config:
return
if public_artifact_builders is None:
errs.append('Missing "public_artifact_builders" config entry. '
'Please update this proprietary codecs check with the '
'name of the builders responsible for public build artifacts.')
return
# crbug/1033585
for bucket, builders in public_artifact_builders.items():
for builder in builders:
config = buckets[bucket][builder]
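# Walk this config's mixins (and their nested mixins) and flag any that
# resolve to the proprietary 'chrome_with_codecs' mixin.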
def RecurseMixins(builder, current_mixin):
if current_mixin == 'chrome_with_codecs':
errs.append('Public artifact builder "%s" can not contain the '
'"chrome_with_codecs" mixin.' % builder)
return
if not 'mixins' in mixins[current_mixin]:
return
for mixin in mixins[current_mixin]['mixins']:
RecurseMixins(builder, mixin)
for mixin in configs[config]:
RecurseMixins(builder, mixin)
return errs
def EnsureNoProprietaryMixinsMaster(errs, default_config, config_file, masters,
configs, mixins):
"""If we're checking the Chromium config, check that the 'chromium' bots
which build public artifacts do not include the chrome_with_codecs mixin.
Deprecated in favor of EnsureNoProprietaryMixinsBucket.
"""
if config_file == default_config:
if 'chromium' in masters:
for builder in masters['chromium']:
config = masters['chromium'][builder]
def RecurseMixins(current_mixin):
if current_mixin == 'chrome_with_codecs':
errs.append('Public artifact builder "%s" can not contain the '
'"chrome_with_codecs" mixin.' % builder)
return
if not 'mixins' in mixins[current_mixin]:
return
for mixin in mixins[current_mixin]['mixins']:
RecurseMixins(mixin)
for mixin in configs[config]:
RecurseMixins(mixin)
else:
errs.append('Missing "chromium" master. Please update this '
'proprietary codecs check with the name of the master '
'responsible for public build artifacts.')
def CheckDuplicateConfigs(errs, config_pool, mixin_pool, grouping,
flatten_config):
"""Check for duplicate configs.
Evaluate all configs, and see if, when
evaluated, differently named configs are the same.
"""
evaled_to_source = collections.defaultdict(set)
for group, builders in grouping.items():
for builder in builders:
config = grouping[group][builder]
if not config:
continue
if isinstance(config, dict):
# Ignore for now
continue
elif config.startswith('//'):
args = config
else:
args = flatten_config(config_pool, mixin_pool, config)['gn_args']
if 'error' in args:
continue
evaled_to_source[args].add(config)
for v in evaled_to_source.values():
if len(v) != 1:
errs.append(
'Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: %s. Please '
'consolidate these configs into only one unique name per '
'configuration value.' % (', '.join(sorted('%r' % val for val in v))))
#!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for mb.py."""
from __future__ import print_function
from __future__ import absolute_import
import json
import os
@@ -14,7 +15,10 @@ import StringIO
import sys
import unittest
import mb
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..'))
from mb import mb
class FakeMBW(mb.MetaBuildWrapper):
@@ -24,7 +28,9 @@ class FakeMBW(mb.MetaBuildWrapper):
# Override vars for test portability.
if win32:
self.chromium_src_dir = 'c:\\fake_src'
self.default_config = 'c:\\fake_src\\tools\\mb\\mb_config.pyl'
self.default_config_master = 'c:\\fake_src\\tools\\mb\\mb_config.pyl'
self.default_config_bucket = 'c:\\fake_src\\tools\\mb\\mb_config_bucket.pyl' # pylint: disable=line-too-long
self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\'
'gn_isolate_map.pyl')
self.platform = 'win32'
@@ -33,7 +39,8 @@ class FakeMBW(mb.MetaBuildWrapper):
self.cwd = 'c:\\fake_src\\out\\Default'
else:
self.chromium_src_dir = '/fake_src'
self.default_config = '/fake_src/tools/mb/mb_config.pyl'
self.default_config_master = '/fake_src/tools/mb/mb_config.pyl'
self.default_config_bucket = '/fake_src/tools/mb/mb_config_bucket.pyl'
self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl'
self.executable = '/usr/bin/python'
self.platform = 'linux2'
@@ -119,7 +126,7 @@ class FakeFile(object):
self.buf += contents
def close(self):
self.files[self.name] = self.buf
TEST_CONFIG = """\
@@ -172,6 +179,55 @@ TEST_CONFIG = """\
}
"""
TEST_CONFIG_BUCKET = """\
{
'public_artifact_builders': {},
'buckets': {
'ci': {
'fake_builder': 'rel_bot',
'fake_debug_builder': 'debug_goma',
'fake_simplechrome_builder': 'cros_chrome_sdk',
'fake_args_bot': '//build/args/bots/fake_master/fake_args_bot.gn',
'fake_multi_phase': { 'phase_1': 'phase_1', 'phase_2': 'phase_2'},
'fake_args_file': 'args_file_goma',
}
},
'configs': {
'args_file_goma': ['args_file', 'goma'],
'cros_chrome_sdk': ['cros_chrome_sdk'],
'rel_bot': ['rel', 'goma', 'fake_feature1'],
'debug_goma': ['debug', 'goma'],
'phase_1': ['phase_1'],
'phase_2': ['phase_2'],
},
'mixins': {
'cros_chrome_sdk': {
'cros_passthrough': True,
},
'fake_feature1': {
'gn_args': 'enable_doom_melon=true',
},
'goma': {
'gn_args': 'use_goma=true',
},
'args_file': {
'args_file': '//build/args/fake.gn',
},
'phase_1': {
'gn_args': 'phase=1',
},
'phase_2': {
'gn_args': 'phase=2',
},
'rel': {
'gn_args': 'is_debug=false',
},
'debug': {
'gn_args': 'is_debug=true',
},
},
}
"""
TEST_BAD_CONFIG = """\
{
@@ -199,6 +255,35 @@ TEST_BAD_CONFIG = """\
}
"""
TEST_BAD_CONFIG_BUCKET = """\
{
'public_artifact_builders': {
'fake_bucket_a': ['fake_builder_a', 'fake_builder_b'],
},
'configs': {
'rel_bot_1': ['rel', 'chrome_with_codecs'],
'rel_bot_2': ['rel', 'bad_nested_config'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'chrome_with_codecs': {
'gn_args': 'proprietary_codecs=true',
},
'bad_nested_config': {
'mixins': ['chrome_with_codecs'],
},
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
TEST_ARGS_FILE_TWICE_CONFIG = """\
{
@@ -220,6 +305,26 @@ TEST_ARGS_FILE_TWICE_CONFIG = """\
"""
TEST_ARGS_FILE_TWICE_CONFIG_BUCKET = """\
{
'public_artifact_builders': {},
'buckets': {
'chromium': {},
'fake_bucket': {
'fake_args_file_twice': 'args_file_twice',
},
},
'configs': {
'args_file_twice': ['args_file', 'args_file'],
},
'mixins': {
'args_file': {
'args_file': '//build/args/fake.gn',
},
},
}
"""
TEST_DUP_CONFIG = """\
{
'masters': {
@@ -241,6 +346,27 @@ TEST_DUP_CONFIG = """\
}
"""
TEST_DUP_CONFIG_BUCKET = """\
{
'public_artifact_builders': {},
'buckets': {
'ci': {},
'fake_bucket': {
'fake_builder': 'some_config',
'other_builder': 'some_other_config',
},
},
'configs': {
'some_config': ['args_file'],
'some_other_config': ['args_file'],
},
'mixins': {
'args_file': {
'args_file': '//build/args/fake.gn',
},
},
}
"""
TRYSERVER_CONFIG = """\
{
@@ -264,7 +390,8 @@ TRYSERVER_CONFIG = """\
class UnitTest(unittest.TestCase):
def fake_mbw(self, files=None, win32=False):
mbw = FakeMBW(win32=win32)
mbw.files.setdefault(mbw.default_config, TEST_CONFIG)
mbw.files.setdefault(mbw.default_config_master, TEST_CONFIG)
mbw.files.setdefault(mbw.default_config_bucket, TEST_CONFIG_BUCKET)
mbw.files.setdefault(
mbw.ToAbsPath('//testing/buildbot/gn_isolate_map.pyl'),
'''{
@@ -442,9 +569,18 @@ class UnitTest(unittest.TestCase):
('import("//build/args/fake.gn")\n'
'use_goma = true\n'))
def test_gen_args_file_twice_bucket(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config_bucket] = TEST_ARGS_FILE_TWICE_CONFIG_BUCKET
self.check([
'gen', '-u', 'fake_bucket', '-b', 'fake_args_file_twice', '//out/Debug'
],
mbw=mbw,
ret=1)
def test_gen_args_file_twice(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TEST_ARGS_FILE_TWICE_CONFIG
mbw.files[mbw.default_config_master] = TEST_ARGS_FILE_TWICE_CONFIG
self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_file_twice',
'//out/Debug'], mbw=mbw, ret=1)
@@ -793,18 +929,44 @@ class UnitTest(unittest.TestCase):
'enable_doom_melon = true\n'
'use_goma = true\n'))
def test_recursive_lookup_bucket(self):
files = {
'/fake_src/build/args/fake.gn': ('enable_doom_melon = true\n'
'enable_antidoom_banana = true\n')
}
self.check(['lookup', '-u', 'ci', '-b', 'fake_args_file', '--recursive'],
files=files,
ret=0,
out=('enable_antidoom_banana = true\n'
'enable_doom_melon = true\n'
'use_goma = true\n'))
def test_validate(self):
mbw = self.fake_mbw()
self.check(['validate'], mbw=mbw, ret=0)
def test_bad_validate(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TEST_BAD_CONFIG
self.check(['validate'], mbw=mbw, ret=1)
mbw.files[mbw.default_config_master] = TEST_BAD_CONFIG
self.check(['validate', '-f', mbw.default_config_master], mbw=mbw, ret=1)
def test_bad_validate_bucket(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config_bucket] = TEST_BAD_CONFIG_BUCKET
self.check(['validate', '-f', mbw.default_config_bucket], mbw=mbw, ret=1)
def test_duplicate_validate(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TEST_DUP_CONFIG
mbw.files[mbw.default_config_master] = TEST_DUP_CONFIG
self.check(['validate'], mbw=mbw, ret=1)
self.assertIn(
'Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: \'some_config\', '
'\'some_other_config\'.', mbw.out)
def test_duplicate_validate_bucket(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config_bucket] = TEST_DUP_CONFIG_BUCKET
self.check(['validate'], mbw=mbw, ret=1)
self.assertIn('Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: \'some_config\', '
......
#!/usr/bin/python
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for mb_validate.py."""
from __future__ import print_function
from __future__ import absolute_import
import sys
import ast
import os
import unittest
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.abspath(__file__)), '..'))
from mb import mb
from mb import mb_unittest
from mb.lib import validation
TEST_UNREFERENCED_MIXIN_CONFIG = """\
{
'public_artifact_builders': {},
'configs': {
'rel_bot_1': ['rel'],
'rel_bot_2': ['rel'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'unreferenced_mixin': {
'gn_args': 'proprietary_codecs=true',
},
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
TEST_UNKNOWNMIXIN_CONFIG = """\
{
'public_artifact_builders': {},
'configs': {
'rel_bot_1': ['rel'],
'rel_bot_2': ['rel', 'unknown_mixin'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
TEST_UNKNOWN_NESTED_MIXIN_CONFIG = """\
{
'public_artifact_builders': {},
'configs': {
'rel_bot_1': ['rel', 'nested_mixin'],
'rel_bot_2': ['rel'],
},
'buckets': {
'fake_bucket_a': {
'fake_builder_a': 'rel_bot_1',
'fake_builder_b': 'rel_bot_2',
},
},
'mixins': {
'nested_mixin': {
'mixins': {
'unknown_mixin': {
'gn_args': 'proprietary_codecs=true',
},
},
},
'rel': {
'gn_args': 'is_debug=false',
},
},
}
"""
class UnitTest(unittest.TestCase):
def test_GetAllConfigsMaster(self):
configs = ast.literal_eval(mb_unittest.TEST_CONFIG)
all_configs = validation.GetAllConfigsMaster(configs['masters'])
self.assertEqual(all_configs['rel_bot'], 'fake_master')
self.assertEqual(all_configs['debug_goma'], 'fake_master')
def test_GetAllConfigsBucket(self):
configs = ast.literal_eval(mb_unittest.TEST_CONFIG_BUCKET)
all_configs = validation.GetAllConfigsBucket(configs['buckets'])
self.assertEqual(all_configs['rel_bot'], 'ci')
self.assertEqual(all_configs['debug_goma'], 'ci')
def test_CheckAllConfigsAndMixinsReferenced_ok(self):
configs = ast.literal_eval(mb_unittest.TEST_CONFIG_BUCKET)
errs = []
all_configs = validation.GetAllConfigsBucket(configs['buckets'])
config_configs = configs['configs']
mixins = configs['mixins']
validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
config_configs, mixins)
self.assertEqual(errs, [])
def test_CheckAllConfigsAndMixinsReferenced_unreferenced(self):
configs = ast.literal_eval(TEST_UNREFERENCED_MIXIN_CONFIG)
errs = []
all_configs = validation.GetAllConfigsBucket(configs['buckets'])
config_configs = configs['configs']
mixins = configs['mixins']
validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
config_configs, mixins)
self.assertIn('Unreferenced mixin "unreferenced_mixin".', errs)
def test_CheckAllConfigsAndMixinsReferenced_unknown(self):
configs = ast.literal_eval(TEST_UNKNOWNMIXIN_CONFIG)
errs = []
all_configs = validation.GetAllConfigsBucket(configs['buckets'])
config_configs = configs['configs']
mixins = configs['mixins']
validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
config_configs, mixins)
self.assertIn(
'Unknown mixin "unknown_mixin" '
'referenced by config "rel_bot_2".', errs)
def test_CheckAllConfigsAndMixinsReferenced_unknown_nested(self):
configs = ast.literal_eval(TEST_UNKNOWN_NESTED_MIXIN_CONFIG)
errs = []
all_configs = validation.GetAllConfigsBucket(configs['buckets'])
config_configs = configs['configs']
mixins = configs['mixins']
validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
config_configs, mixins)
self.assertIn(
'Unknown mixin "unknown_mixin" '
'referenced by mixin "nested_mixin".', errs)
def test_CheckAllConfigsAndMixinsReferenced_unused(self):
configs = ast.literal_eval(TEST_UNKNOWN_NESTED_MIXIN_CONFIG)
errs = []
all_configs = validation.GetAllConfigsBucket(configs['buckets'])
config_configs = configs['configs']
mixins = configs['mixins']
validation.CheckAllConfigsAndMixinsReferenced(errs, all_configs,
config_configs, mixins)
self.assertIn(
'Unknown mixin "unknown_mixin" '
'referenced by mixin "nested_mixin".', errs)
def test_EnsureNoProprietaryMixinsBucket(self):
bad_configs = ast.literal_eval(mb_unittest.TEST_BAD_CONFIG_BUCKET)
errs = []
default_config = 'fake_config_file'
config_file = 'fake_config_file'
public_artifact_builders = bad_configs['public_artifact_builders']
buckets = bad_configs['buckets']
mixins = bad_configs['mixins']
config_configs = bad_configs['configs']
validation.EnsureNoProprietaryMixinsBucket(
errs, default_config, config_file, public_artifact_builders, buckets,
config_configs, mixins)
self.assertIn(
'Public artifact builder "fake_builder_a" '
'can not contain the "chrome_with_codecs" mixin.', errs)
self.assertIn(
'Public artifact builder "fake_builder_b" '
'can not contain the "chrome_with_codecs" mixin.', errs)
self.assertEqual(len(errs), 2)
def test_EnsureNoProprietaryMixinsMaster(self):
bad_configs = ast.literal_eval(mb_unittest.TEST_BAD_CONFIG)
errs = []
default_config = 'fake_config_file'
config_file = 'fake_config_file'
buckets = bad_configs['masters']
mixins = bad_configs['mixins']
config_configs = bad_configs['configs']
validation.EnsureNoProprietaryMixinsMaster(
errs, default_config, config_file, buckets, config_configs, mixins)
self.assertIn(
'Public artifact builder "a" '
'can not contain the "chrome_with_codecs" mixin.', errs)
self.assertIn(
'Public artifact builder "b" '
'can not contain the "chrome_with_codecs" mixin.', errs)
self.assertEqual(len(errs), 2)
def test_CheckDuplicateConfigs_ok(self):
configs = ast.literal_eval(mb_unittest.TEST_CONFIG_BUCKET)
config_configs = configs['configs']
mixins = configs['mixins']
grouping = configs['buckets']
errs = []
validation.CheckDuplicateConfigs(errs, config_configs, mixins, grouping,
mb.FlattenConfig)
self.assertEqual(errs, [])
@unittest.skip('bla')
def test_CheckDuplicateConfigs_dups(self):
configs = ast.literal_eval(mb_unittest.TEST_DUP_CONFIG_BUCKET)
config_configs = configs['configs']
mixins = configs['mixins']
grouping = configs['buckets']
errs = []
validation.CheckDuplicateConfigs(errs, config_configs, mixins, grouping,
mb.FlattenConfig)
self.assertIn(
'Duplicate configs detected. When evaluated fully, the '
'following configs are all equivalent: \'some_config\', '
'\'some_other_config\'. Please consolidate these configs '
'into only one unique name per configuration value.', errs)
if __name__ == '__main__':
unittest.main()