Commit 927cbe8f authored by Junji Watanabe, committed by Chromium LUCI CQ

[tools] Use CAS instead of Isolate server in tools/run-swarmed.py

Tested the script manually.
https://chromium-swarm.appspot.com/task?d=true&id=510e82c2bbfccc10

Bug: 1163817
Change-Id: I0724c51da3c61df645c8ea070180af1d4e3c8d57
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2623734
Auto-Submit: Junji Watanabe <jwata@google.com>
Reviewed-by: Takuto Ikuta <tikuta@chromium.org>
Reviewed-by: Dirk Pranke <dpranke@google.com>
Commit-Queue: Dirk Pranke <dpranke@google.com>
Cr-Commit-Position: refs/heads/master@{#842696}
parent 9abf3025
@@ -41,26 +41,24 @@ def _Spawn(args):
   """Triggers a swarming job. The arguments passed are:
     - The index of the job;
     - The command line arguments object;
-    - The hash of the isolate job used to trigger.
+    - The digest of test files.
   The return value is passed to a collect-style map() and consists of:
     - The index of the job;
     - The json file created by triggering and used to collect results;
     - The command line arguments object.
   """
-  index, args, isolated_hash, swarming_command = args
+  index, args, cas_digest, swarming_command = args
   json_file = os.path.join(args.results, '%d.json' % index)
   trigger_args = [
       'tools/luci-go/swarming',
       'trigger',
       '-S',
       'https://chromium-swarm.appspot.com',
-      '-I',
-      'https://isolateserver.appspot.com',
       '-d',
       'pool=' + args.pool,
-      '-s',
-      isolated_hash,
+      '-digest',
+      cas_digest,
       '-dump-json',
       json_file,
       '-d',
@@ -244,14 +242,14 @@ def main():
   )
   print('Uploading to isolate server, this can take a while...')
-  isolated = os.path.join(args.out_dir, args.target_name + '.isolated')
+  isolate = os.path.join(args.out_dir, args.target_name + '.isolate')
+  digest_json = os.path.join(args.out_dir, args.target_name + '.digest.json')
   subprocess.check_output([
-      'tools/luci-go/isolate', 'archive', '-I',
-      'https://isolateserver.appspot.com', '-i',
-      os.path.join(args.out_dir, args.target_name + '.isolate'), '-s', isolated
+      'tools/luci-go/isolate', 'archive', '-cas-instance', 'chromium-swarm',
+      '-isolate', isolate, '-dump-json', digest_json
   ])
-  with open(isolated) as f:
-    isolated_hash = hashlib.sha1(f.read()).hexdigest()
+  with open(digest_json) as f:
+    cas_digest = json.load(f).get(args.target_name)
   mb_cmd = [
       sys.executable, 'tools/mb/mb.py', 'get-swarming-command', '--as-list'
@@ -272,7 +270,7 @@ def main():
   # Use dummy since threadpools give better exception messages
   # than process pools do, and threads work fine for what we're doing.
   pool = multiprocessing.dummy.Pool()
-  spawn_args = map(lambda i: (i, args, isolated_hash, swarming_cmd),
+  spawn_args = map(lambda i: (i, args, cas_digest, swarming_cmd),
                    range(args.copies))
   spawn_results = pool.imap_unordered(_Spawn, spawn_args)
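
For reference, below is a minimal standalone sketch (not part of this change) of the flow the script now follows: archive the .isolate to the chromium-swarm CAS instance, read the digest back from the dumped JSON, and pass it to `swarming trigger` via -digest. The output directory, target name, pool, and task command in the sketch are illustrative assumptions, not values taken from the script.

# Standalone sketch of the CAS-based upload-and-trigger flow.
# OUT_DIR, TARGET, POOL and the task command are assumed values.
import json
import os
import subprocess

OUT_DIR = 'out/Release'      # assumed build output directory
TARGET = 'base_unittests'    # assumed test target
POOL = 'chromium.tests'      # assumed Swarming pool

isolate = os.path.join(OUT_DIR, TARGET + '.isolate')
digest_json = os.path.join(OUT_DIR, TARGET + '.digest.json')

# Upload the inputs to the chromium-swarm CAS instance; the resulting
# digests are dumped to a JSON file instead of writing a .isolated file.
subprocess.check_output([
    'tools/luci-go/isolate', 'archive', '-cas-instance', 'chromium-swarm',
    '-isolate', isolate, '-dump-json', digest_json
])

# The dumped JSON maps the target name to its CAS digest
# (typically "<hash>/<size_bytes>").
with open(digest_json) as f:
    cas_digest = json.load(f).get(TARGET)

# Trigger one task against the uploaded inputs, referencing them by CAS
# digest rather than an isolate hash. The real script adds further -d
# dimensions and the command produced by `mb.py get-swarming-command`.
json_file = os.path.join(OUT_DIR, '0.json')
subprocess.check_output([
    'tools/luci-go/swarming', 'trigger',
    '-S', 'https://chromium-swarm.appspot.com',
    '-digest', cas_digest,
    '-d', 'pool=' + POOL,
    '-dump-json', json_file,
    '--', os.path.join(OUT_DIR, TARGET),  # assumed task command
])
print('Task metadata written to', json_file)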