Commit 17def0f2 authored by Andrew Grieve's avatar Andrew Grieve Committed by Commit Bot

Reland "tools/binary_size/milestone_apk_sizes.py: Automating size reports"

This reverts commit 8f06819d.

Reason for revert: Fixed missing file.close()

The new script makes it easy to measure a set of archived .apks.
Adds to resource_sizes.py the ability to output to stdout and fixes
up some warning messages it was showing.

Bug: 988127
Change-Id: Ifed112d594ba53073167627e01a446f85b6233c2
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1724444
Reviewed-by: Eric Stevenson <estevenson@chromium.org>
Reviewed-by: oysteine <oysteine@chromium.org>
Commit-Queue: Andrew Grieve <agrieve@chromium.org>
Cr-Commit-Position: refs/heads/master@{#681852}
parent ea058d5b
......@@ -8,8 +8,6 @@
More information at //docs/speed/binary_size/metrics.md.
"""
from __future__ import print_function
import argparse
import collections
from contextlib import contextmanager
......@@ -82,7 +80,7 @@ _READELF_SIZES_METRICS = {
'.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
'.got.plt', '.hash', '.gnu.hash'
],
'bss': ['.bss'],
'bss': ['.bss', '.bss.rel.ro'],
'other': [
'.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
'.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
......@@ -114,7 +112,7 @@ def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
# Group any unknown section headers into the "other" group.
for section_header, section_size in section_sizes.iteritems():
print('Unknown elf section header: %s' % section_header)
sys.stderr.write('Unknown elf section header: %s\n' % section_header)
grouped_section_sizes['other'] += section_size
return grouped_section_sizes
......@@ -477,7 +475,7 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func):
# It will be -Inf for .apk files with multiple .arsc files and no out_dir set.
if normalized_apk_size < 0:
print('Skipping normalized_apk_size because no output directory was set.')
sys.stderr.write('Skipping normalized_apk_size (no output directory set)\n')
else:
report_func('Specifics', 'normalized apk size', normalized_apk_size,
'bytes')
......@@ -604,10 +602,19 @@ def ResourceSizes(args):
raise Exception('Unknown file type: ' + args.input)
if chartjson:
results_path = os.path.join(args.output_dir, 'results-chart.json')
logging.critical('Dumping chartjson to %s', results_path)
with open(results_path, 'w') as json_file:
json.dump(chartjson, json_file)
if args.output_file == '-':
json_file = sys.stdout
elif args.output_file:
json_file = open(args.output_file, 'w')
else:
results_path = os.path.join(args.output_dir, 'results-chart.json')
logging.critical('Dumping chartjson to %s', results_path)
json_file = open(results_path, 'w')
json.dump(chartjson, json_file, indent=2)
if json_file is not sys.stdout:
json_file.close()
# We would ideally generate a histogram set directly instead of generating
# chartjson then converting. However, perf_tests_results_helper is in
......@@ -681,6 +688,10 @@ def main():
output_group.add_argument(
'--output-dir', default='.', help='Directory to save chartjson to.')
output_group.add_argument(
'--output-file',
help='Path to output .json (replaces --output-dir). Works only for '
'--output-format=chartjson')
output_group.add_argument(
'--isolated-script-test-output',
type=os.path.realpath,
......
......@@ -37,6 +37,11 @@ import sys
import subprocess
import tempfile
_DIR_SOURCE_ROOT = os.path.normpath(
os.path.join(os.path.dirname(__file__), '..', '..'))
_GSUTIL = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'depot_tools',
'gsutil.py')
_PUSH_URL = 'gs://chrome-supersize/milestones/'
_DESIRED_CPUS = ['arm', 'arm_64']
......@@ -163,7 +168,7 @@ def _DownloadOneSizeFile(arg_tuples):
src = '{}/{}'.format(base_url, subpath)
dest = os.path.join(temp_dir, subpath)
_MakeDirectory(os.path.dirname(dest))
subprocess.check_call(['gsutil.py', '-q', 'cp', src, dest])
subprocess.check_call([_GSUTIL, '-q', 'cp', src, dest])
@contextlib.contextmanager
......@@ -184,8 +189,7 @@ def _DownloadSizeFiles(base_url, reports):
def _FetchExistingMilestoneReports():
milestones = subprocess.check_output(
['gsutil.py', 'ls', '-R', _PUSH_URL + '*'])
milestones = subprocess.check_output([_GSUTIL, 'ls', '-R', _PUSH_URL + '*'])
for path in milestones.splitlines()[1:]:
report = Report.FromUrl(path)
if report:
......@@ -288,8 +292,15 @@ def main():
logging.warning('Reports saved to %s', args.directory)
cmd = [
'gsutil.py', '-m', 'rsync', '-J', '-a', 'public-read', '-r',
args.directory, _PUSH_URL,
_GSUTIL,
'-m',
'rsync',
'-J',
'-a',
'public-read',
'-r',
args.directory,
_PUSH_URL,
]
if args.sync:
......
#!/usr/bin/env python
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the large commits given a .csv file from a telemetry size graph."""
import argparse
import collections
import csv
import json
import os
import posixpath
import logging
import multiprocessing.dummy
import subprocess
import sys
import tempfile
import zipfile
# Absolute path of the Chromium source root (two directories above this file).
_DIR_SOURCE_ROOT = os.path.normpath(
    os.path.join(os.path.dirname(__file__), '..', '..'))

# Make build/android/gyp/util importable so the zipalign helper can be used.
sys.path.insert(1, os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
from util import zipalign  # pylint: disable=wrong-import-position

# NOTE(review): presumably patches zipfile to cope with zipalign'ed .apks
# (padding in local file headers) — confirm against util/zipalign.
zipalign.ApplyZipFileZipAlignFix()

# Checked-in gsutil wrapper, so gsutil need not be on PATH.
_GSUTIL = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'depot_tools',
                       'gsutil.py')
# Script that computes the per-.apk size metrics consumed below.
_RESOURCE_SIZES = os.path.join(_DIR_SOURCE_ROOT, 'build', 'android',
                               'resource_sizes.py')
class _Artifact(object):
def __init__(self, signed_prefix, name):
self.name = name
self._gs_url = posixpath.join(signed_prefix, name)
self._temp = tempfile.NamedTemporaryFile(suffix=posixpath.basename(name))
self._path = self._temp.name
self._resource_sizes_json = None
def FetchAndMeasure(self):
args = [_GSUTIL, 'cp', self._gs_url, self._path]
logging.warning(' '.join(args))
subprocess.check_call(args)
args = [
_RESOURCE_SIZES,
'--output-format',
'chartjson',
'--output-file',
'-',
self._path,
]
logging.warning(' '.join(args))
self._resource_sizes_json = json.loads(subprocess.check_output(args))
def AddSize(self, metrics):
metrics[self.name] = self._resource_sizes_json['charts']['InstallSize'][
'APK size']['value']
def AddAndroidGoSize(self, metrics):
metrics[self.name + ' (Android Go)'] = self._resource_sizes_json['charts'][
'InstallSize']['Estimated installed size (Android Go)']['value']
def AddMethodCount(self, metrics):
metrics[self.name + ' (method count)'] = self._resource_sizes_json[
'charts']['Dex']['unique methods']['value']
def PrintLibraryCompression(self):
with zipfile.ZipFile(self._path) as z:
for info in z.infolist():
if info.filename.endswith('.so'):
sys.stdout.write('{}/{} compressed: {} uncompressed: {}\n'.format(
self.name, posixpath.basename(info.filename), info.compress_size,
info.file_size))
def _DownloadAndAnalyze(signed_prefix):
  """Downloads all milestone artifacts under |signed_prefix| and prints sizes.

  Emits a tab-delimited header + row of size metrics to stdout, followed by
  per-.so compression information for the WebView apk.

  Args:
    signed_prefix: GCS URL prefix containing the per-arch artifact dirs.
  """
  artifacts = []

  def make_artifact(name):
    # Creates an _Artifact, registers it for parallel download, returns it.
    artifacts.append(_Artifact(signed_prefix, name))
    return artifacts[-1]

  chrome = make_artifact('arm/ChromeStable.apk')
  webview = make_artifact('arm/AndroidWebview.apk')
  webview64 = make_artifact('arm_64/AndroidWebview.apk')
  chrome_modern = make_artifact('arm/ChromeModernStable.apks')
  chrome_modern64 = make_artifact('arm_64/ChromeModernStable.apks')
  monochrome = make_artifact('arm/MonochromeStable.apks')
  monochrome64 = make_artifact('arm_64/MonochromeStable.apks')

  # Download in parallel. Threads (multiprocessing.dummy) suffice since the
  # work is subprocess/network bound.
  pool = multiprocessing.dummy.Pool()
  pool.map(_Artifact.FetchAndMeasure, artifacts)
  pool.close()
  # Fix: join the pool so worker threads are cleaned up before continuing
  # (close() alone does not wait for them to exit).
  pool.join()

  # Add metrics in the order that we want them in the .csv output.
  metrics = collections.OrderedDict()
  chrome.AddSize(metrics)
  chrome_modern.AddSize(metrics)
  chrome_modern64.AddSize(metrics)
  monochrome.AddSize(metrics)
  monochrome64.AddSize(metrics)
  webview.AddSize(metrics)
  webview64.AddSize(metrics)
  monochrome.AddAndroidGoSize(metrics)
  chrome.AddMethodCount(metrics)
  monochrome.AddMethodCount(metrics)

  csv_writer = csv.DictWriter(
      sys.stdout, fieldnames=metrics.keys(), delimiter='\t')
  csv_writer.writeheader()
  csv_writer.writerow(metrics)
  webview.PrintLibraryCompression()
def main():
  """Parses command-line flags and runs the milestone size analysis."""
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--version', required=True, help='e.g.: "75.0.3770.143"')
  arg_parser.add_argument(
      '--signed-bucket',
      required=True,
      help='GCS bucket to find files in. (e.g. "gs://bucket/subdir")')
  args = arg_parser.parse_args()
  # Artifacts for a release live under <signed-bucket>/<version>/.
  _DownloadAndAnalyze(posixpath.join(args.signed_bucket, args.version))
# Entry point when executed as a script (no-op when imported).
if __name__ == '__main__':
  main()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment