Commit 2381b834 authored by Andrew Grieve, committed by Commit Bot

SuperSize: Add .sizediff support to console command

Useful for downloading .sizediff from trybots and inspecting locally.

Includes minor refactoring of file_format.py

Bug: None
Change-Id: I7cf9f8a5829e0eaab6210b632713508ce279ba03
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2422318
Commit-Queue: Andrew Grieve <agrieve@chromium.org>
Reviewed-by: Samuel Huang <huangs@chromium.org>
Cr-Commit-Position: refs/heads/master@{#809172}
parent 665ad94e
......@@ -727,6 +727,19 @@ def LoadAndPostProcessSizeInfo(path, file_obj=None):
return size_info
def LoadAndPostProcessDeltaSizeInfo(path, file_obj=None):
  """Returns a (before, after) tuple of SizeInfos for the given |path|.

  Args:
    path: Path to a .sizediff file.
    file_obj: Optional already-open file object to read from instead of
        opening |path|.
  """
  logging.debug('Loading results from: %s', path)
  before_size_info, after_size_info = file_format.LoadDeltaSizeInfo(
      path, file_obj=file_obj)
  logging.info('Normalizing symbol names')
  _NormalizeNames(before_size_info.raw_symbols)
  # Fix: normalize |after_size_info.raw_symbols| (was |.symbols|), matching
  # the |before_size_info| call above and the len() calls below.
  _NormalizeNames(after_size_info.raw_symbols)
  logging.info('Loaded %d + %d symbols', len(before_size_info.raw_symbols),
               len(after_size_info.raw_symbols))
  return before_size_info, after_size_info
def _CollectModuleSizes(minimal_apks_path):
sizes_by_module = collections.defaultdict(int)
with zipfile.ZipFile(minimal_apks_path) as z:
......
......@@ -486,11 +486,17 @@ def AddArguments(parser):
def Run(args, on_config_error):
# Up-front check for faster error-checking.
for path in args.inputs:
if not path.endswith('.size'):
on_config_error('All inputs must end with ".size"')
if not path.endswith('.size') and not path.endswith('.sizediff'):
on_config_error('All inputs must end with ".size" or ".sizediff"')
size_infos = [archive.LoadAndPostProcessSizeInfo(p) for p in args.inputs]
size_infos = []
for path in args.inputs:
if path.endswith('.sizediff'):
size_infos.extend(archive.LoadAndPostProcessDeltaSizeInfo(path))
else:
size_infos.append(archive.LoadAndPostProcessSizeInfo(path))
output_directory_finder = path_util.OutputDirectoryFinder(
value=args.output_directory,
any_path_within_output_directory=args.inputs[0])
......
......@@ -108,19 +108,21 @@ import itertools
import json
import logging
import os
import shutil
import sys
import models
import parallel
# First line shared by both .size and .sizediff files.
_COMMON_HEADER = b'# Created by //tools/binary_size\n'
# File format version for .size files.
# NOTE(review): the two str constants below appear superseded by the bytes
# _SIZE_HEADER_* constants that follow — looks like diff residue; confirm
# which pair is current before removing either.
_SERIALIZATION_VERSION_SINGLE_CONTAINER = 'Size File Format v1'
_SERIALIZATION_VERSION_MULTI_CONTAINER = 'Size File Format v1.1'
_SIZE_HEADER_SINGLE_CONTAINER = b'Size File Format v1\n'
_SIZE_HEADER_MULTI_CONTAINER = b'Size File Format v1.1\n'
# Header for .sizediff files
# NOTE(review): the second assignment wins at import time; the str version
# above it looks like a stale duplicate — confirm before deleting.
_SIZEDIFF_HEADER = '# Created by //tools/binary_size\nDIFF\n'
_SIZEDIFF_HEADER = b'DIFF\n'
# Version number written into the .sizediff JSON header fields.
_SIZEDIFF_VERSION = 1
class _Writer:
......@@ -252,14 +254,11 @@ def _SaveSizeInfoToFile(size_info,
num_containers = len(size_info.containers)
has_multi_containers = (num_containers > 1)
w = _Writer(file_obj)
# "Created by SuperSize" header
w.WriteLine('# Created by //tools/binary_size')
file_obj.write(_COMMON_HEADER)
if has_multi_containers:
w.WriteLine(_SERIALIZATION_VERSION_MULTI_CONTAINER)
file_obj.write(_SIZE_HEADER_MULTI_CONTAINER)
else:
w.WriteLine(_SERIALIZATION_VERSION_SINGLE_CONTAINER)
file_obj.write(_SIZE_HEADER_SINGLE_CONTAINER)
# JSON header fields
fields = {
......@@ -283,6 +282,8 @@ def _SaveSizeInfoToFile(size_info,
fields['section_sizes'] = size_info.containers[0].section_sizes
fields_str = json.dumps(fields, indent=2, sort_keys=True)
w = _Writer(file_obj)
w.WriteLine(str(len(fields_str)))
w.WriteLine(fields_str)
w.LogSize('header') # For libchrome: 570 bytes.
......@@ -404,11 +405,12 @@ def _LoadSizeInfoFromFile(file_obj, size_path):
"""
# Split lines on '\n', since '\r' can appear in some lines!
lines = io.TextIOWrapper(file_obj, newline='\n')
_ReadLine(lines) # Line 0: "Created by SuperSize" header
actual_version = _ReadLine(lines)
if actual_version == _SERIALIZATION_VERSION_SINGLE_CONTAINER:
header_line = _ReadLine(lines).encode('ascii')
assert header_line == _COMMON_HEADER[:-1], 'was ' + str(header_line)
header_line = _ReadLine(lines).encode('ascii')
if header_line == _SIZE_HEADER_SINGLE_CONTAINER[:-1]:
has_multi_containers = False
elif actual_version == _SERIALIZATION_VERSION_MULTI_CONTAINER:
elif header_line == _SIZE_HEADER_MULTI_CONTAINER[:-1]:
has_multi_containers = True
else:
raise ValueError('Version mismatch. Need to write some upgrade code.')
......@@ -624,9 +626,8 @@ def SaveSizeInfo(size_info,
sparse_symbols=sparse_symbols)
logging.debug('Serialization complete. Gzipping...')
bytesio.seek(0)
with _OpenGzipForWrite(path, file_obj=file_obj) as f:
f.write(bytesio.read())
f.write(bytesio.getbuffer())
def LoadSizeInfo(filename, file_obj=None):
......@@ -664,23 +665,42 @@ def SaveDeltaSizeInfo(delta_size_info, path, file_obj=None):
with file_obj or open(path, 'wb') as output_file:
w = _Writer(output_file)
# |_SIZEDIFF_HEADER| is multi-line with new line at end, so use
# WriteString() instead of WriteLine().
w.WriteString(_SIZEDIFF_HEADER)
w.WriteBytes(_COMMON_HEADER + _SIZEDIFF_HEADER)
# JSON header fields
fields = {
'version': 1,
'version': _SIZEDIFF_VERSION,
'before_length': before_size_file.tell(),
}
fields_str = json.dumps(fields, indent=2, sort_keys=True)
w.WriteLine(str(len(fields_str)))
w.WriteLine(fields_str)
before_size_file.seek(0)
shutil.copyfileobj(before_size_file, output_file)
w.WriteBytes(before_size_file.getbuffer())
after_promise.get()
after_size_file.seek(0)
shutil.copyfileobj(after_size_file, output_file)
w.WriteBytes(after_size_file.getbuffer())
def LoadDeltaSizeInfo(filename, file_obj=None):
  """Returns a tuple of size infos (before, after).

  To reconstruct the DeltaSizeInfo, diff the two size infos.
  """
  expected_header = _COMMON_HEADER + _SIZEDIFF_HEADER
  with file_obj or open(filename, 'rb') as f:
    # Validate the fixed-size header before trusting anything that follows.
    if f.read(len(expected_header)) != expected_header:
      raise Exception('Bad file header.')
    # A length-prefixed JSON blob of metadata follows the header.
    num_json_bytes = int(f.readline())
    fields = json.loads(f.read(num_json_bytes + 1))  # + 1 for \n
    assert fields['version'] == _SIZEDIFF_VERSION
    # The "before" payload starts here; the "after" payload starts
    # |before_length| bytes later.
    after_offset = f.tell() + fields['before_length']
    before_size_info = LoadSizeInfo(filename, f)
    f.seek(after_offset)
    after_size_info = LoadSizeInfo(filename, f)
  return before_size_info, after_size_info
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment