Commit ad7204d7 authored by Andrew Grieve, committed by Commit Bot

resource_sizes.py: Remove detailed pak entry analysis

//tools/binary_size/supersize now does a better job of analyzing these.

Change-Id: I8fdf607c6985388dbb174f45bbfe8321046a5cf6
Reviewed-on: https://chromium-review.googlesource.com/995953
Reviewed-by: Peter Wen <wnwen@chromium.org>
Commit-Queue: agrieve <agrieve@chromium.org>
Cr-Commit-Position: refs/heads/master@{#548175}
parent 261c6064
@@ -13,12 +13,10 @@ import collections
from contextlib import contextmanager
import json
import logging
import operator
import os
import re
import struct
import sys
import tempfile
import zipfile
import zlib
@@ -32,17 +30,11 @@ from pylib import constants
from pylib.constants import host_paths
_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
_GRIT_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'tools', 'grit')
_BUILD_UTILS_PATH = os.path.join(
    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
_APK_PATCH_SIZE_ESTIMATOR_PATH = os.path.join(
    host_paths.DIR_SOURCE_ROOT, 'third_party', 'apk-patch-size-estimator')
# Prepend the grit module from the source tree so it takes precedence over
# other grit versions that might be present in the search path.
with host_paths.SysPath(_GRIT_PATH, 0):
  from grit.format import data_pack  # pylint: disable=import-error

with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
  import perf_tests_results_helper  # pylint: disable=import-error
@@ -542,118 +534,6 @@ def PrintApkAnalysis(apk_filename, tool_prefix, out_dir, chartjson=None):
      print 'Unknown entry:', info.filename, info.compress_size
def IsPakFileName(file_name):
  """Returns whether the given file name ends with .pak or .lpak."""
  return file_name.endswith('.pak') or file_name.endswith('.lpak')
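Aside: the per-entry overhead constants used in PrintPakAnalysis below (6 bytes per resource entry, 4 per alias) come from the pak index layout. Here is a minimal sketch of walking a version-4 .pak index directly, assuming the classic GRIT layout of that era (uint32 version, uint32 entry count, uint8 encoding, then (uint16 id, uint32 offset) pairs plus one trailing sentinel entry); `read_pak_v4_index` is a hypothetical helper, not part of this file, and real code should go through `data_pack.ReadDataPack` as this script does:

```python
import struct

def read_pak_v4_index(path):
  """Hypothetical helper: returns (resource_id, size) pairs from a v4 .pak.

  Assumes the v4 layout described above; not part of resource_sizes.py.
  """
  with open(path, 'rb') as f:
    data = f.read()
  version, num_entries = struct.unpack_from('<II', data, 0)
  assert version == 4, 'this sketch only handles v4 pak files'
  # The index starts after the 9-byte header; each entry is 6 bytes
  # (uint16 id, uint32 offset). The sentinel entry delimits the last resource.
  entries = [struct.unpack_from('<HI', data, 9 + 6 * i)
             for i in range(num_entries + 1)]
  # A resource's size is the gap between its offset and the next entry's.
  return [(entries[i][0], entries[i + 1][1] - entries[i][1])
          for i in range(num_entries)]
```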
def PrintPakAnalysis(apk_filename, min_pak_resource_size, out_dir):
  """Print sizes of all resources in all pak files in |apk_filename|."""
  print
  print 'Analyzing pak files in %s...' % apk_filename

  # A structure for holding details about a pak file.
  Pak = collections.namedtuple(
      'Pak', ['filename', 'compress_size', 'file_size', 'resources'])

  # Build a list of Pak objects, one for each pak file.
  paks = []
  apk = zipfile.ZipFile(apk_filename, 'r')
  try:
    for i in (x for x in apk.infolist() if IsPakFileName(x.filename)):
      with tempfile.NamedTemporaryFile() as f:
        f.write(apk.read(i.filename))
        f.flush()
        paks.append(Pak(i.filename, i.compress_size, i.file_size,
                        data_pack.ReadDataPack(f.name).resources))
  finally:
    apk.close()

  # Output the overall pak file summary.
  total_files = len(paks)
  total_compress_size = sum(pak.compress_size for pak in paks)
  total_file_size = sum(pak.file_size for pak in paks)
  print 'Total pak files: %d' % total_files
  print 'Total compressed size: %s' % _FormatBytes(total_compress_size)
  print 'Total uncompressed size: %s' % _FormatBytes(total_file_size)
  print

  if not paks:
    return

  # Output the table of details about all pak files.
  print '%25s%11s%21s%21s' % (
      'FILENAME', 'RESOURCES', 'COMPRESSED SIZE', 'UNCOMPRESSED SIZE')
  for pak in sorted(paks, key=operator.attrgetter('file_size'), reverse=True):
    print '%25s %10s %12s %6.2f%% %12s %6.2f%%' % (
        pak.filename,
        len(pak.resources),
        _FormatBytes(pak.compress_size),
        100.0 * pak.compress_size / total_compress_size,
        _FormatBytes(pak.file_size),
        100.0 * pak.file_size / total_file_size)

  print
  print 'Analyzing pak resources in %s...' % apk_filename

  # Calculate aggregate stats about resources across pak files.
  resource_count_map = collections.defaultdict(int)
  resource_size_map = collections.defaultdict(int)
  seen_data_ids = set()
  alias_overhead_bytes = 4
  resource_overhead_bytes = 6
  for pak in paks:
    for k, v in pak.resources.iteritems():
      resource_count_map[k] += 1
      if id(v) not in seen_data_ids:
        seen_data_ids.add(id(v))
        resource_size_map[k] += resource_overhead_bytes + len(v)
      else:
        resource_size_map[k] += alias_overhead_bytes

  # Output the overall resource summary.
  total_resource_size = sum(resource_size_map.values())
  total_resource_count = len(resource_count_map)
  assert total_resource_size <= total_file_size
  print 'Total pak resources: %s' % total_resource_count
  print 'Total uncompressed resource size: %s' % _FormatBytes(
      total_resource_size)
  print

  if not out_dir or not os.path.isdir(out_dir):
    return

  resource_id_name_map, resources_id_header_map = _AnnotatePakResources(out_dir)

  # Output the table of details about all resources across pak files.
  print
  print '%56s %5s %17s' % ('RESOURCE', 'COUNT', 'UNCOMPRESSED SIZE')
  for i in sorted(resource_size_map, key=resource_size_map.get,
                  reverse=True):
    if resource_size_map[i] < min_pak_resource_size:
      break
    print '%56s %5s %9s %6.2f%%' % (
        resource_id_name_map.get(i, i),
        resource_count_map[i],
        _FormatBytes(resource_size_map[i]),
        100.0 * resource_size_map[i] / total_resource_size)

  # Print a breakdown on a per-grd-file basis.
  size_by_header = collections.defaultdict(int)
  for resid, size in resource_size_map.iteritems():
    size_by_header[resources_id_header_map.get(resid, 'unknown')] += size

  print
  print '%80s %17s' % ('HEADER', 'UNCOMPRESSED SIZE')
  for header in sorted(size_by_header, key=size_by_header.get, reverse=True):
    if size_by_header[header] < min_pak_resource_size:
      break
    print '%80s %9s %6.2f%%' % (
        header,
        _FormatBytes(size_by_header[header]),
        100.0 * size_by_header[header] / total_resource_size)
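To make the alias accounting above concrete, a self-contained illustration with hypothetical sizes; the 6- and 4-byte constants mirror a main index entry (uint16 id + uint32 offset) and an alias entry (uint16 id + uint16 index), assuming the v5 pak layout:

```python
import collections

# Two ids share one backing string, as ReadDataPack yields for aliased
# entries; the first occurrence is charged the index entry plus the data,
# the second occurrence only the alias entry.
shared = 'x' * 100
resources = {1: shared, 2: shared, 3: 'y' * 50}
seen_data_ids = set()
resource_size_map = collections.defaultdict(int)
for k, v in resources.iteritems():
  if id(v) not in seen_data_ids:
    seen_data_ids.add(id(v))
    resource_size_map[k] += 6 + len(v)  # resource_overhead_bytes + data
  else:
    resource_size_map[k] += 4  # alias_overhead_bytes
assert sum(resource_size_map.values()) == (6 + 100) + 4 + (6 + 50)  # 166
```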
def _AnnotatePakResources(out_dir):
  """Returns a pair of maps: id_name_map, id_header_map."""
  print 'Looking at resources in: %s' % out_dir
@@ -721,17 +601,6 @@ def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix):
so_with_symbols_path)
def _FormatBytes(byts):
  """Pretty-print a number of bytes."""
  if byts > 2**20.0:
    byts /= 2**20.0
    return '%.2fm' % byts
  if byts > 2**10.0:
    byts /= 2**10.0
    return '%.2fk' % byts
  return str(byts)
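A few sample values for _FormatBytes as written (note that the strict '>' comparison means an input of exactly 2**20 bytes formats in the kilobyte branch):

```python
assert _FormatBytes(512) == '512'
assert _FormatBytes(4096) == '4.00k'
assert _FormatBytes(5 * 2**20) == '5.00m'
assert _FormatBytes(2**20) == '1024.00k'  # strict '>' keeps 1 MiB in the k branch
```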
def _CalculateCompressedSize(file_path):
  CHUNK_SIZE = 256 * 1024
  compressor = zlib.compressobj()
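The rest of this function is cut off by the diff. As a sketch of the same pattern (streaming chunks through one zlib compressor so large files never need to be fully resident), assuming nothing about the file's exact body beyond the visible CHUNK_SIZE and compressor:

```python
import zlib

def _CalculateCompressedSizeSketch(file_path):
  """Hypothetical stand-in: returns the zlib-compressed size of a file,
  read in 256 KiB chunks to keep memory use bounded."""
  CHUNK_SIZE = 256 * 1024
  total = 0
  compressor = zlib.compressobj()
  with open(file_path, 'rb') as f:
    for chunk in iter(lambda: f.read(CHUNK_SIZE), b''):
      total += len(compressor.compress(chunk))
  return total + len(compressor.flush())
```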
@@ -869,7 +738,6 @@ def main():
  if args.estimate_patch_size:
    _PrintPatchSizeEstimate(args.apk, args.reference_apk_builder,
                            args.reference_apk_bucket, chartjson=chartjson)
  PrintPakAnalysis(args.apk, args.min_pak_resource_size, out_dir)
  if chartjson:
    results_path = os.path.join(args.output_dir, 'results-chart.json')
    logging.critical('Dumping json to %s', results_path)