Commit 6476de71 authored by Mikhail Khokhlov, committed by Commit Bot

[tools/perf] Store power profiles in binary_deps_manager

Adds support for internal-only files in binary_deps_manager. Adds a
script to convert a device power profile to a SQL script and upload it
to the internal cloud bucket.

Bug: b/158459921
Change-Id: I9c3fd954bae350497cdf366ce856794b78835b0f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2235617
Reviewed-by: Eric Seckler <eseckler@chromium.org>
Commit-Queue: Mikhail Khokhlov <khokhlov@google.com>
Cr-Commit-Position: refs/heads/master@{#776620}
parent 2729de03
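For orientation before the diff: FetchDataFile and the POWER_PROFILE_SQL constant added below are the intended consumer path for the new dependency. A minimal sketch of how a caller would fetch the uploaded profile, using only names introduced in this commit:

    from core.perfetto_binary_roller import binary_deps_manager
    from core.tbmv3 import trace_processor

    # Looks up 'power_profile.sql' in binary_deps.json, downloads it from
    # the internal bucket, verifies the hash, and returns the local path.
    sql_path = binary_deps_manager.FetchDataFile(trace_processor.POWER_PROFILE_SQL)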
@@ -12,5 +12,9 @@
     "hash": "5c546185954f415277a0510a552b950b0ac82749",
     "remote_path": "perfetto_binaries/trace_processor_shell/linux/f19675c75eab7f5f0ec5bd048fd6842896f146d0/trace_processor_shell"
   }
-  }
+  },
+  "power_profile.sql": {
+    "hash": "33698fec6ab9e58c656d385e5daee52974ae01c2",
+    "remote_path": "perfetto_data/power_profile.sql/20200608T143524/power_profile.sql"
+  }
 }
\ No newline at end of file
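Note the structural difference this hunk introduces: binary entries are keyed by name and then platform (config[binary_name][platform]), while data-file entries are keyed by name alone (config[data_file_name]), so power_profile.sql sits directly at the top level. The resulting file shape, with hashes shortened for readability:

    {
        "trace_processor_shell": {
            "linux": {
                "hash": "5c5461...",
                "remote_path": "perfetto_binaries/trace_processor_shell/linux/.../trace_processor_shell"
            }
        },
        "power_profile.sql": {
            "hash": "33698f...",
            "remote_path": "perfetto_data/power_profile.sql/20200608T143524/power_profile.sql"
        }
    }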
@@ -11,8 +11,11 @@ import py_utils
 from py_utils import cloud_storage
 from py_utils import tempfile_ext
 
-CS_BUCKET = cloud_storage.PUBLIC_BUCKET
-CS_FOLDER = 'perfetto_binaries'
+# Binaries are publicly readable, data files are for internal use only.
+BINARY_BUCKET = cloud_storage.PUBLIC_BUCKET
+BINARY_CS_FOLDER = 'perfetto_binaries'
+DATA_BUCKET = cloud_storage.INTERNAL_BUCKET
+DATA_CS_FOLDER = 'perfetto_data'
 LATEST_FILENAME = 'latest'
 LOCAL_STORAGE_FOLDER = os.path.join(os.path.dirname(__file__), 'bin')
 CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'binary_deps.json')
@@ -30,7 +33,7 @@ def _GetHostPlatform():
 
 def _CalculateHash(remote_path):
   with tempfile_ext.NamedTemporaryFile() as f:
     f.close()
-    cloud_storage.Get(CS_BUCKET, remote_path, f.name)
+    cloud_storage.Get(BINARY_BUCKET, remote_path, f.name)
     return cloud_storage.CalculateHash(f.name)
@@ -38,10 +41,12 @@ def _SetLatestPathForBinary(binary_name, platform, latest_path):
   with tempfile_ext.NamedTemporaryFile() as latest_file:
     latest_file.write(latest_path)
     latest_file.close()
-    remote_latest_file = posixpath.join(CS_FOLDER, binary_name, platform,
+    remote_latest_file = posixpath.join(BINARY_CS_FOLDER, binary_name, platform,
                                         LATEST_FILENAME)
-    cloud_storage.Insert(
-        CS_BUCKET, remote_latest_file, latest_file.name, publicly_readable=True)
+    cloud_storage.Insert(BINARY_BUCKET,
+                         remote_latest_file,
+                         latest_file.name,
+                         publicly_readable=True)
 
 
 def UploadHostBinary(binary_name, binary_path, version):
@@ -54,20 +59,22 @@ def UploadHostBinary(binary_name, binary_path, version):
   """
   filename = os.path.basename(binary_path)
   platform = _GetHostPlatform()
-  remote_path = posixpath.join(CS_FOLDER, binary_name, platform, version,
+  remote_path = posixpath.join(BINARY_CS_FOLDER, binary_name, platform, version,
                                filename)
-  if not cloud_storage.Exists(CS_BUCKET, remote_path):
-    cloud_storage.Insert(
-        CS_BUCKET, remote_path, binary_path, publicly_readable=True)
+  if not cloud_storage.Exists(BINARY_BUCKET, remote_path):
+    cloud_storage.Insert(BINARY_BUCKET,
+                         remote_path,
+                         binary_path,
+                         publicly_readable=True)
   _SetLatestPathForBinary(binary_name, platform, remote_path)
 
 
 def GetLatestPath(binary_name, platform):
   with tempfile_ext.NamedTemporaryFile() as latest_file:
     latest_file.close()
-    remote_path = posixpath.join(CS_FOLDER, binary_name, platform,
+    remote_path = posixpath.join(BINARY_CS_FOLDER, binary_name, platform,
                                  LATEST_FILENAME)
-    cloud_storage.Get(CS_BUCKET, remote_path, latest_file.name)
+    cloud_storage.Get(BINARY_BUCKET, remote_path, latest_file.name)
     with open(latest_file.name) as latest:
       return latest.read()
@@ -106,9 +113,42 @@ def FetchHostBinary(binary_name):
   expected_hash = config[binary_name][platform]['hash']
   filename = posixpath.basename(remote_path)
   local_path = os.path.join(LOCAL_STORAGE_FOLDER, filename)
-  cloud_storage.Get(CS_BUCKET, remote_path, local_path)
+  cloud_storage.Get(BINARY_BUCKET, remote_path, local_path)
   if cloud_storage.CalculateHash(local_path) != expected_hash:
     raise RuntimeError('The downloaded binary has wrong hash.')
   mode = os.stat(local_path).st_mode
   os.chmod(local_path, mode | stat.S_IXUSR)
   return local_path
+
+
+def FetchDataFile(data_file_name):
+  """Download the file from the cloud."""
+  with open(CONFIG_PATH) as f:
+    config = json.load(f)
+  remote_path = config[data_file_name]['remote_path']
+  expected_hash = config[data_file_name]['hash']
+  filename = posixpath.basename(remote_path)
+  local_path = os.path.join(LOCAL_STORAGE_FOLDER, filename)
+  cloud_storage.Get(DATA_BUCKET, remote_path, local_path)
+  if cloud_storage.CalculateHash(local_path) != expected_hash:
+    raise RuntimeError('The downloaded data file has wrong hash.')
+  return local_path
+
+
+def UploadAndSwitchDataFile(data_file_name, data_file_path, version):
+  """Upload the script to the cloud and update config to use the new version."""
+  filename = os.path.basename(data_file_path)
+  remote_path = posixpath.join(DATA_CS_FOLDER, data_file_name, version,
+                               filename)
+  if not cloud_storage.Exists(DATA_BUCKET, remote_path):
+    cloud_storage.Insert(DATA_BUCKET,
+                         remote_path,
+                         data_file_path,
+                         publicly_readable=False)
+
+  with open(CONFIG_PATH) as f:
+    config = json.load(f)
+  config[data_file_name]['remote_path'] = remote_path
+  config[data_file_name]['hash'] = cloud_storage.CalculateHash(data_file_path)
+  with open(CONFIG_PATH, 'w') as f:
+    json.dump(config, f, indent=4, separators=(',', ': '))
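The two new functions form an upload/fetch roundtrip keyed by the dependency name in binary_deps.json. A usage sketch (the local path and version string here are made up):

    # Upload a new revision to the internal bucket (not publicly readable)
    # and point the config entry at it:
    binary_deps_manager.UploadAndSwitchDataFile(
        'power_profile.sql', '/tmp/power_profile.sql', '20200608T143524')

    # Later, on a checkout with internal-bucket access, fetch exactly the
    # version recorded in the config and verify its hash:
    local_path = binary_deps_manager.FetchDataFile('power_profile.sql')

UploadAndSwitchDataFile skips the upload when the remote object already exists but always rewrites the config entry, so re-running it with the same version is harmless.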
@@ -132,3 +132,46 @@ class BinaryDepsManagerTests(unittest.TestCase):
     get_os_patch.return_value = 'testos'
     with self.assertRaises(RuntimeError):
       binary_deps_manager.FetchHostBinary('dep')
+
+  def testUploadAndSwitchDataFile(self):
+    self.writeConfig({'data_dep': {'remote_path': 'old/path/to/data'}})
+    new_path = 'new/path/to/data'
+    with mock.patch('py_utils.cloud_storage.Exists') as exists_patch:
+      with mock.patch('py_utils.cloud_storage.Insert') as insert_patch:
+        with mock.patch('py_utils.cloud_storage.CalculateHash') as hash_patch:
+          exists_patch.return_value = False
+          hash_patch.return_value = '123'
+          binary_deps_manager.UploadAndSwitchDataFile('data_dep', new_path,
+                                                      'abc123')
+    insert_patch.assert_called_once_with(
+        'chrome-telemetry',
+        'perfetto_data/data_dep/abc123/data',
+        'new/path/to/data',
+        publicly_readable=False,
+    )
+    self.assertEqual(
+        self.readConfig(), {
+            'data_dep': {
+                'remote_path': 'perfetto_data/data_dep/abc123/data',
+                'hash': '123',
+            }
+        })
+
+  def testFetchDataFile(self):
+    remote_path = 'remote/path/to/data'
+    self.writeConfig({'data_dep': {
+        'remote_path': remote_path,
+        'hash': '123',
+    }})
+    with mock.patch('py_utils.cloud_storage.Get') as get_patch:
+      with mock.patch('py_utils.cloud_storage.CalculateHash') as hash_patch:
+        hash_patch.return_value = '123'
+        local_path = binary_deps_manager.FetchDataFile('data_dep')
+    self.assertEqual(os.path.basename(local_path), 'data')
+    get_patch.assert_called_once_with('chrome-telemetry', remote_path,
+                                      local_path)
#!/usr/bin/env vpython
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import datetime
import os
import sys
import xml.etree.ElementTree as ET

# Add tools/perf to sys.path.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))

from core import path_util
path_util.AddPyUtilsToPath()
path_util.AddTracingToPath()

from core.perfetto_binary_roller import binary_deps_manager
from core.tbmv3 import trace_processor

from py_utils import tempfile_ext


def ExtractValues(xml_path):
  root = ET.parse(xml_path).getroot()

  speeds = []
  power = []
  clusters = []
  for array in root.iter('array'):
    if array.get('name') == 'cpu.clusters.cores':
      clusters = [int(value.text) for value in array.iter('value')]
    if array.get('name').startswith('cpu.core_speeds.'):
      speeds.append([int(value.text) for value in array.iter('value')])
    if array.get('name').startswith('cpu.core_power.'):
      power.append([float(value.text) for value in array.iter('value')])

  values = []
  cpu = 0
  for cluster, n_cpus in enumerate(clusters):
    for _ in range(n_cpus):
      for freq, drain in zip(speeds[cluster], power[cluster]):
        # TODO(khokhlov): Remove the correction when power profiles are updated.
        corrected_drain = drain / n_cpus
        values.append((cpu, cluster, freq, corrected_drain))
      cpu += 1

  return values


def ExportProfiles(xml_path, sql_path):
  sql_values = ['(%s, %s, %s, %s)' % v for v in ExtractValues(xml_path)]
  with open(sql_path, 'w') as sql_file:
    sql_file.write('DROP TABLE IF EXISTS power_profile;\n\n')
    sql_file.write('CREATE TABLE power_profile '
                   '(cpu INT, cluster INT, freq INT, power DOUBLE);\n\n')
    sql_file.write('INSERT INTO power_profile VALUES\n')
    sql_file.write(',\n'.join(sql_values))
    sql_file.write(';\n')


def main(args):
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--xml', help='Path to the XML file with the device power profile.')
  args = parser.parse_args(args)

  with tempfile_ext.NamedTemporaryDirectory() as tempdir:
    sql_path = os.path.join(tempdir, trace_processor.POWER_PROFILE_SQL)
    ExportProfiles(args.xml, sql_path)
    version = datetime.datetime.now().strftime('%Y%m%dT%H%M%S')
    binary_deps_manager.UploadAndSwitchDataFile(
        trace_processor.POWER_PROFILE_SQL, sql_path, version)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
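To make the parsing concrete: the input is an Android power_profile.xml, where the <array> names ExtractValues looks for are cpu.clusters.cores (number of cores per cluster), cpu.core_speeds.clusterN (frequencies in kHz) and cpu.core_power.clusterN (current draw in mA). A self-contained check with made-up numbers, one cluster of two cores:

    import tempfile

    SAMPLE_XML = """
    <device name="Android">
      <array name="cpu.clusters.cores"><value>2</value></array>
      <array name="cpu.core_speeds.cluster0">
        <value>300000</value><value>1800000</value>
      </array>
      <array name="cpu.core_power.cluster0">
        <value>6.0</value><value>60.0</value>
      </array>
    </device>
    """

    with tempfile.NamedTemporaryFile(suffix='.xml', delete=False) as f:
      f.write(SAMPLE_XML.encode())
    # Every core gets each (freq, drain) pair; the drain is divided evenly
    # among the cluster's cores by the correction in ExtractValues.
    assert ExtractValues(f.name) == [
        (0, 0, 300000, 3.0), (0, 0, 1800000, 30.0),
        (1, 0, 300000, 3.0), (1, 0, 1800000, 30.0),
    ]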
@@ -19,7 +19,7 @@ TP_BINARY_NAME = 'trace_processor_shell'
 EXPORT_JSON_QUERY_TEMPLATE = 'select export_json(%s)\n'
 METRICS_PATH = os.path.realpath(os.path.join(os.path.dirname(__file__),
                                              'metrics'))
+POWER_PROFILE_SQL = 'power_profile.sql'
 
 MetricFiles = namedtuple('MetricFiles', ('sql', 'proto'))