Commit c8bd2273 authored by Takuto Ikuta, committed by Commit Bot

Revert "chrome_repack_locales: compress .pak files on Chrome OS"

This reverts commit d21a8cdd.

Reason for revert: speculative revert for
https://ci.chromium.org/p/chromium/builders/ci/Deterministic%20Linux/25684

Original change's description:
> chrome_repack_locales: compress .pak files on Chrome OS
> 
> This CL adds support for compressing .pak files, and adds compression
> for locales/*.pak on chromeos.
> 
> Note: We keep the extension for compressed .pak files to avoid
> dependency complexity in tests.
> 
> Bug: 1017864
> Change-Id: Ifee472b7e15673fcb835ae75aca768265592460b
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1912621
> Commit-Queue: Steven Bennetts <stevenjb@chromium.org>
> Reviewed-by: Andrew Grieve <agrieve@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#715173}

TBR=stevenjb@chromium.org,sky@chromium.org,agrieve@chromium.org

Change-Id: I701b27c18bd5db83d078e413c9a14915a83c9329
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: 1017864
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1915653
Reviewed-by: Takuto Ikuta <tikuta@chromium.org>
Commit-Queue: Takuto Ikuta <tikuta@chromium.org>
Cr-Commit-Position: refs/heads/master@{#715203}
parent e0b114ef
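
For context, the change being reverted repacks each locale .pak through a
temporary file and then gzip-compresses the result in place, keeping the .pak
extension. A minimal sketch of that flow (not Chromium code; `repack` stands in
for data_pack.RePack, and the helper name is made up for illustration):

    import gzip
    import shutil
    import tempfile


    def repack_and_compress(repack, output_pak_file, input_pak_files):
      """Repack into a temp file, then gzip the result to the final .pak path."""
      with tempfile.NamedTemporaryFile() as temp_outfile:
        # Write the uncompressed pack to the temporary path first.
        repack(temp_outfile.name, input_pak_files)
        temp_outfile.seek(0)
        # Compress into the real output; the file name keeps its .pak extension.
        with gzip.open(output_pak_file, 'wb') as compressed:
          shutil.copyfileobj(temp_outfile, compressed)
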
@@ -78,7 +78,6 @@ template("chrome_repack_locales") {
       "//remoting/resources",
       "//ui/chromeos/strings",
     ]
-    compress = true
   }
   if (enable_extensions) {
     source_patterns +=

@@ -215,8 +215,7 @@ def ReadGrdInfo(grd_file):
 def RePack(output_file, input_files, whitelist_file=None,
-           suppress_removed_key_output=False,
-           output_info_filepath=None):
+           suppress_removed_key_output=False):
   """Write a new data pack file by combining input pack files.

   Args:
@@ -227,7 +226,6 @@ def RePack(output_file, input_files, whitelist_file=None,
       all resources.
     suppress_removed_key_output: allows the caller to suppress the output from
       RePackFromDataPackStrings.
-    output_info_file: If not None, specify the output .info filepath.

   Raises:
     KeyError: if there are duplicate keys or resource encoding is
@@ -245,9 +243,7 @@ def RePack(output_file, input_files, whitelist_file=None,
   resources, encoding = RePackFromDataPackStrings(
       inputs, whitelist, suppress_removed_key_output)
   WriteDataPack(resources, output_file, encoding)
-  if output_info_filepath is None:
-    output_info_filepath = output_file + '.info'
-  with open(output_info_filepath, 'w') as output_info_file:
+  with open(output_file + '.info', 'w') as output_info_file:
     for filename in input_info_files:
       with open(filename, 'r') as info_file:
         output_info_file.writelines(info_file.readlines())
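
The hunks above drop the optional output_info_filepath parameter; after the
revert, RePack always writes the concatenated .info file next to the output
pack. An illustrative sketch of the behavior on both sides of the revert (the
helper name is hypothetical; the body mirrors the diff):

    def _write_info(output_file, input_info_files, output_info_filepath=None):
      # Pre-revert: the caller may choose the .info path; the default is the
      # output pack path plus '.info'. Post-revert only the default remains.
      if output_info_filepath is None:
        output_info_filepath = output_file + '.info'
      with open(output_info_filepath, 'w') as output_info_file:
        # Concatenate the per-input .info files into one output .info file.
        for filename in input_info_files:
          with open(filename, 'r') as info_file:
            output_info_file.writelines(info_file.readlines())
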
@@ -12,12 +12,9 @@ https://dev.chromium.org/developers/design-documents/linuxresourcesandlocalizeds
 from __future__ import print_function

 import argparse
-import gzip
 import hashlib
 import os
-import shutil
 import sys
-import tempfile

 # Import grit first to get local third_party modules.
 import grit  # pylint: disable=ungrouped-imports,unused-import
@@ -28,24 +25,8 @@ from grit.format import data_pack
 def _RepackMain(args):
-  output_info_filepath = args.output_pak_file + '.info'
-  if args.compress:
-    # If the file needs to be compressed, call RePack with a tempfile path,
-    # then compress the tempfile to args.output_pak_file.
-    temp_outfile = tempfile.NamedTemporaryFile()
-    out_path = temp_outfile.name
-    # Strip any non .pak extension from the .info output file path.
-    splitext = os.path.splitext(args.output_pak_file)
-    if splitext[1] != '.pak':
-      output_info_filepath = splitext[0] + '.info'
-  else:
-    out_path = args.output_pak_file
-  data_pack.RePack(out_path, args.input_pak_files, args.whitelist,
-                   args.suppress_removed_key_output,
-                   output_info_filepath=output_info_filepath)
-  if args.compress:
-    with gzip.open(args.output_pak_file, 'wb') as output_pak_file:
-      shutil.copyfileobj(temp_outfile, output_pak_file)
+  data_pack.RePack(args.output_pak_file, args.input_pak_files, args.whitelist,
+                   args.suppress_removed_key_output)


 def _ExtractMain(args):
@@ -159,8 +140,6 @@ def main():
       help='Path to a whitelist used to filter output pak file resource IDs.')
   sub_parser.add_argument('--suppress-removed-key-output', action='store_true',
       help='Do not log which keys were removed by the whitelist.')
-  sub_parser.add_argument('--compress', dest='compress', action='store_true',
-      default=False, help='Compress output_pak_file using gzip.')
   sub_parser.set_defaults(func=_RepackMain)

   sub_parser = sub_parsers.add_parser('extract', help='Extracts pak file')
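
Because compressed packs keep their .pak extension (see the note in the
original change description), a consumer cannot tell the two formats apart by
file name alone; it would have to sniff the gzip magic bytes. A hypothetical
helper, not part of this change, illustrating that check:

    import gzip


    def _read_pak_bytes(path):
      """Return raw pack bytes whether or not the file is gzip-compressed."""
      with open(path, 'rb') as f:
        magic = f.read(2)
      # 0x1f 0x8b is the gzip magic number.
      opener = gzip.open if magic == b'\x1f\x8b' else open
      with opener(path, 'rb') as f:
        return f.read()
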
@@ -20,9 +20,6 @@ import("//tools/grit/grit_rule.gni")
 #     Path of the file in the application bundle, defaults to
 #     {{bundle_resources_dir}}/{{source_file_part}}.
 #
-#   compress [optional]
-#     Gzip the resulting bundle (and append .gz to the output name).
-#
 #   deps [optional]
 #   public_deps [optional]
 #   visibility [optional]
@@ -35,8 +32,6 @@ template("repack") {
     _repack_target_name = "${target_name}__repack"
   }

-  _compress = defined(invoker.compress) && invoker.compress
-
   action(_repack_target_name) {
     forward_variables_from(invoker,
                            [
@@ -71,9 +66,6 @@ template("repack") {
     }
     args += [ rebase_path(invoker.output, root_build_dir) ]
     args += rebase_path(invoker.sources, root_build_dir)
-    if (_compress) {
-      args += [ "--compress" ]
-    }
   }

   if (_copy_data_to_bundle) {
@@ -161,7 +153,6 @@ template("repack_locales") {
                            [
                              "copy_data_to_bundle",
                              "bundle_output",
-                             "compress",
                              "deps",
                              "repack_whitelist",
                              "testonly",