Commit 6caaa909 authored by Andrew Grieve, committed by Commit Bot

Cronet: Make jar_src.py zero-out timestamps

Zeroed timestamps mean the build output is the same for each run,
which is generally desirable.

I checked that these two files were the same before/after:
 * cronet_api-src.jar
 * cronet_impl_common_java-src.jar

Where "the same" means they no longer contain directory entries
and also no "META-INF/MANIFEST.MF" file.

Bug: 383340
Change-Id: I4f644378dc3c33e429010509202011dd7b7f9f95
Reviewed-on: https://chromium-review.googlesource.com/c/1351425
Commit-Queue: agrieve <agrieve@chromium.org>
Reviewed-by: Paul Jensen <pauljensen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#612049}
parent 347c3a95
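The change below makes jar_src.py write its output through build_utils'
hermetic zip helpers instead of shelling out to the jar tool, so every entry
gets a fixed timestamp. The standalone sketch that follows is illustrative
only (it is not the actual AddToZipHermetic implementation; the timestamp
constant, file names, and contents are placeholders) and shows why a fixed
date_time makes the archive byte-for-byte identical across runs:

import zipfile

# Any fixed, valid zip timestamp works; the value below is only an example.
FIXED_TIMESTAMP = (2001, 1, 1, 0, 0, 0)

def add_hermetic(zip_file, name, data):
  # Building the ZipInfo by hand avoids zipfile stamping the current time.
  info = zipfile.ZipInfo(filename=name, date_time=FIXED_TIMESTAMP)
  zip_file.writestr(info, data)

with zipfile.ZipFile('example-src.jar', 'w') as z:  # placeholder output path
  add_hermetic(z, 'org/chromium/Example.java', b'// source contents')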
build_utils.py

@@ -354,7 +354,7 @@ def DoZip(inputs, output, base_dir=None, compress_fn=None,
   Args:
     inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
-    output: Destination .zip file.
+    output: Path, fileobj, or ZipFile instance to add files to.
     base_dir: Prefix to strip from inputs.
     compress_fn: Applied to each input to determine whether or not to compress.
         By default, items will be |zipfile.ZIP_STORED|.
@@ -368,12 +368,20 @@ def DoZip(inputs, output, base_dir=None, compress_fn=None,
   # Sort by zip path to ensure stable zip ordering.
   input_tuples.sort(key=lambda tup: tup[0])
-  with zipfile.ZipFile(output, 'w') as outfile:
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
+    out_zip = zipfile.ZipFile(output, 'w')
+
+  try:
     for zip_path, fs_path in input_tuples:
       if zip_prefix_path:
         zip_path = os.path.join(zip_prefix_path, zip_path)
       compress = compress_fn(zip_path) if compress_fn else None
-      AddToZipHermetic(outfile, zip_path, src_path=fs_path, compress=compress)
+      AddToZipHermetic(out_zip, zip_path, src_path=fs_path, compress=compress)
+  finally:
+    if output is not out_zip:
+      out_zip.close()


 def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
@@ -393,23 +401,21 @@ def MatchesGlob(path, filters):
   return filters and any(fnmatch.fnmatch(path, f) for f in filters)


-def MergeZips(output, input_zips, path_transform=None):
+def MergeZips(output, input_zips, path_transform=None, compress=None):
   """Combines all files from |input_zips| into |output|.

   Args:
-    output: Path or ZipFile instance to add files to.
+    output: Path, fileobj, or ZipFile instance to add files to.
     input_zips: Iterable of paths to zip files to merge.
     path_transform: Called for each entry path. Returns a new path, or None to
         skip the file.
+    compress: Overrides compression setting from origin zip entries.
   """
   path_transform = path_transform or (lambda p: p)
   added_names = set()

-  output_is_already_open = not isinstance(output, basestring)
-  if output_is_already_open:
-    assert isinstance(output, zipfile.ZipFile)
-    out_zip = output
-  else:
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
     out_zip = zipfile.ZipFile(output, 'w')

   try:
@@ -426,11 +432,18 @@ def MergeZips(output, input_zips, path_transform=None):
            continue
          already_added = dst_name in added_names
          if not already_added:
-           AddToZipHermetic(out_zip, dst_name, data=in_zip.read(info),
-                            compress=info.compress_type != zipfile.ZIP_STORED)
+           if compress is not None:
+             compress_entry = compress
+           else:
+             compress_entry = info.compress_type != zipfile.ZIP_STORED
+           AddToZipHermetic(
+               out_zip,
+               dst_name,
+               data=in_zip.read(info),
+               compress=compress_entry)
            added_names.add(dst_name)
   finally:
-    if not output_is_already_open:
+    if output is not out_zip:
       out_zip.close()
@@ -561,7 +574,7 @@ def ExpandFileArgs(args):
       for k in lookup_path[1:]:
         expansion = expansion[k]

-      # This should match ParseGNList. The output is either a GN-formatted list
+      # This should match ParseGnList. The output is either a GN-formatted list
       # or a literal (with no quotes).
       if isinstance(expansion, list):
         new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(expansion)
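Taken together, the build_utils changes let DoZip and MergeZips append to an
already-open ZipFile rather than always opening one themselves. A rough usage
sketch under that assumption (the file names and sys.path setup below are
hypothetical; the real caller is jar_src.py, shown next):

import sys
import zipfile

sys.path.append('path/to/dir/containing/build_utils')  # placeholder
import build_utils

# One archive is opened once and shared by both helpers, so loose sources and
# existing source jars all land in a single, hermetically timestamped zip.
with zipfile.ZipFile('combined-src.jar', 'w', zipfile.ZIP_DEFLATED) as z:
  build_utils.DoZip(['org/chromium/Foo.java'], z, base_dir='some/src/dir')
  build_utils.MergeZips(z, ['other-src.jar'], compress=True)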
jar_src.py

@@ -4,10 +4,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

-import optparse
+import argparse
 import os
 import sys
-import tempfile
+import zipfile

 REPOSITORY_ROOT = os.path.abspath(os.path.join(
     os.path.dirname(__file__), '..', '..', '..'))
@@ -17,59 +17,36 @@ import build_utils

 JAVA_PACKAGE_PREFIX = 'org/chromium/'


-def JarSources(src_dir, src_files, jar_path):
-  # The paths of the files in the jar will be the same as they are passed in to
-  # the command. Because of this, the command should be run in
-  # options.src_dir so the .java file paths in the jar are correct.
-  jar_cwd = src_dir
-  jar_path = os.path.abspath(jar_path)
-  if os.path.exists(jar_path):
-    jar_cmd = ['jar', 'uf', jar_path]
-  else:
-    jar_cmd = ['jar', 'cf', jar_path]
-  jar_cmd.extend(src_files)
-  build_utils.CheckOutput(jar_cmd, cwd=jar_cwd)
-
-
-# Uncompress source jars so that they can be combined with other sources
-def UnzipSourceJar(jar, unzipped_jar_path):
-  if os.path.exists(jar):
-    jar_cmd = ['jar', 'xf', os.path.abspath(jar)]
-    build_utils.CheckOutput(jar_cmd, cwd=unzipped_jar_path)
-  else:
-    raise Exception('Jar file does not exist %s' % jar)
-
-
 def main():
-  parser = optparse.OptionParser()
+  parser = argparse.ArgumentParser()
   build_utils.AddDepfileOption(parser)
-  parser.add_option('--excluded-classes',
+  parser.add_argument(
+      '--excluded-classes',
       help='A list of .class file patterns to exclude from the jar.')
-  parser.add_option('--src-search-dirs', action="append",
+  parser.add_argument(
+      '--src-search-dirs',
+      action='append',
       help='A list of directories that should be searched'
           ' for the source files.')
-  parser.add_option('--src-files', action="append",
-                    help='A list of source files to jar.')
+  parser.add_argument(
+      '--src-files', action='append', help='A list of source files to jar.')
-  parser.add_option('--src-jars', action="append",
+  parser.add_argument(
+      '--src-jars',
+      action='append',
       help='A list of source jars to include in addition to source files.')
-  parser.add_option('--src-list-files', action="append",
+  parser.add_argument(
+      '--src-list-files',
+      action='append',
       help='A list of files that contain a list of sources,'
           ' e.g. a list of \'.sources\' files generated by GN.')
-  parser.add_option('--jar-path', help='Jar output path.')
-  parser.add_option('--stamp', help='Path to touch on success.')
-  options, _ = parser.parse_args()
+  parser.add_argument('--jar-path', help='Jar output path.', required=True)

-  # A temporary directory to put the output of jar files.
-  unzipped_jar_path = None
-  if options.src_jars:
-    unzipped_jar_path = tempfile.mkdtemp(dir=os.path.dirname(options.jar_path))
-    jar_list = []
-    for gn_list in options.src_jars:
-      jar_list.extend(build_utils.ParseGnList(gn_list))
-    for jar in jar_list:
-      UnzipSourceJar(jar, unzipped_jar_path)
+  options = parser.parse_args()
+
+  src_jars = []
+  for gn_list in options.src_jars:
+    src_jars.extend(build_utils.ParseGnList(gn_list))

   src_search_dirs = []
   for gn_src_search_dirs in options.src_search_dirs:
@@ -115,10 +92,11 @@ def main():
   for src_file in src_files:
     number_of_file_instances = 0
     for src_search_dir in src_search_dirs:
-      if os.path.isfile(os.path.join(src_search_dir, src_file)):
+      target_path = os.path.join(src_search_dir, src_file)
+      if os.path.isfile(target_path):
         number_of_file_instances += 1
         if not predicate or predicate(src_file):
-          dir_to_files_map[src_search_dir].append(src_file)
+          dir_to_files_map[src_search_dir].append(target_path)

     if (number_of_file_instances > 1):
       raise Exception(
           'There is more than one instance of file %s in %s'
@@ -127,39 +105,28 @@ def main():
       raise Exception(
           'Unable to find file %s in %s' % (src_file, src_search_dirs))

-  # Delete the old output file if any.
-  if os.path.isfile(options.jar_path):
-    os.remove(options.jar_path)
-
   # Jar the sources from every source search directory.
-  for src_search_dir in src_search_dirs:
-    if len(dir_to_files_map[src_search_dir]) > 0:
-      JarSources(src_search_dir, dir_to_files_map[src_search_dir],
-                 options.jar_path)
-    else:
-      raise Exception(
-          'Directory %s does not contain any files and can be'
-          ' removed from the list of directories to search' % src_search_dir)
-
-  # Jar additional src jars
-  if unzipped_jar_path:
-    JarSources(unzipped_jar_path, ['.'], options.jar_path)
+  with build_utils.AtomicOutput(options.jar_path) as o, \
+      zipfile.ZipFile(o, 'w', zipfile.ZIP_DEFLATED) as z:
+    for src_search_dir in src_search_dirs:
+      subpaths = dir_to_files_map[src_search_dir]
+      if subpaths:
+        build_utils.DoZip(subpaths, z, base_dir=src_search_dir)
+      else:
+        raise Exception(
+            'Directory %s does not contain any files and can be'
+            ' removed from the list of directories to search' % src_search_dir)
+
+    # Jar additional src jars
+    if src_jars:
+      build_utils.MergeZips(z, src_jars, compress=True)

   if options.depfile:
     deps = []
-    for src_dir in src_search_dirs:
-      for root, _, filenames in os.walk(src_dir):
-        deps.extend(os.path.join(root, f) for f in filenames)
+    for sources in dir_to_files_map.itervalues():
+      deps.extend(sources)
     # Srcjar deps already captured in GN rules (no need to list them here).
     build_utils.WriteDepfile(options.depfile, options.jar_path, deps)

-  # Clean up temporary output directory.
-  if unzipped_jar_path:
-    build_utils.DeleteDirectory(unzipped_jar_path)
-
-  if options.stamp:
-    build_utils.Touch(options.stamp)
-

 if __name__ == '__main__':
   sys.exit(main())
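For reference, the "same before/after" check mentioned in the commit message
can be reproduced along these lines (a sketch, not the comparison actually
used; 'old.jar' and 'new.jar' are placeholder names), ignoring the directory
entries and manifest that the new tool no longer emits:

import zipfile

def jar_contents(path):
  # Map of entry name -> bytes, skipping directory entries and the manifest.
  with zipfile.ZipFile(path) as z:
    return {name: z.read(name)
            for name in z.namelist()
            if not name.endswith('/') and name != 'META-INF/MANIFEST.MF'}

assert jar_contents('old.jar') == jar_contents('new.jar')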