Commit b256caff authored by dtu@chromium.org

[telemetry] Minor Cloud Storage script fixes.

- Fix error where listing failed if you didn't have access to all of the buckets.
- Fix bug where listing with shell completion (page_sets/*) would only give the first file.
- Skip moving files if the destination bucket is the same as the source bucket.
- Remove the checks that all files exist. With shell completion, the argument list may include files that don't have hash files; these are now simply ignored.
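
For context on the first fix: a bucket the caller cannot access is simply missing from the listing results, so lookups have to test membership before indexing. Below is a minimal, self-contained sketch of that pattern; find_file_buckets and the bucket names are hypothetical stand-ins for illustration, not the script's actual helpers.

BUCKETS = ['cloud-storage-public', 'cloud-storage-internal']  # hypothetical names

def find_file_buckets(file_hashes, bucket_contents):
  # Map each hash to the buckets that already contain it. A bucket the caller
  # could not list is absent from bucket_contents, so guard with a membership
  # test instead of indexing unconditionally.
  file_buckets = {}
  for file_hash in file_hashes:
    file_buckets[file_hash] = [
        bucket for bucket in BUCKETS
        if bucket in bucket_contents and file_hash in bucket_contents[bucket]]
  return file_buckets

# 'cloud-storage-internal' is missing, as if its listing had failed.
contents = {'cloud-storage-public': set(['abc123'])}
print(find_file_buckets(['abc123', 'def456'], contents))
# -> {'abc123': ['cloud-storage-public'], 'def456': []}

The first hunk of the diff below applies the same membership test inside _FindFilesInCloudStorage.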


BUG=None.
TEST=None.

Review URL: https://codereview.chromium.org/215423004

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@260435 0039d316-1c4b-4281-b951-d872f2087c98
parent 7f49ae01
@@ -57,7 +57,7 @@ def _FindFilesInCloudStorage(files):
     file_hash = cloud_storage.ReadHash(hash_path)
     file_buckets[file_path] = []
     for bucket in BUCKETS:
-      if file_hash in bucket_contents[bucket]:
+      if bucket in bucket_contents and file_hash in bucket_contents[bucket]:
         file_buckets[file_path].append(bucket)
   return file_buckets
@@ -85,7 +85,7 @@ class Ls(command_line.Command):
       for path in paths:
         if not os.path.isdir(path):
           yield path
-          return
+          continue
         if recursive:
           for root, _, filenames in os.walk(path):
@@ -121,11 +121,6 @@ class Mv(command_line.Command):
   def ProcessCommandLineArgs(cls, parser, args):
     args.bucket = BUCKET_ALIASES[args.bucket]
-    for path in args.files:
-      _, hash_path = _GetPaths(path)
-      if not os.path.exists(hash_path):
-        parser.error('File not found: %s' % hash_path)
   def Run(self, args):
     files = _FindFilesInCloudStorage(args.files)
@@ -134,14 +129,18 @@ class Mv(command_line.Command):
         raise IOError('%s not found in Cloud Storage.' % file_path)
     for file_path, buckets in sorted(files.iteritems()):
+      if args.bucket in buckets:
+        buckets.remove(args.bucket)
+        if not buckets:
+          logging.info('Skipping %s, no action needed.' % file_path)
+          continue
       # Move to the target bucket.
       file_hash = cloud_storage.ReadHash(file_path + '.sha1')
       cloud_storage.Move(buckets.pop(), args.bucket, file_hash)
       # Delete all additional copies.
       for bucket in buckets:
-        if bucket == args.bucket:
-          continue
         cloud_storage.Delete(bucket, file_hash)
@@ -152,13 +151,6 @@ class Rm(command_line.Command):
   def AddCommandLineArgs(cls, parser):
     parser.add_argument('files', nargs='+')
-  @classmethod
-  def ProcessCommandLineArgs(cls, parser, args):
-    for path in args.files:
-      _, hash_path = _GetPaths(path)
-      if not os.path.exists(hash_path):
-        parser.error('File not found: %s' % hash_path)
   def Run(self, args):
     files = _FindFilesInCloudStorage(args.files)
     for file_path, buckets in sorted(files.iteritems()):