Add generation version number to GCS first-class entries.

Bug:b/328065301
Change-Id: I971c390a8f90e4f9734cd1e3d73ddb5fcf712b6c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5413954
Commit-Queue: Joanna Wang <jojwang@chromium.org>
Reviewed-by: Robbie Iannucci <iannucci@chromium.org>
Authored by Joanna Wang, committed by LUCI CQ
parent c810ef4b43
commit f1098b6bfe

@@ -137,6 +137,7 @@ _GCLIENT_DEPS_SCHEMA = _NodeDictSchema({
'object_name': str,
'sha256sum': str,
'size_bytes': int,
'generation': int,
schema.Optional('output_file'): str,
schema.Optional('condition'): str,
schema.Optional('dep_type', default='gcs'): str,
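
gclient_eval validates DEPS against this schema with the `schema` package (that is where `schema.Optional` above comes from), so a first-class GCS object entry that omits the new `generation` key will now fail schema validation. A minimal standalone sketch of that behaviour; the dict below is a trimmed approximation of the per-object schema, not the full _GCLIENT_DEPS_SCHEMA, and the values are illustrative placeholders:

from schema import Optional, Schema, SchemaError

# Trimmed approximation of the per-object schema for a first-class GCS dep.
GCS_OBJECT_SCHEMA = Schema({
    'object_name': str,
    'sha256sum': str,
    'size_bytes': int,
    'generation': int,                  # newly required by this change
    Optional('output_file'): str,
})

entry = {
    'object_name': 'some/object',       # placeholder values
    'sha256sum': 'abcd123',
    'size_bytes': 10000,
}

try:
    GCS_OBJECT_SCHEMA.validate(entry)
except SchemaError as e:
    print('rejected:', e)               # missing key: 'generation'

entry['generation'] = 1542380408102454
GCS_OBJECT_SCHEMA.validate(entry)       # passes once the generation is pinned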

@@ -897,6 +897,7 @@ deps = {
'dep_type': 'gcs',
'sha256sum': 'abcd123',
'size_bytes': 10000,
'generation': 1542380408102454,
},
'src/another_gcs_dep': {
'bucket': '456bucket',
@@ -904,6 +905,7 @@ deps = {
'dep_type': 'gcs',
'sha256sum': 'abcd123',
'size_bytes': 10000,
'generation': 1542380408102455,
},
'src/gcs_dep_with_output_file': {
'bucket': '789bucket',
@@ -912,6 +914,7 @@ deps = {
'sha256sum': 'abcd123',
'output_file': 'clang-format-no-extract',
'size_bytes': 10000,
'generation': 1542380408102456,
},
}"""),
'origin':
@@ -947,6 +950,7 @@ deps = {
'dep_type': 'gcs',
'sha256sum': 'abcd123',
'size_bytes': 10000,
'generation': 1542380408102454,
},
}
""",

@@ -43,10 +43,44 @@ class UploadTests(unittest.TestCase):
shutil.rmtree(self.temp_dir)
sys.stdin = sys.__stdin__
def test_upload_single_file_missing_generation(self):
file = self.lorem_ipsum
object_name = 'gs_object_name/version123'
self.gsutil.add_expected(0, "", "") # ls call
self.gsutil.add_expected(0, "", 'weird output') # cp call
actual = upload_to_google_storage_first_class.upload_to_google_storage(
file=file,
base_url=self.base_url,
object_name=object_name,
gsutil=self.gsutil,
force=True,
gzip=False,
dry_run=False)
self.assertEqual(
self.gsutil.history,
[('check_call', ('ls', '%s/%s' % (self.base_url, object_name))),
('check_call',
('-h', 'Cache-Control:public, max-age=31536000', 'cp', '-v', file,
'%s/%s' % (self.base_url, object_name)))])
self.assertEqual(
actual, upload_to_google_storage_first_class.MISSING_GENERATION_MSG)
def test_upload_single_file(self):
file = self.lorem_ipsum
object_name = 'gs_object_name/version123'
upload_to_google_storage_first_class.upload_to_google_storage(
self.gsutil.add_expected(0, "", "") # ls call
expected_generation = '1712070862651948'
expected_cp_status = (
f'Copying file://{file} [Content-Type=text/plain].\n'
f'Created: {self.base_url}/{object_name}#{expected_generation}\n'
'Operation completed over 1 objects/8.0 B.')
self.gsutil.add_expected(0, "", expected_cp_status) # cp call
actual = upload_to_google_storage_first_class.upload_to_google_storage(
file=file,
base_url=self.base_url,
object_name=object_name,
@@ -54,11 +88,14 @@ class UploadTests(unittest.TestCase):
force=True,
gzip=False,
dry_run=False)
self.assertEqual(self.gsutil.history, [
('check_call', ('ls', '%s/%s' % (self.base_url, object_name))),
('check_call', ('-h', 'Cache-Control:public, max-age=31536000',
'cp', file, '%s/%s' % (self.base_url, object_name)))
])
self.assertEqual(
self.gsutil.history,
[('check_call', ('ls', '%s/%s' % (self.base_url, object_name))),
('check_call',
('-h', 'Cache-Control:public, max-age=31536000', 'cp', '-v', file,
'%s/%s' % (self.base_url, object_name)))])
self.assertEqual(actual, expected_generation)
def test_upload_single_file_remote_exists(self):
file = self.lorem_ipsum
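
These tests queue canned gsutil results with add_expected() and then assert on gsutil.history. The real fake lives in the depot_tools test support code; the sketch below only illustrates the interface the tests appear to rely on (inferred from the calls above, not copied from the actual helper):

class FakeGsutil:
    """Returns queued (returncode, stdout, stderr) tuples and records each call."""

    def __init__(self):
        self.expected = []   # queued results, consumed in FIFO order
        self.history = []    # ('check_call', args) tuples, in call order

    def add_expected(self, returncode, out, err):
        self.expected.append((returncode, out, err))

    def check_call(self, *args):
        self.history.append(('check_call', args))
        return self.expected.pop(0)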

@@ -17,6 +17,10 @@ import tarfile
from download_from_google_storage import Gsutil
from download_from_google_storage import GSUTIL_DEFAULT_PATH
MISSING_GENERATION_MSG = (
'missing generation number, please retrieve from Cloud Storage '
'before saving to DEPS')
USAGE_STRING = """%prog [options] target [target2 ...].
Target(s) are the files or directories intended to be uploaded to Google Storage.
If a single target is a directory, it will be compressed and uploaded as a
@@ -108,7 +112,7 @@ def get_sha256sum(filename: str) -> str:
def upload_to_google_storage(file: str, base_url: str, object_name: str,
gsutil: Gsutil, force: bool, gzip: str,
dry_run: bool):
dry_run: bool) -> str:
"""Upload file to GCS"""
file_url = '%s/%s' % (base_url, object_name)
if gsutil.check_call('ls', file_url)[0] == 0 and not force:
@@ -120,7 +124,7 @@ def upload_to_google_storage(file: str, base_url: str, object_name: str,
if dry_run:
return
print("Uploading %s as %s" % (file, file_url))
gsutil_args = ['-h', 'Cache-Control:public, max-age=31536000', 'cp']
gsutil_args = ['-h', 'Cache-Control:public, max-age=31536000', 'cp', '-v']
if gzip:
gsutil_args.extend(['-z', gzip])
gsutil_args.extend([file, file_url])
@@ -129,9 +133,21 @@ def upload_to_google_storage(file: str, base_url: str, object_name: str,
raise Exception(
code, 'Encountered error on uploading %s to %s\n%s' %
(file, file_url, err))
def construct_deps_blob(bucket: str, object_name: str, file: str) -> dict:
pattern = re.escape(file_url) + r'#(?P<generation>\d+)'
# The generation number is printed as part of the progress / status info
# which gsutil writes to stderr, keeping it separate from any final output
# data.
for line in err.strip().splitlines():
m = re.search(pattern, line)
if m:
return m.group('generation')
print('Warning: generation number could not be parsed from status '
f'info: {err}')
return MISSING_GENERATION_MSG
def construct_deps_blob(bucket: str, object_name: str, file: str,
generation: str) -> dict:
"""Output a blob hint that would need be added to a DEPS file"""
sha256sum = get_sha256sum(file)
size_bytes = os.path.getsize(file)
@@ -142,6 +158,7 @@ def construct_deps_blob(bucket: str, object_name: str, file: str) -> dict:
'object_name': object_name,
'sha256sum': sha256sum,
'size_bytes': size_bytes,
'generation': int(generation),
}
}
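
To make the new parsing concrete: with `-v`, gsutil reports the created object on stderr as `<file_url>#<generation>`, and the pattern above pulls the trailing digits out. A self-contained repro of that logic, using stderr text shaped like the expected_cp_status string in the unit test (the local file name is a placeholder):

import re

file_url = 'gs://bucket/gs_object_name/version123'
err = ('Copying file://lorem_ipsum.txt [Content-Type=text/plain].\n'
       'Created: gs://bucket/gs_object_name/version123#1712070862651948\n'
       'Operation completed over 1 objects/8.0 B.')

pattern = re.escape(file_url) + r'#(?P<generation>\d+)'
for line in err.strip().splitlines():
    m = re.search(pattern, line)
    if m:
        print(m.group('generation'))   # -> 1712070862651948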
@@ -233,10 +250,12 @@ def main():
base_url = 'gs://%s' % options.bucket
upload_to_google_storage(file, base_url, object_name, gsutil, options.force,
options.gzip, options.dry_run)
generation = upload_to_google_storage(file, base_url, object_name, gsutil,
options.force, options.gzip,
options.dry_run)
print(
json.dumps(construct_deps_blob(options.bucket, object_name, file),
json.dumps(construct_deps_blob(options.bucket, object_name, file,
generation),
indent=2))
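
Put together, main() now threads the generation returned by the upload into the printed DEPS hint. The sketch below calls the same two functions directly; the bucket, object name and file path are placeholders, and constructing the Gsutil wrapper this way is an assumption rather than a copy of what main() does:

import json

from download_from_google_storage import Gsutil, GSUTIL_DEFAULT_PATH
from upload_to_google_storage_first_class import (construct_deps_blob,
                                                  upload_to_google_storage)

bucket = '123bucket'                       # placeholder
object_name = 'gs_object_name/version123'  # placeholder
file = '/path/to/local/file'               # placeholder

gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=None)  # assumed construction
generation = upload_to_google_storage(file, 'gs://%s' % bucket, object_name,
                                       gsutil, force=True, gzip='',
                                       dry_run=False)
print(json.dumps(construct_deps_blob(bucket, object_name, file, generation),
                 indent=2))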
