Mirror of https://chromium.googlesource.com/chromium/tools/depot_tools.git (synced 2026-01-11 18:51:29 +00:00)
Add is_first_class_gcs migration file
If the is_first_class_gcs file exists with contents '1':
- GCS hooks will download content and then overwrite the file with '0'.
- First class GCS deps will download as necessary by checking the hash file.

If there is no is_first_class_gcs file, or its content is '0', for a GCS hook:
- The hook will run normally and, if content is downloaded, an is_first_class_gcs file will be written with '0'.

If there is no is_first_class_gcs file, or its content is '0', for a first class GCS dep:
- Content will be downloaded and is_first_class_gcs will be set to '1'.

Verified locally by migrating and unmigrating a GCS hook.

TODO: update other GCS hook scripts in chromium/src to check the migration file.

Bug: b/324418194
Change-Id: Ida8541cb70839b86e99115bcabfc38bc87e40136
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5368250
Reviewed-by: Joanna Wang <jojwang@chromium.org>
Commit-Queue: Stephanie Kim <kimstephanie@google.com>
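For orientation, the rules above can be read as the following minimal Python sketch. It is not the depot_tools implementation; decide_action, its caller argument, and the returned strings are illustrative names introduced here only to restate the commit message.

import os

MIGRATION_TOGGLE_FILE_NAME = 'is_first_class_gcs'  # toggle file introduced by this change


def decide_action(working_dir, caller):
    """Restates the migration rules; caller is 'hook' or 'first_class'."""
    toggle = os.path.join(working_dir, MIGRATION_TOGGLE_FILE_NAME)
    content = None
    if os.path.exists(toggle):
        with open(toggle) as f:
            content = f.read().strip()

    if caller == 'hook':
        if content == '1':
            # Directory was last populated by a first-class dep: re-download
            # and hand ownership back to the hook.
            return 'download, then write "0"'
        # Missing file or '0': run normally; record '0' if anything is fetched.
        return 'run normally, write "0" after downloading'

    # caller == 'first_class'
    if content == '1':
        return 'trust the hash file, download only if the hash is stale'
    # Missing file or '0': the hook owned this directory; take it over.
    return 'download and write "1"'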
download_from_google_storage.py
@@ -38,6 +38,9 @@ PLATFORM_MAPPING = {
    'zos': 'zos',
}

# (b/328065301): Remove when all GCS hooks are migrated to first class deps
MIGRATION_TOGGLE_FILE_NAME = 'is_first_class_gcs'


class InvalidFileError(IOError):
    pass
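The commit message's TODO mentions that other GCS hook scripts in chromium/src still need to check this migration file. A check of that kind could look like the hypothetical helper below; hook_should_redownload is not part of this change and is only a sketch of the intended consumer-side behaviour.

import os

MIGRATION_TOGGLE_FILE_NAME = 'is_first_class_gcs'


def hook_should_redownload(output_dir):
    """Return True if a legacy GCS hook should ignore its cached output.

    A '1' in the toggle file means the directory was last written by a
    first-class GCS dep, so the hook cannot trust its own bookkeeping.
    """
    toggle = os.path.join(output_dir, MIGRATION_TOGGLE_FILE_NAME)
    try:
        with open(toggle) as f:
            return f.read().strip() == '1'
    except OSError:
        # No toggle file: the directory is still hook-owned.
        return False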
@@ -255,6 +258,12 @@ def _downloader_worker_thread(thread_num,
        input_sha1_sum, output_filename = q.get()
        if input_sha1_sum is None:
            return
        working_dir = os.path.dirname(output_filename)
        if not working_dir:
            raise Exception(
                'Unable to construct a working_dir from the output_filename.')
        migration_file_name = os.path.join(working_dir,
                                           MIGRATION_TOGGLE_FILE_NAME)
        extract_dir = None
        if extract:
            if not output_filename.endswith('.tar.gz'):
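The working_dir guard above matters because os.path.dirname() returns an empty string for a bare file name, so the migration-file path would otherwise resolve relative to the current directory rather than the download directory. A quick illustration (paths are made up):

import os

print(os.path.dirname('clang-format.tar.gz'))        # '' -> triggers the raise
print(os.path.dirname('tools/clang-format.tar.gz'))  # 'tools'
print(os.path.join('', 'is_first_class_gcs'))        # 'is_first_class_gcs', i.e. the CWD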
@@ -284,6 +293,14 @@ def _downloader_worker_thread(thread_num,
                              're-downloading...' %
                              (thread_num, output_filename))
                    skip = False
        # (b/328065301): Remove when all GCS hooks are migrated to first
        # class deps
        # If the directory was created by a first class GCS
        # dep, remove the migration file and re-download using the
        # latest hook.
        is_first_class_gcs = os.path.exists(migration_file_name)
        if is_first_class_gcs:
            skip = False
        if skip:
            continue

@@ -371,6 +388,8 @@ def _downloader_worker_thread(thread_num,
            with open(extract_dir + '.tmp', 'a'):
                tar.extractall(path=dirname)
            os.remove(extract_dir + '.tmp')
        if os.path.exists(migration_file_name):
            os.remove(migration_file_name)
        # Set executable bit.
        if sys.platform == 'cygwin':
            # Under cygwin, mark all files as executable. The executable flag in
@@ -448,7 +467,23 @@ def download_from_google_storage(input_filename, base_url, gsutil, num_threads,

    # Sequentially check for the most common case and see if we can bail out
    # early before making any slow calls to gsutil.
    if not force and all(
    if directory:
        working_dir = input_filename
    elif os.path.dirname(output):
        working_dir = os.path.dirname(output)

    if not working_dir:
        raise Exception(
            'Unable to construct a working_dir from the inputted directory'
            ' or sha1 file name.')

    # (b/328065301): Remove when all GCS hooks are migrated to first class deps
    # If the directory was created by a first class GCS
    # dep, remove the migration file and re-download using the
    # latest hook.
    migration_file = os.path.join(working_dir, MIGRATION_TOGGLE_FILE_NAME)
    is_first_class_gcs = os.path.exists(migration_file)
    if not force and not is_first_class_gcs and all(
            _data_exists(sha1, path, extract) for sha1, path in input_data):
        return 0

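The effect of the extra "not is_first_class_gcs" term is that the early-exit fast path is only taken when the data already exists and the directory is still hook-owned. A condensed, self-contained restatement of that predicate is sketched below; can_skip_download and the stubbed _data_exists are assumptions made for illustration, not the real signatures in download_from_google_storage.py.

import os

MIGRATION_TOGGLE_FILE_NAME = 'is_first_class_gcs'


def _data_exists(sha1, path, extract):
    # Stand-in for the real check in download_from_google_storage.py.
    return os.path.exists(path)


def can_skip_download(force, working_dir, input_data, extract=False):
    """True when the download may bail out before calling gsutil."""
    migration_file = os.path.join(working_dir, MIGRATION_TOGGLE_FILE_NAME)
    is_first_class_gcs = os.path.exists(migration_file)
    return (not force and not is_first_class_gcs and
            all(_data_exists(sha1, path, extract) for sha1, path in input_data))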
gclient.py
@@ -2548,9 +2548,8 @@ class GcsDependency(Dependency):

    def IsDownloadNeeded(self, output_dir, output_file):
        """Check if download and extract is needed."""
        download_needed = False
        if not os.path.exists(output_file):
            download_needed = True
            return True

        hash_file = os.path.join(output_dir, 'hash')
        existing_hash = None
@@ -2559,13 +2558,21 @@ class GcsDependency(Dependency):
                with open(hash_file, 'r') as f:
                    existing_hash = f.read().rstrip()
            except IOError:
                download_needed = True
                return True
        else:
            download_needed = True
            return True

        # (b/328065301): Remove is_first_class_gcs_file logic when all GCS
        # hooks are migrated to first class deps
        is_first_class_gcs_file = os.path.join(
            output_dir, download_from_google_storage.MIGRATION_TOGGLE_FILE_NAME)
        is_first_class_gcs = os.path.exists(is_first_class_gcs_file)
        if not is_first_class_gcs:
            return True

        if existing_hash != self.sha256sum:
            download_needed = True
            return download_needed
            return True
        return False

    def GetSha256Sum(self, filename):
        sha = hashlib.sha256()
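Read top to bottom, the rewritten IsDownloadNeeded reduces to a chain of early returns: missing output, unreadable or missing hash file, missing migration toggle, or a stale hash all force a download. A standalone paraphrase, with the dependency object replaced by an explicit expected_sha256 argument (an assumption for illustration only), looks like this:

import os

MIGRATION_TOGGLE_FILE_NAME = 'is_first_class_gcs'


def is_download_needed(output_dir, output_file, expected_sha256):
    """Paraphrase of GcsDependency.IsDownloadNeeded after this change."""
    if not os.path.exists(output_file):
        return True

    try:
        with open(os.path.join(output_dir, 'hash')) as f:
            existing_hash = f.read().rstrip()
    except OSError:
        # Missing or unreadable hash file: nothing to trust.
        return True

    # (b/328065301): a directory without the toggle file is still owned by a
    # legacy GCS hook, so the first-class dep must (re)download it.
    if not os.path.exists(os.path.join(output_dir, MIGRATION_TOGGLE_FILE_NAME)):
        return True

    return existing_hash != expected_sha256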
@@ -2654,6 +2661,11 @@ class GcsDependency(Dependency):
        with tarfile.open(output_file, 'r:*') as tar:
            tar.extractall(path=output_dir)
        self.WriteFilenameHash(calculated_sha256sum, hash_file)
        migration_toggle_file = os.path.join(
            output_dir, download_from_google_storage.MIGRATION_TOGGLE_FILE_NAME)
        with open(migration_toggle_file, 'w') as f:
            f.write(str(1))
            f.write('\n')

    #override
    def GetScmName(self):
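After a first-class download, the output directory therefore carries both bookkeeping files: the hash file and the '1' toggle. A small, illustrative round trip of that layout (the directory and the 'abcd123' value are placeholders mirroring the smoke-test expectations below):

import os
import tempfile

output_dir = tempfile.mkdtemp()

with open(os.path.join(output_dir, 'hash'), 'w') as f:
    f.write('abcd123\n')   # placeholder hash, as in the smoke tests
with open(os.path.join(output_dir, 'is_first_class_gcs'), 'w') as f:
    f.write('1\n')         # marks the directory as first-class owned

print(sorted(os.listdir(output_dir)))  # ['hash', 'is_first_class_gcs']
with open(os.path.join(output_dir, 'is_first_class_gcs')) as f:
    print(f.read().strip() == '1')     # True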
tests/gclient_smoketest.py
@@ -39,16 +39,22 @@ class GClientSmokeGcs(gclient_smoketest_base.GClientSmokeBase):
                'abcd123\n',
            'src/another_gcs_dep/llvmfile.tar.gz':
                'tarfile',
            'src/another_gcs_dep/is_first_class_gcs':
                '1\n',
            'src/another_gcs_dep/extracted_dir/extracted_file':
                'extracted text',
            'src/gcs_dep/deadbeef':
                'tarfile',
            'src/gcs_dep/hash':
                'abcd123\n',
            'src/gcs_dep/is_first_class_gcs':
                '1\n',
            'src/gcs_dep/extracted_dir/extracted_file':
                'extracted text',
            'src/gcs_dep_with_output_file/hash':
                'abcd123\n',
            'src/gcs_dep_with_output_file/is_first_class_gcs':
                '1\n',
            'src/gcs_dep_with_output_file/clang-format-no-extract':
                'non-extractable file',
        })
@@ -77,6 +83,7 @@ class GClientSmokeGcs(gclient_smoketest_base.GClientSmokeBase):
        tree.update({
            'src/repo12/extracted_dir/extracted_file': 'extracted text',
            'src/repo12/hash': 'abcd123\n',
            'src/repo12/is_first_class_gcs': '1\n',
            'src/repo12/path_to_file.tar.gz': 'tarfile',
        })
        self.assertTree(tree)
@@ -94,6 +101,7 @@ class GClientSmokeGcs(gclient_smoketest_base.GClientSmokeBase):
        tree.update({
            'src/repo12/extracted_dir/extracted_file': 'extracted text',
            'src/repo12/hash': 'abcd123\n',
            'src/repo12/is_first_class_gcs': '1\n',
            'src/repo12/path_to_file.tar.gz': 'tarfile',
        })
        self.assertTree(tree)
@@ -109,6 +117,7 @@ class GClientSmokeGcs(gclient_smoketest_base.GClientSmokeBase):
        tree.update({
            'src/repo12/extracted_dir/extracted_file': 'extracted text',
            'src/repo12/hash': 'abcd123\n',
            'src/repo12/is_first_class_gcs': '1\n',
            'src/repo12/path_to_file.tar.gz': 'tarfile',
        })
        self.assertTree(tree)