@@ -563,6 +563,8 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
             package = self.GetExpandedPackageName()
             url = '%s/p/%s/+/%s' % (scm.GetActualRemoteURL(None), package,
                                     revision)
+        if scm.name == 'gcs':
+            url = self.url
         if os.path.isdir(scm.checkout_path):
             revision = scm.revinfo(None, None, None)
@@ -755,18 +757,27 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
                                        relative=use_relative_paths,
                                        condition=condition))
             elif dep_type == 'gcs':
-                deps_to_add.append(
-                    GcsDependency(parent=self,
-                                  name=name,
-                                  bucket=dep_value['bucket'],
-                                  object_name=dep_value['object_name'],
-                                  sha256sum=dep_value['sha256sum'],
-                                  output_file=dep_value.get('output_file'),
-                                  size_bytes=dep_value['size_bytes'],
-                                  custom_vars=self.custom_vars,
-                                  should_process=should_process,
-                                  relative=use_relative_paths,
-                                  condition=condition))
+                # Validate that all objects are unique
+                object_name_set = {
+                    o['object_name']
+                    for o in dep_value['objects']
+                }
+                if len(object_name_set) != len(dep_value['objects']):
+                    raise Exception('Duplicate object names detected in {} GCS '
+                                    'dependency.'.format(name))
+                for obj in dep_value['objects']:
+                    deps_to_add.append(
+                        GcsDependency(parent=self,
+                                      name=name,
+                                      bucket=dep_value['bucket'],
+                                      object_name=obj['object_name'],
+                                      sha256sum=obj['sha256sum'],
+                                      output_file=obj.get('output_file'),
+                                      size_bytes=obj['size_bytes'],
+                                      custom_vars=self.custom_vars,
+                                      should_process=should_process,
+                                      relative=use_relative_paths,
+                                      condition=condition))
             else:
                 url = dep_value.get('url')
                 deps_to_add.append(
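
For reviewers following along: the branch above now expects each GCS dep to carry an 'objects' list rather than a single object. A minimal sketch of the DEPS shape it parses, with every bucket, object name, digest, and size invented for illustration:

    deps = {
        'src/third_party/foo': {
            'dep_type': 'gcs',
            'bucket': 'example-bucket',  # hypothetical bucket
            'objects': [
                {
                    'object_name': 'Linux/foo.tar.gz',   # hypothetical object
                    'sha256sum': 'deadbeef...',          # placeholder digest
                    'size_bytes': 12345,
                },
                {
                    'object_name': 'Linux/foo_extras.tar.gz',
                    'sha256sum': 'cafef00d...',
                    'size_bytes': 678,
                },
            ],
        },
    }

Each object in the list becomes its own GcsDependency, all sharing the same bucket and checkout directory.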
@@ -2513,12 +2524,9 @@ class GcsDependency(Dependency):
         self.sha256sum = sha256sum
         self.output_file = output_file
         self.size_bytes = size_bytes
-        url = 'gs://{bucket}/{object_name}'.format(
-            bucket=self.bucket,
-            object_name=self.object_name,
-        )
+        url = f'gs://{self.bucket}/{self.object_name}'
         super(GcsDependency, self).__init__(parent=parent,
-                                            name=name,
+                                            name=f'{name}:{object_name}',
                                             url=url,
                                             managed=None,
                                             custom_deps=None,
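
Because each object is now its own Dependency, the name must be unique per object, so the constructor appends the object name. A quick trace with invented values:

    name = 'src/third_party/foo'        # hypothetical DEPS key
    object_name = 'Linux/foo.tar.gz'    # hypothetical object
    bucket = 'example-bucket'           # hypothetical bucket
    # The dependency name becomes 'src/third_party/foo:Linux/foo.tar.gz'
    # and its url becomes 'gs://example-bucket/Linux/foo.tar.gz'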
@@ -2530,6 +2538,12 @@ class GcsDependency(Dependency):
                                             relative=relative,
                                             condition=condition)

+    #override
+    def verify_validity(self):
+        """GCS dependencies allow duplicate names for objects in the same directory."""
+        logging.info('Dependency(%s).verify_validity()' % self.name)
+        return True
+
     #override
     def run(self, revision_overrides, command, args, work_queue, options,
             patch_refs, target_branches, skip_sync_revisions):
@@ -2547,12 +2561,12 @@ class GcsDependency(Dependency):
             f.write(sha1)
             f.write('\n')

-    def IsDownloadNeeded(self, output_dir, output_file):
+    def IsDownloadNeeded(self, output_dir, output_file, hash_file,
+                         migration_toggle_file):
         """Check if download and extract is needed."""
         if not os.path.exists(output_file):
             return True

-        hash_file = os.path.join(output_dir, 'hash')
         existing_hash = None
         if os.path.exists(hash_file):
             try:
@@ -2565,9 +2579,7 @@ class GcsDependency(Dependency):
         # (b/328065301): Remove is_first_class_gcs_file logic when all GCS
         # hooks are migrated to first class deps
-        is_first_class_gcs_file = os.path.join(
-            output_dir, download_from_google_storage.MIGRATION_TOGGLE_FILE_NAME)
-        is_first_class_gcs = os.path.exists(is_first_class_gcs_file)
+        is_first_class_gcs = os.path.exists(migration_toggle_file)
         if not is_first_class_gcs:
             return True
@@ -2603,15 +2615,22 @@ class GcsDependency(Dependency):
         root_dir = self.root.root_dir

         # Directory of the extracted tarfile contents
-        output_dir = os.path.join(root_dir, self.name)
+        output_dir = os.path.join(root_dir, self.name.split(':')[0])
         output_file = os.path.join(output_dir, self.output_file
                                    or gcs_file_name)

-        if not self.IsDownloadNeeded(output_dir, output_file):
+        # Remove any forward slashes and drop any extensions
+        hash_name = self.object_name.replace('/', '_').split('.')[0]
+        hash_file = os.path.join(output_dir, hash_name + '_hash')
+        migration_toggle_file = os.path.join(
+            output_dir,
+            download_from_google_storage.construct_migration_file_name(
+                self.object_name))
+        if not self.IsDownloadNeeded(output_dir, output_file, hash_file,
+                                     migration_toggle_file):
             return

         # Remove hashfile
-        hash_file = os.path.join(output_dir, 'hash')
         if os.path.exists(hash_file):
             os.remove(hash_file)
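
Since several objects can now share one output_dir, the hash and migration-toggle files are derived per object rather than using a single 'hash' file. Tracing the hash_name computation above with an invented object name:

    object_name = 'Linux/foo-v2.tar.gz'                      # hypothetical
    hash_name = object_name.replace('/', '_').split('.')[0]
    # 'Linux/foo-v2.tar.gz' -> 'Linux_foo-v2.tar.gz' -> 'Linux_foo-v2'
    hash_file = os.path.join(output_dir, hash_name + '_hash')
    # -> '<output_dir>/Linux_foo-v2_hash'

Note that split('.')[0] truncates at the first dot, so an object name with a dotted version such as 'foo-1.2.tar.gz' collapses to 'foo-1'; two object names differing only after a dot would collide on the same hash file.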
@@ -2619,10 +2638,10 @@ class GcsDependency(Dependency):
         if os.path.exists(output_file):
             os.remove(output_file)

-        # Remove extracted contents
-        if os.path.exists(output_dir):
-            shutil.rmtree(output_dir)
-        os.makedirs(output_dir)
+        # Another GCS dep could be using the same output_dir, so don't remove
+        # it
+        if not os.path.exists(output_dir):
+            os.makedirs(output_dir)

         if os.getenv('GCLIENT_TEST') == '1':
             if 'no-extract' in output_file:
@@ -2631,7 +2650,7 @@ class GcsDependency(Dependency):
             else:
                 # Create fake tar file and extracted tar contents
                 tmpdir = tempfile.mkdtemp()
-                copy_dir = os.path.join(tmpdir, self.name, 'extracted_dir')
+                copy_dir = os.path.join(tmpdir, gcs_file_name, 'extracted_dir')
                 if os.path.exists(copy_dir):
                     shutil.rmtree(copy_dir)
                 os.makedirs(copy_dir)
@@ -2640,10 +2659,9 @@ class GcsDependency(Dependency):
             with tarfile.open(output_file, "w:gz") as tar:
                 tar.add(copy_dir, arcname=os.path.basename(copy_dir))
         else:
-            gcs_url = 'gs://%s/%s' % (self.bucket, self.object_name)
             gsutil = download_from_google_storage.Gsutil(
                 download_from_google_storage.GSUTIL_DEFAULT_PATH)
-            gsutil.check_call('cp', gcs_url, output_file)
+            gsutil.check_call('cp', self.url, output_file)

         calculated_sha256sum = ''
         calculated_size_bytes = None
@@ -2680,8 +2698,6 @@ class GcsDependency(Dependency):
                 raise Exception('tarfile contains invalid entries')
             tar.extractall(path=output_dir)
         self.WriteFilenameHash(calculated_sha256sum, hash_file)
-        migration_toggle_file = os.path.join(
-            output_dir, download_from_google_storage.MIGRATION_TOGGLE_FILE_NAME)
         with open(migration_toggle_file, 'w') as f:
             f.write(str(1))
             f.write('\n')