in gslib/utils/copy_helper.py
def _CopyObjToObjDaisyChainMode(src_url,
                                src_obj_metadata,
                                dst_url,
                                dst_obj_metadata,
                                preconditions,
                                gsutil_api,
                                logger,
                                decryption_key=None):
"""Copies from src_url to dst_url in "daisy chain" mode.
See -D OPTION documentation about what daisy chain mode is.
Args:
src_url: Source CloudUrl
src_obj_metadata: Metadata from source object
dst_url: Destination CloudUrl
dst_obj_metadata: Object-specific metadata that should be overidden during
the copy.
preconditions: Preconditions to use for the copy.
gsutil_api: gsutil Cloud API to use for the copy.
logger: For outputting log messages.
decryption_key: Base64-encoded decryption key for the source object, if any.
Returns:
(elapsed_time, bytes_transferred, dst_url with generation,
md5 hash of destination) excluding overhead like initial GET.
Raises:
CommandException: if errors encountered.
"""
  # We don't attempt to preserve ACLs across providers because
  # GCS and S3 support different ACLs and disjoint principals.
  if (global_copy_helper_opts.preserve_acl and
      src_url.scheme != dst_url.scheme):
    raise NotImplementedError('Cross-provider cp -p not supported')
  if not global_copy_helper_opts.preserve_acl:
    dst_obj_metadata.acl = []
  # Don't use callbacks for downloads on the daisy chain wrapper because
  # upload callbacks will output progress, but respect test hooks if present.
  progress_callback = None
  if global_copy_helper_opts.test_callback_file:
    with open(global_copy_helper_opts.test_callback_file, 'rb') as test_fp:
      progress_callback = pickle.loads(test_fp.read()).call
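  # The upload needs to know whether the source bytes are gzip-encoded and
  # which encryption key (if any, from the boto config) to apply to the
  # destination object.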
  compressed_encoding = ObjectIsGzipEncoded(src_obj_metadata)
  encryption_keywrapper = GetEncryptionKeyWrapper(config)
  start_time = time.time()
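  # DaisyChainWrapper streams the source object's download and exposes it as a
  # file-like object, so the upload below reads bytes as they arrive rather
  # than staging the whole object on local disk.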
  upload_fp = DaisyChainWrapper(src_url,
                                src_obj_metadata.size,
                                gsutil_api,
                                compressed_encoding=compressed_encoding,
                                progress_callback=progress_callback,
                                decryption_key=decryption_key)
  uploaded_object = None
  if src_obj_metadata.size == 0:
    # Resumable uploads of size 0 are not supported.
    uploaded_object = gsutil_api.UploadObject(
        upload_fp,
        object_metadata=dst_obj_metadata,
        canned_acl=global_copy_helper_opts.canned_acl,
        preconditions=preconditions,
        provider=dst_url.scheme,
        fields=UPLOAD_RETURN_FIELDS,
        size=src_obj_metadata.size,
        encryption_tuple=encryption_keywrapper)
  else:
    # TODO: Support process-break resumes. This will resume across connection
    # breaks and server errors, but the tracker callback is a no-op so this
    # won't resume across gsutil runs.
    # TODO: Test retries via test_callback_file.
    uploaded_object = gsutil_api.UploadObjectResumable(
        upload_fp,
        object_metadata=dst_obj_metadata,
        canned_acl=global_copy_helper_opts.canned_acl,
        preconditions=preconditions,
        provider=dst_url.scheme,
        fields=UPLOAD_RETURN_FIELDS,
        size=src_obj_metadata.size,
        progress_callback=FileProgressCallbackHandler(
            gsutil_api.status_queue,
            src_url=src_url,
            dst_url=dst_url,
            operation_name='Uploading').call,
        tracker_callback=_DummyTrackerCallback,
        encryption_tuple=encryption_keywrapper)
  end_time = time.time()
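  # Verify that the hashes reported for the uploaded object match the source
  # object's metadata, to catch corruption introduced during the transfer.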
  try:
    _CheckCloudHashes(logger, src_url, dst_url, src_obj_metadata,
                      uploaded_object)
  except HashMismatchException:
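    # Optionally keep the corrupted upload under a renamed object so it can be
    # inspected later; the object under the original destination name is
    # deleted below either way.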
    if _RENAME_ON_HASH_MISMATCH:
      corrupted_obj_metadata = apitools_messages.Object(
          name=dst_obj_metadata.name,
          bucket=dst_obj_metadata.bucket,
          etag=uploaded_object.etag)
      dst_obj_metadata.name = (dst_url.object_name +
                               _RENAME_ON_HASH_MISMATCH_SUFFIX)
      decryption_keywrapper = CryptoKeyWrapperFromKey(decryption_key)
      gsutil_api.CopyObject(corrupted_obj_metadata,
                            dst_obj_metadata,
                            provider=dst_url.scheme,
                            decryption_tuple=decryption_keywrapper,
                            encryption_tuple=encryption_keywrapper)
    # If the digest doesn't match, delete the object.
    gsutil_api.DeleteObject(dst_url.bucket_name,
                            dst_url.object_name,
                            generation=uploaded_object.generation,
                            provider=dst_url.scheme)
    raise
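  # Attach the uploaded object's generation to the destination URL so callers
  # can refer to the exact object version that was written.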
  result_url = dst_url.Clone()
  result_url.generation = GenerationFromUrlAndString(result_url,
                                                     uploaded_object.generation)
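  # Report the finished daisy-chain copy on the status queue so progress
  # reporting can account for the completed file.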
  PutToQueueWithTimeout(
      gsutil_api.status_queue,
      FileMessage(src_url,
                  dst_url,
                  end_time,
                  message_type=FileMessage.FILE_DAISY_COPY,
                  size=src_obj_metadata.size,
                  finished=True))

  return (end_time - start_time, src_obj_metadata.size, result_url,
          uploaded_object.md5Hash)