in gslib/gcs_json_api.py [0:0]
  def _UploadObject(self,
                    upload_stream,
                    object_metadata,
                    canned_acl=None,
                    size=None,
                    preconditions=None,
                    encryption_tuple=None,
                    provider=None,
                    fields=None,
                    serialization_data=None,
                    tracker_callback=None,
                    progress_callback=None,
                    apitools_strategy=apitools_transfer.SIMPLE_UPLOAD,
                    total_size=0,
                    gzip_encoded=False):
    # pylint: disable=g-doc-args
    """Upload implementation. Cloud API arguments, plus two more.

    Additional args:
      apitools_strategy: SIMPLE_UPLOAD or RESUMABLE_UPLOAD.
      total_size: Total size of the upload; None if it is unknown (streaming).

    Returns:
      Uploaded object metadata.
    """
    # pylint: enable=g-doc-args
    ValidateDstObjectMetadata(object_metadata)
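
    # Map the caller's canned ACL name onto the JSON API's predefinedAcl enum.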
    predefined_acl = None
    if canned_acl:
      predefined_acl = (apitools_messages.StorageObjectsInsertRequest.
                        PredefinedAclValueValuesEnum(
                            self._ObjectCannedAclToPredefinedAcl(canned_acl)))
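
    # Shared container through which the callback-aware connection class
    # reports how many bytes have gone over the wire.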
    bytes_uploaded_container = BytesTransferredContainer()

    if progress_callback and size:
      total_size = size
      progress_callback(0, size)
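
    # Build a connection class that drives progress_callback as request
    # bytes are written.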
    callback_class_factory = UploadCallbackConnectionClassFactory(
        bytes_uploaded_container,
        total_size=total_size,
        progress_callback=progress_callback,
        logger=self.logger,
        debug=self.debug)
    upload_http_class = callback_class_factory.GetConnectionClass()
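
    # Swap the progress-reporting connection class into the upload http
    # object for both schemes.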
    self.upload_http.connections = {
        'http': upload_http_class,
        'https': upload_http_class,
    }

    # Since bytes_http is created in this function, we don't get the
    # user-agent header from api_client's http automatically.
    additional_headers = {
        'user-agent': self.api_client.user_agent,
    }
    self._UpdateHeaders(additional_headers)
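
    # Everything below may raise apitools exceptions; the handler at the
    # bottom of this function translates them into gsutil's exceptions.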
    try:
      content_type = None
      apitools_request = None
      global_params = None

      if not serialization_data:
        # This is a new upload, set up initial upload state.
        content_type = object_metadata.contentType
        if not content_type:
          content_type = DEFAULT_CONTENT_TYPE

        if not preconditions:
          preconditions = Preconditions()
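
        # Generation preconditions make the server reject the write if the
        # object changed since it was last read, instead of overwriting it.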
        apitools_request = apitools_messages.StorageObjectsInsertRequest(
            bucket=object_metadata.bucket,
            object=object_metadata,
            ifGenerationMatch=preconditions.gen_match,
            ifMetagenerationMatch=preconditions.meta_gen_match,
            predefinedAcl=predefined_acl,
            userProject=self.user_project)
        global_params = apitools_messages.StandardQueryParameters()
        if fields:
          global_params.fields = ','.join(set(fields))

      encryption_headers = self._EncryptionHeadersFromTuple(
          crypto_tuple=encryption_tuple)
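
      # Dispatch on strategy: simple is a single request; resumable is
      # chunked and restartable from serialization data.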
      if apitools_strategy == apitools_transfer.SIMPLE_UPLOAD:
        # One-shot upload.
        apitools_upload = apitools_transfer.Upload(upload_stream,
                                                   content_type,
                                                   total_size=size,
                                                   auto_transfer=True,
                                                   num_retries=self.num_retries,
                                                   gzip_encoded=gzip_encoded)
        apitools_upload.strategy = apitools_strategy
        apitools_upload.bytes_http = self.authorized_upload_http
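
        # Customer-supplied encryption key headers ride along on the
        # Insert request itself.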
        with self._ApitoolsRequestHeaders(encryption_headers):
          return self.api_client.objects.Insert(apitools_request,
                                                upload=apitools_upload,
                                                global_params=global_params)
      else:  # Resumable upload.
        # Since bytes_http is created in this function, we don't get the
        # user-agent header from api_client's http automatically.
        additional_headers = {
            'user-agent': self.api_client.user_agent,
        }
        additional_headers.update(encryption_headers)
        self._UpdateHeaders(additional_headers)
        return self._PerformResumableUpload(
            upload_stream, self.authorized_upload_http, content_type, size,
            serialization_data, apitools_strategy, apitools_request,
            global_params, bytes_uploaded_container, tracker_callback,
            additional_headers, progress_callback, gzip_encoded)
    except TRANSLATABLE_APITOOLS_EXCEPTIONS as e:
      not_found_exception = CreateNotFoundExceptionForObjectWrite(
          self.provider, object_metadata.bucket)
      self._TranslateExceptionAndRaise(e,
                                       bucket_name=object_metadata.bucket,
                                       object_name=object_metadata.name,
                                       not_found_exception=not_found_exception)
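
For context, a minimal sketch of how a caller might pick between the two strategies this method accepts. This is hypothetical, not taken from gcs_json_api.py: the `gsutil_api` instance, the `upload_file` helper, and the 8 MiB threshold are all assumptions for illustration.

import os

from apitools.base.py import transfer as apitools_transfer

def upload_file(gsutil_api, object_metadata, path):
  # Hypothetical caller sketch. Small files take the one-shot path; larger
  # ones take the resumable path so an interrupted transfer can be
  # restarted from serialization data.
  size = os.path.getsize(path)
  if size < 8 * 1024 * 1024:  # Assumed threshold, for illustration only.
    strategy = apitools_transfer.SIMPLE_UPLOAD
  else:
    strategy = apitools_transfer.RESUMABLE_UPLOAD
  with open(path, 'rb') as stream:
    return gsutil_api._UploadObject(stream,
                                    object_metadata,
                                    size=size,
                                    apitools_strategy=strategy,
                                    total_size=size)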