in azure/multiapi/storagev2/blob/v2019_07_07/_upload_helpers.py [0:0]
def upload_block_blob( # pylint: disable=too-many-locals
        client=None,
        data=None,
        stream=None,
        length=None,
        overwrite=None,
        headers=None,
        validate_content=None,
        max_concurrency=None,
        blob_settings=None,
        encryption_options=None,
        **kwargs):
    """Upload data as a block blob.

    Chooses between a single Put Blob call (for payloads smaller than
    ``blob_settings.max_single_put_size``) and a chunked upload followed by
    ``commit_block_list``, optionally applying client-side encryption.

    :param client: Generated block-blob operations client; must provide
        ``upload`` and ``commit_block_list``.
    :param data: The payload — bytes, or an object with a ``read`` method.
    :param stream: Stream wrapper over the payload, used for chunked uploads.
    :param length: Size of the payload in bytes, or None if unknown.
    :param overwrite: If falsy and no access conditions were supplied, the
        upload is made conditional on the blob not already existing.
    :param headers: Mutable dict of request headers; encryption metadata is
        added here when client-side encryption is enabled.
    :param validate_content: Whether to compute/send content hashes per request.
    :param max_concurrency: Maximum parallel chunk uploads.
    :param blob_settings: Client configuration (max sizes, thresholds, flags).
    :param encryption_options: Dict with optional 'key' / 'required' entries;
        mutated in place ('cek', 'vector') for the chunked encrypted path.
    :returns: Response headers from the service (via ``return_response_headers``).
    :raises: Errors raised by ``process_storage_error``.
    """
    try:
        # No explicit overwrite and no caller-supplied conditions: require
        # that the blob does not already exist (If-None-Match: *).
        # NOTE(review): assumes kwargs always carries a
        # 'modified_access_conditions' object here — confirm against callers.
        if not overwrite and not _any_conditions(**kwargs):
            kwargs['modified_access_conditions'].if_none_match = '*'
        adjusted_count = length
        # Client-side encryption pads the payload to the next 16-byte (AES
        # block) boundary, so the announced content length must grow too.
        if (encryption_options.get('key') is not None) and (adjusted_count is not None):
            adjusted_count += (16 - (length % 16))
        # Pop these before forwarding **kwargs so the generated client does
        # not receive them twice.
        blob_headers = kwargs.pop('blob_headers', None)
        tier = kwargs.pop('standard_blob_tier', None)
        # Do single put if the size is smaller than config.max_single_put_size
        if adjusted_count is not None and (adjusted_count < blob_settings.max_single_put_size):
            try:
                # `data` may be stream-like; read exactly `length` bytes.
                # Plain bytes objects have no .read and fall through.
                data = data.read(length)
                if not isinstance(data, six.binary_type):
                    raise TypeError('Blob data should be of type bytes.')
            except AttributeError:
                pass
            if encryption_options.get('key'):
                # Encrypt the whole payload and record the encryption
                # metadata on the blob.
                encryption_data, data = encrypt_blob(data, encryption_options['key'])
                headers['x-ms-meta-encryptiondata'] = encryption_data
            return client.upload(
                data,
                content_length=adjusted_count,
                blob_http_headers=blob_headers,
                headers=headers,
                cls=return_response_headers,
                validate_content=validate_content,
                data_stream_total=adjusted_count,
                upload_stream_current=0,
                tier=tier.value if tier else None,
                **kwargs)
        # Use the buffered chunk-upload path (rather than sub-stream views of
        # the source stream) when byte buffering is forced, content validation
        # or encryption is in play, blocks are below the large-upload
        # threshold, or the stream is not seekable.
        use_original_upload_path = blob_settings.use_byte_buffer or \
            validate_content or encryption_options.get('required') or \
            blob_settings.max_block_size < blob_settings.min_large_block_upload_threshold or \
            hasattr(stream, 'seekable') and not stream.seekable() or \
            not hasattr(stream, 'seek') or not hasattr(stream, 'tell')
        if use_original_upload_path:
            if encryption_options.get('key'):
                # Generate the per-blob key material and stash it in
                # encryption_options for the chunk uploader to use.
                cek, iv, encryption_data = generate_blob_encryption_data(encryption_options['key'])
                headers['x-ms-meta-encryptiondata'] = encryption_data
                encryption_options['cek'] = cek
                encryption_options['vector'] = iv
            block_ids = upload_data_chunks(
                service=client,
                uploader_class=BlockBlobChunkUploader,
                total_size=length,
                chunk_size=blob_settings.max_block_size,
                max_concurrency=max_concurrency,
                stream=stream,
                validate_content=validate_content,
                encryption_options=encryption_options,
                **kwargs
            )
        else:
            block_ids = upload_substream_blocks(
                service=client,
                uploader_class=BlockBlobChunkUploader,
                total_size=length,
                chunk_size=blob_settings.max_block_size,
                max_concurrency=max_concurrency,
                stream=stream,
                validate_content=validate_content,
                **kwargs
            )
        # Commit every uploaded block as the blob's new (latest) content.
        block_lookup = BlockLookupList(committed=[], uncommitted=[], latest=[])
        block_lookup.latest = block_ids
        return client.commit_block_list(
            block_lookup,
            blob_http_headers=blob_headers,
            cls=return_response_headers,
            validate_content=validate_content,
            headers=headers,
            tier=tier.value if tier else None,
            **kwargs)
    except StorageErrorException as error:
        try:
            # Translate the generated-layer exception into the public error
            # hierarchy; this call raises.
            process_storage_error(error)
        except ResourceModifiedError as mod_error:
            # With overwrite disabled, a precondition failure means the blob
            # already exists — convert to the friendlier error type before
            # re-raising.
            if not overwrite:
                _convert_mod_error(mod_error)
            raise