in OSPatching/azure/storage/blobservice.py [0:0]
def put_block_blob_from_file(self, container_name, blob_name, stream,
count=None, content_encoding=None,
content_language=None, content_md5=None,
cache_control=None,
x_ms_blob_content_type=None,
x_ms_blob_content_encoding=None,
x_ms_blob_content_language=None,
x_ms_blob_content_md5=None,
x_ms_blob_cache_control=None,
x_ms_meta_name_values=None,
x_ms_lease_id=None, progress_callback=None):
'''
Creates a new block blob from a file/stream, or updates the content of
an existing block blob, with automatic chunking and progress
notifications.
container_name: Name of existing container.
blob_name: Name of blob to create or update.
stream: Opened file/stream to upload as the blob content.
count:
Number of bytes to read from the stream. This is optional, but
should be supplied for optimal performance.
content_encoding:
Optional. Specifies which content encodings have been applied to
the blob. This value is returned to the client when the Get Blob
(REST API) operation is performed on the blob resource. The client
can use this value, when it is returned, to decode the blob content.
content_language:
Optional. Specifies the natural languages used by this resource.
content_md5:
Optional. An MD5 hash of the blob content. This hash is used to
verify the integrity of the blob during transport. When this header
is specified, the storage service compares the hash of the content
that has arrived with the one that was sent. If the two hashes do
not match, the operation fails with error code 400 (Bad Request).
cache_control:
Optional. The Blob service stores this value but does not use or
modify it.
x_ms_blob_content_type: Optional. Sets the blob's content type.
x_ms_blob_content_encoding: Optional. Sets the blob's content encoding.
x_ms_blob_content_language: Optional. Sets the blob's content language.
x_ms_blob_content_md5: Optional. Sets the blob's MD5 hash.
x_ms_blob_cache_control: Optional. Sets the blob's cache control.
x_ms_meta_name_values: A dict containing name, value pairs for the blob's metadata.
x_ms_lease_id: Required if the blob has an active lease.
progress_callback:
Callback for progress with signature function(current, total) where
current is the number of bytes transferred so far, and total is the
size of the blob, or None if the total size is unknown.
'''
_validate_not_none('container_name', container_name)
_validate_not_none('blob_name', blob_name)
_validate_not_none('stream', stream)
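# If the total size is known and fits in a single Put Blob request,
# upload the content in one call rather than splitting it into blocks.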
if count and count < self._BLOB_MAX_DATA_SIZE:
if progress_callback:
progress_callback(0, count)
data = stream.read(count)
self.put_blob(container_name,
blob_name,
data,
'BlockBlob',
content_encoding,
content_language,
content_md5,
cache_control,
x_ms_blob_content_type,
x_ms_blob_content_encoding,
x_ms_blob_content_language,
x_ms_blob_content_md5,
x_ms_blob_cache_control,
x_ms_meta_name_values,
x_ms_lease_id)
if progress_callback:
progress_callback(count, count)
else:
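# Content is large or of unknown size: create an empty block blob with
# the requested properties and metadata, then upload the data as
# uncommitted blocks and commit them with a Put Block List request.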
if progress_callback:
progress_callback(0, count)
self.put_blob(container_name,
blob_name,
None,
'BlockBlob',
content_encoding,
content_language,
content_md5,
cache_control,
x_ms_blob_content_type,
x_ms_blob_content_encoding,
x_ms_blob_content_language,
x_ms_blob_content_md5,
x_ms_blob_cache_control,
x_ms_meta_name_values,
x_ms_lease_id)
remain_bytes = count
block_ids = []
block_index = 0
index = 0
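# Read the stream in chunks of at most _BLOB_MAX_CHUNK_DATA_SIZE bytes;
# each non-empty chunk is uploaded as an uncommitted block.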
while True:
request_count = (self._BLOB_MAX_CHUNK_DATA_SIZE
                 if remain_bytes is None
                 else min(remain_bytes, self._BLOB_MAX_CHUNK_DATA_SIZE))
data = stream.read(request_count)
if data:
length = len(data)
index += length
remain_bytes = (remain_bytes - length) if remain_bytes else None
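# Zero-padded sequence numbers keep the block ids unique and of equal
# length, as the Blob service requires block ids within a blob to be
# the same length.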
block_id = '{0:08d}'.format(block_index)
self.put_block(container_name, blob_name,
data, block_id, x_ms_lease_id=x_ms_lease_id)
block_ids.append(block_id)
block_index += 1
if progress_callback:
progress_callback(index, count)
else:
break
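# Commit the uploaded blocks, in order, so they become the content of
# the blob.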
self.put_block_list(container_name, blob_name, block_ids,
content_md5, x_ms_blob_cache_control,
x_ms_blob_content_type,
x_ms_blob_content_encoding,
x_ms_blob_content_language,
x_ms_blob_content_md5,
x_ms_meta_name_values,
x_ms_lease_id)
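# Illustrative usage sketch (not part of the original module); the account
# credentials, container, file name, and import path below are placeholder
# assumptions:
#
#     import os
#     from azure.storage import BlobService
#
#     def report_progress(current, total):
#         print('{0} of {1} bytes uploaded'.format(current, total))
#
#     blob_service = BlobService('myaccount', 'mykey')
#     with open('backup.tar.gz', 'rb') as stream:
#         blob_service.put_block_blob_from_file(
#             'mycontainer', 'backup.tar.gz', stream,
#             count=os.path.getsize('backup.tar.gz'),
#             x_ms_blob_content_type='application/octet-stream',
#             progress_callback=report_progress)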