in s3transfer/upload.py [0:0]
def yield_upload_part_bodies(self, transfer_future, chunksize):
    """Yield ``(part_number, read_file_chunk)`` pairs for a multipart upload.

    Part numbers start at 1. Each yielded chunk is a ReadFileChunk-style
    reader covering one ``chunksize`` slice of the source file, wired up
    with progress and close callbacks.

    :param transfer_future: The transfer future for the whole upload;
        provides the total size and the source file-like object.
    :param chunksize: The size in bytes of each upload part.
    """
    total_size = transfer_future.meta.size
    part_count = self._get_num_parts(transfer_future, chunksize)
    for part_number in range(1, part_count + 1):
        progress_callbacks = self._get_progress_callbacks(transfer_future)
        close_callbacks = self._get_close_callbacks(progress_callbacks)
        offset = (part_number - 1) * chunksize
        # Obtain a file-like object scoped to this part, plus the full
        # size of the underlying source it belongs to.
        part_fileobj, source_size = (
            self._get_upload_part_fileobj_with_full_size(
                transfer_future.meta.call_args.fileobj,
                start_byte=offset,
                part_size=chunksize,
                full_file_size=total_size,
            )
        )
        # The interrupt-aware wrapper lets a cancel abort the upload
        # quickly rather than waiting for the socket to finish reading
        # the remaining body.
        part_fileobj = self._wrap_fileobj(part_fileobj)
        # ReadFileChunk emits progress notifications as data is read.
        read_file_chunk = self._osutil.open_file_chunk_reader_from_fileobj(
            fileobj=part_fileobj,
            chunk_size=chunksize,
            full_file_size=source_size,
            callbacks=progress_callbacks,
            close_callbacks=close_callbacks,
        )
        yield part_number, read_file_chunk