def put_chunk()

in azure/datalake/store/multithread.py


def put_chunk(adlfs, src, dst, offset, size, buffersize, blocksize, delimiter=None,
              shutdown_event=None):
    """ Upload a piece of a local file

    Internal function used by `upload`.
    """
    nbytes = 0
    try:
        with adlfs.open(dst, 'wb', blocksize=buffersize, delimiter=delimiter) as fout:
            end = offset + size
            miniblock = min(size, blocksize)
            # For empty files there is no need to take the IO hit.
            if size != 0:
                with open(src, 'rb') as fin:
                    for o in range(offset, end, miniblock):
                        # Cooperative cancellation: stop between miniblocks and
                        # report how many bytes were written so far.
                        if shutdown_event and shutdown_event.is_set():
                            return nbytes, None
                        # read_block honours the delimiter, so logical records
                        # are not split in the middle of a chunk.
                        data = read_block(fin, o, miniblock, delimiter)
                        nbytes += fout.write(data)

    except Exception as e:
        exception = repr(e)
        logger.error('Upload failed %s; %s', src, exception)
        return nbytes, exception
    logger.debug('Uploaded from %s, byte offset %s', src, offset)
    return nbytes, None
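
As a rough illustration of how a caller might drive this function, the sketch below splits a local file into fixed-size chunks and uploads each one with `put_chunk`. This is only a minimal, single-threaded sketch: the store name, chunk sizes, destination naming scheme, and auth flow are assumptions for the example, whereas the real `upload`/`ADLUploader` path schedules chunks across threads via the transfer client and merges segment files afterwards.

```python
# Hypothetical driver loop for put_chunk (illustration only; not the
# library's actual scheduling or merge logic).
import os
import threading

from azure.datalake.store import core, lib

token = lib.auth()                                   # interactive auth (assumed flow)
adlfs = core.AzureDLFileSystem(token, store_name='mystore')  # 'mystore' is a placeholder

src = 'local_data.csv'                               # assumed local file
dst = '/remote/data.csv'                             # assumed remote path
chunksize = 256 * 2**20                              # 256 MiB chunks (assumed)
blocksize = 4 * 2**20                                # 4 MiB read/write blocks (assumed)
shutdown = threading.Event()

total = os.path.getsize(src)
for offset in range(0, total, chunksize):
    size = min(chunksize, total - offset)
    # Each chunk is written to its own remote file here; the real uploader
    # writes chunks to temporary segment files and concatenates them later.
    nwritten, err = put_chunk(adlfs, src, '%s_%d' % (dst, offset), offset, size,
                              buffersize=blocksize, blocksize=blocksize,
                              delimiter=b'\n', shutdown_event=shutdown)
    if err is not None:
        raise RuntimeError('chunk upload failed: %s' % err)
```

Returning the exception as a string rather than raising lets the multithreaded caller collect per-chunk failures and decide whether to retry or abort the whole transfer.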