in azure/datalake/store/multithread.py [0:0]
def __init__(self, adlfs, rpath, lpath, nthreads=None, chunksize=2**28,
             buffersize=2**22, blocksize=2**22, client=None, run=True,
             overwrite=False, verbose=False, progress_callback=None, timeout=0):
    """Prepare (and by default start) a local-to-remote upload transfer.

    A pre-configured *client* may be supplied; otherwise an
    ``ADLTransferClient`` is built that uploads pieces with ``put_chunk``
    and stitches them together with ``merge_chunks``.

    Raises ``FileExistsError`` when target files already exist and
    *overwrite* was not requested.
    """
    # Use the caller's transfer client when given, else construct one.
    self.client = client or ADLTransferClient(
        adlfs,
        transfer=put_chunk,
        merge=merge_chunks,
        nthreads=nthreads,
        chunksize=chunksize,
        buffersize=buffersize,
        blocksize=blocksize,
        delimiter=None,  # TODO: see utils.cs for what is required to support delimiters.
        parent=self,
        verbose=verbose,
        unique_temporary=True,
        progress_callback=progress_callback,
        timeout=timeout)
    # Stable identifier for this transfer, derived from its parameters.
    self._name = tokenize(adlfs, rpath, lpath, chunksize, blocksize)
    self.rpath = AzureDLPath(rpath)
    self.lpath = lpath
    self._overwrite = overwrite
    # _setup reports remote files that would be clobbered; refuse to
    # proceed unless the caller explicitly opted into overwriting them.
    blockers = self._setup()
    if blockers:
        raise FileExistsError('Overwrite was not specified and the following files exist, blocking the transfer operation. Please specify overwrite to overwrite these files during transfer: {}'.format(','.join(blockers)))
    if run:
        self.run()