# From azure/datalake/store/multithread.py
def merge_chunks(adlfs, outfile, files, shutdown_event=None, overwrite=False):
    """Concatenate uploaded chunk files into the final target file.

    Runs as a worker step after a multi-part upload: the temp chunk files in
    *files* are merged into *outfile* on the service via ``concat``.

    Parameters
    ----------
    adlfs : filesystem client exposing exists/remove/concat/invalidate_cache
    outfile : str
        Path of the merge target.
    files : list
        Temp chunk files (this run's only) to concatenate, in order.
    shutdown_event : optional
        Accepted for interface symmetry with other workers; not consulted here.
    overwrite : bool
        When True, a pre-existing target is removed before the merge.

    Returns
    -------
    str or None
        ``repr`` of the exception on failure (the worker contract is to
        report errors back to the coordinator, not raise), else None.
    """
    try:
        # Ask the service directly (invalidate_cache=True) rather than trust
        # the local cache, so a target created between the start of the
        # upload and this merge is reliably detected.
        target_present = adlfs.exists(outfile, invalidate_cache=True)
        if target_present and not overwrite:
            raise FileExistsError(outfile)
        if target_present:
            adlfs.remove(outfile, True)
        # The segment folder is assumed to contain only this run's temp
        # files, so delete_source=True lets concat drop the whole temp
        # folder immediately.
        adlfs.concat(outfile, files, delete_source=True)
    except Exception as err:
        failure = repr(err)
        logger.error('Merged failed %s; %s', outfile, failure)
        return failure
    logger.debug('Merged %s', outfile)
    adlfs.invalidate_cache(outfile)
    return None