sandbox/snowball_uploader_26-inputFile.py [78:150]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
min_part_size = 5 * 1024 ** 2 # 5 MiB; do not change it, this is a Snowball limit
max_part_count = int(math.ceil(max_tarfile_size / max_part_size))
current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
parts = []
delimiter = ', '
## On Python 2, the open() builtin does not accept the encoding parameter
if sys.version_info.major > 2:
    do_open = lambda filename, flag: open(filename, flag, encoding='utf-8')
else:
    do_open = lambda filename, flag: open(filename, flag)
## Caution: modify the rename_file function to fit your own naming rule
def rename_file(org_file):
    #return org_file + "_new_name"
    return org_file

def write_to_file(fl_name, subfl_list):
    ## write one "source, target" entry per line
    with do_open(fl_name, 'w') as f:
        for line in subfl_list:
            f.write("%s\n" % line)
    return 0

def gen_filelist():
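    ## Walk target_path and split its files into fl_N.txt lists of roughly
    ## max_tarfile_size total bytes, then archive the lists as tar.gz.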
    sum_size = 0
    fl_prefix = 'fl_'
    fl_index = 1
    subfl_list = []
    shutil.rmtree(filelist_dir, ignore_errors=True)
    try:
        os.makedirs(filelist_dir)
    except OSError:
        pass
    print('generating file list by size %s bytes' % max_tarfile_size)
    for r,d,f in os.walk(target_path):
        for file in f:
            fl_name = filelist_dir + '/' + fl_prefix + str(fl_index) + ".txt"
            file_name = os.path.join(r,file)
            f_meta = os.stat(file_name)
            f_inode = f_meta.st_ino
            f_size = f_meta.st_size
            #f_dict[f_inode] = {"fname":file_name, "fsize":f_size}
            sum_size = sum_size + f_size
            target_file_name = rename_file(file_name)
            #f_info = [file_name , target_file_name , f_inode, f_size]
            f_info = [file_name , target_file_name]
            f_info_str = delimiter.join(f_info)
            subfl_list.append(f_info_str)
            #print("f_info: ", subfl_list)
            #with do_open(fl_name, 'a') as fl_content:
            #    target_file_name = rename_file(file_name)
            #    fl_content.write(file_name + delimiter + target_file_name + '\n')                
            #    print('%s, %s' % (file_name, target_file_name))
            if max_tarfile_size < sum_size:
                write_to_file(fl_name, subfl_list)
                fl_index = fl_index + 1
                print('%s is generated' % fl_name)
                sum_size = 0
                subfl_list=[]
    ## generate a file list for the remaining files; skip when empty so the
    ## last generated list is not overwritten with an empty one
    if subfl_list:
        write_to_file(fl_name, subfl_list)
        print('%s is generated' % fl_name)
    ## archive the file list files as tar.gz
    fl_arc_file = "filelist-" + current_time + ".tar.gz"
    with tarfile.open(fl_arc_file, "w:gz") as tar:
        tar.add(filelist_dir, arcname=os.path.basename(filelist_dir))
    print('file lists are generated!!')
    print('check %s' % filelist_dir)
    #return os.listdir(filelist_dir)
    return 0

def get_org_files_list(source_file):
    filelist = []
    with do_open(source_file, 'r') as fn:
        for line in fn.readlines():
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
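
Note: the excerpt above is cut off inside get_org_files_list. The generated fl_N.txt lists contain one "source, target" pair per line, joined with the ', ' delimiter defined above. As a rough sketch only (not the original implementation), such a list could be read back as shown below; parse_filelist_sketch is a hypothetical name.

# Sketch only: parse one generated file list back into (source, target) pairs,
# assuming the two-column, ', '-delimited format written by gen_filelist().
def parse_filelist_sketch(fl_path):
    pairs = []
    with do_open(fl_path, 'r') as fn:
        for line in fn:
            line = line.rstrip('\n')
            if not line:
                continue
            src, dst = line.split(delimiter, 1)
            pairs.append((src, dst))
    return pairs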



snowball_uploader_26-success.py [80:146]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
min_part_size = 5 * 1024 ** 2 # 5 MiB; do not change it, this is a Snowball limit
max_part_count = int(math.ceil(max_tarfile_size / max_part_size))
current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
parts = []
delimiter = ', '
## On Python 2, the open() builtin does not accept the encoding parameter
if sys.version_info.major > 2:
    do_open = lambda filename, flag: open(filename, flag, encoding='utf-8')
else:
    do_open = lambda filename, flag: open(filename, flag)
## Caution: modify the rename_file function to fit your own naming rule
def rename_file(org_file):
    #return org_file + "_new_name"
    return org_file

def write_to_file(fl_name, subfl_list):
    ## write one "source, target" entry per line
    with do_open(fl_name, 'w') as f:
        for line in subfl_list:
            f.write("%s\n" % line)
    return 0

def gen_filelist():
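    ## Walk target_path and split its files into fl_N.txt lists of roughly
    ## max_tarfile_size total bytes, then archive the lists as tar.gz.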
    sum_size = 0
    fl_prefix = 'fl_'
    fl_index = 1
    subfl_list = []
    shutil.rmtree(filelist_dir, ignore_errors=True)
    try:
        os.makedirs(filelist_dir)
    except OSError:
        pass
    print('generating file list by size %s bytes' % max_tarfile_size)
    for r,d,f in os.walk(target_path):
        for file in f:
            fl_name = filelist_dir + '/' + fl_prefix + str(fl_index) + ".txt"
            file_name = os.path.join(r,file)
            f_meta = os.stat(file_name)
            f_inode = f_meta.st_ino
            f_size = f_meta.st_size
            #f_dict[f_inode] = {"fname":file_name, "fsize":f_size}
            sum_size = sum_size + f_size
            target_file_name = rename_file(file_name)
            #f_info = [file_name , target_file_name , f_inode, f_size]
            f_info = [file_name , target_file_name]
            f_info_str = delimiter.join(f_info)
            subfl_list.append(f_info_str)
            if max_tarfile_size < sum_size:
                write_to_file(fl_name, subfl_list)
                fl_index = fl_index + 1
                print('%s is generated' % fl_name)
                sum_size = 0
                subfl_list=[]
    ## generate a file list for the remaining files; skip when empty so the
    ## last generated list is not overwritten with an empty one
    if subfl_list:
        write_to_file(fl_name, subfl_list)
        print('%s is generated' % fl_name)
    ## archive the file list files as tar.gz
    fl_arc_file = "filelist-" + current_time + ".tar.gz"
    with tarfile.open(fl_arc_file, "w:gz") as tar:
        tar.add(filelist_dir, arcname=os.path.basename(filelist_dir))
    print('file lists are generated!!')
    print('check %s' % filelist_dir)
    return 0

def get_org_files_list(source_file):
    filelist = []
    with do_open(source_file, 'r') as fn:
        for line in fn.readlines():
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
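
Note: both excerpts rely on globals defined earlier in the scripts, outside the quoted line ranges (the imports plus max_tarfile_size, max_part_size, target_path and filelist_dir). The values below are placeholders chosen here for illustration only, not the scripts' real settings; the last two lines reuse the part-count formula from the excerpts.

# Sketch only: hypothetical values for the globals the excerpted code assumes.
import math

target_path = '/data/to/upload'        # hypothetical source directory
filelist_dir = '/tmp/filelist'         # hypothetical output directory for the fl_N.txt lists
max_tarfile_size = 10 * 1024 ** 3      # hypothetical: 10 GiB per tar archive
max_part_size = 100 * 1024 ** 2        # hypothetical: 100 MiB per multipart part

min_part_size = 5 * 1024 ** 2          # 5 MiB, as in the excerpts
max_part_count = int(math.ceil(max_tarfile_size / max_part_size))
print('a %d-byte tar needs at most %d upload parts' % (max_tarfile_size, max_part_count))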



