def upload_files()

in src/s3_util.py
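The method relies on glob, math, os, and multiprocessing.Pool being imported at module level (assumed to be present in src/s3_util.py, since the snippet below is an extract).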


    def upload_files(self, local_dir, remote_path, quiet_mode=True, num_workers=None):
        """
        Uploads the files in a local directory to S3.

        :param local_dir: The local directory whose files are uploaded
        :param remote_path: The remote S3 prefix to upload under
        :param quiet_mode: If False, prints the status of each file uploaded
        :param num_workers: The number of worker processes; defaults to one less than the CPU count
        """
        def remote_key(file_path, remote_prefix, local_root):
            # Key = remote prefix + the file's path relative to the local root,
            # normalised to "/" separators. os.path.relpath replaces the original
            # lstrip call, which stripped a character set rather than a prefix.
            relative = os.path.relpath(os.path.expandvars(file_path), os.path.expandvars(local_root))
            return "{}/{}".format(remote_prefix.rstrip("/"), "/".join(relative.split(os.path.sep)))

        input_tuples = [(f, remote_key(f, remote_path, local_dir), quiet_mode)
                        for f in glob.glob("{}/**".format(local_dir), recursive=True)
                        if os.path.isfile(f)]

        # Resolve the worker count here: os.cpu_count() can return None, and at
        # least one worker is needed so the partition size below is never zero.
        if num_workers is None:
            num_workers = (os.cpu_count() or 2) - 1
        num_workers = max(1, num_workers)

        # Split the file list into one roughly equal chunk per worker.
        partition_size = max(1, math.ceil(len(input_tuples) / num_workers))
        partitioned_input_tuples = [input_tuples[i:i + partition_size] for i in
                                    range(0, len(input_tuples), partition_size)]

        with Pool(num_workers) as processpool:
            processpool.map(self._uploadfiles_multi_thread, partitioned_input_tuples)
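Each partition is handed to self._uploadfiles_multi_thread, which is not shown in this section. Below is a minimal sketch of what such a worker method could look like, assuming a boto3 S3 client and a self.bucket attribute on the same class (both assumptions, not confirmed by the source):

    import boto3
    from concurrent.futures import ThreadPoolExecutor

    def _uploadfiles_multi_thread(self, input_tuples):
        # boto3 clients are not picklable, so each worker process builds its own.
        s3 = boto3.client("s3")

        def upload_one(item):
            local_file, remote_key, quiet_mode = item
            s3.upload_file(local_file, self.bucket, remote_key)  # self.bucket is an assumed attribute
            if not quiet_mode:
                print("Uploaded {} to s3://{}/{}".format(local_file, self.bucket, remote_key))

        # Uploads are I/O bound, so each process also fans out to a small thread
        # pool, matching the "multi_thread" suffix in the method name.
        with ThreadPoolExecutor(max_workers=8) as pool:
            list(pool.map(upload_one, input_tuples))

Under this reading, upload_files gives process-level parallelism across partitions and thread-level parallelism within each one.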