def upload_to_native_imagestore()

in src/sfctl/custom_app.py [0:0]


def upload_to_native_imagestore(sesh, endpoint, abspath, basename, #pylint: disable=too-many-locals,too-many-arguments
                                show_progress, timeout):
    """
    Upload the application package to the cluster's native image store.

    :param sesh: A requests library Session object.
    :param endpoint: Connection URL endpoint for upload requests.
    :param abspath: Application source path.
    :param basename: Image store destination path.
    :param show_progress: Boolean indicating whether to log upload progress.
    :param timeout: Total upload timeout in seconds.
    """

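    # urllib's parsing/encoding helpers moved between modules from Python 2 to Python 3;
    # try the Python 3 locations first and fall back to the Python 2 ones.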
    try:
        from urllib.parse import urlparse, urlencode, urlunparse
    except ImportError:
        from urllib import urlencode
        from urlparse import urlparse, urlunparse  # pylint: disable=import-error
    total_files_count = 0
    current_files_count = 0
    for root, _, files in os.walk(abspath):
        # The upload count is every file plus one '_.dir' completion marker per directory
        total_files_count += (len(files) + 1)

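    # target_timeout is an absolute deadline (epoch seconds); jobcount is the number of
    # parallel upload workers handed to joblib below.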
    target_timeout = int(time()) + timeout
    jobcount = get_job_count()

    # Note: we raise our own exceptions when the overall upload deadline is hit, but we leave
    # timeouts raised by the requests library as-is, since those already carry enough information
    for root, _, files in os.walk(abspath):
        rel_path = os.path.normpath(os.path.relpath(root, abspath))
        filecount = len(files)

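        # Upload every file in this directory in parallel; when progress display is enabled,
        # wrap the joblib run in a tqdm progress bar via tqdm_joblib.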
        if show_progress:
            progressdescription = 'Uploading path: {}'.format(rel_path)
            with tqdm_joblib(tqdm(desc=progressdescription, total=filecount)):
                Parallel(n_jobs=jobcount)(
                    delayed(upload_single_file_native_imagestore)(
                        sesh, endpoint, basename, rel_path, single_file, root, target_timeout)
                        for single_file in files)
        else:
            Parallel(n_jobs=jobcount)(
                delayed(upload_single_file_native_imagestore)(
                    sesh, endpoint, basename, rel_path, single_file, root, target_timeout)
                    for single_file in files)

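        # Re-check the overall deadline after each directory; zero time left means the
        # upload has run out of time.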
        current_time_left = get_timeout_left(target_timeout)

        if current_time_left == 0:
            raise SFCTLInternalException('Upload has timed out. Consider passing a longer '
                                         'timeout duration.')

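        # After the directory's files are uploaded, PUT an empty '_.dir' marker so the
        # native image store treats the directory as completely uploaded.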
        url_path = (
            os.path.normpath(os.path.join('ImageStore', basename,
                                          rel_path, '_.dir'))
        ).replace('\\', '/')
        url_parsed = list(urlparse(endpoint))
        url_parsed[2] = url_path
        url_parsed[4] = urlencode({'api-version': '6.1',
                                   'timeout': current_time_left})
        url = urlunparse(url_parsed)
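        # Resulting URL: <endpoint>/ImageStore/<basename>/<rel_path>/_.dir
        #                ?api-version=6.1&timeout=<seconds-remaining>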

        res = sesh.put(url,
                       timeout=(get_lesser(60, current_time_left), current_time_left))
        res.raise_for_status()
        current_files_count += filecount + 1
        print_progress(current_files_count, total_files_count,
                       os.path.normpath(os.path.join(rel_path, '_.dir')),
                       show_progress, get_timeout_left(target_timeout))
    if show_progress:
        print('Complete', file=sys.stderr)
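
A minimal usage sketch, not taken from the source: the endpoint, package path, and timeout
below are illustrative assumptions; in sfctl the session and endpoint are prepared by the CLI
(from the selected cluster and the `sfctl application upload` arguments) before this helper
is called.

import os
import requests

# Hypothetical values for illustration only.
endpoint = 'http://localhost:19080'         # assumed unsecured local dev cluster
package_path = os.path.abspath('MyAppPkg')  # assumed local application package folder

with requests.Session() as sesh:
    # Certificate / authentication setup on the session is omitted here.
    upload_to_native_imagestore(
        sesh,
        endpoint,
        package_path,
        os.path.basename(package_path),  # image store destination path
        show_progress=True,
        timeout=300)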