def RunCommand()

in gslib/commands/cp.py


  def RunCommand(self):
    copy_helper_opts = self._ParseOpts()

    self.total_bytes_transferred = 0

    dst_url = StorageUrlFromString(self.args[-1])
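    # A destination of '-' (stdout) or an existing named pipe is served by
    # streaming the sources cat-style instead of the normal copy machinery.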
    if dst_url.IsFileUrl() and (dst_url.object_name == '-' or dst_url.IsFifo()):
      if self.preserve_posix_attrs:
        raise CommandException('Cannot preserve POSIX attributes with a '
                               'stream or a named pipe.')
      cat_out_fd = (GetStreamFromFileUrl(dst_url, mode='wb')
                    if dst_url.IsFifo() else None)
      return cat_helper.CatHelper(self).CatUrlStrings(self.args[:-1],
                                                      cat_out_fd=cat_out_fd)

    if copy_helper_opts.read_args_from_stdin:
      if len(self.args) != 1:
        raise CommandException('Source URLs cannot be specified with -I option')
      # Use StdinIteratorCls instead of StdinIterator here to avoid Python 3
      # generator pickling errors when multiprocessing a command.
      src_url_strs = [StdinIteratorCls()]
    else:
      if len(self.args) < 2:
        raise CommandException('Wrong number of arguments for "cp" command.')
      src_url_strs = [self.args[:-1]]

    dst_url_strs = [self.args[-1]]

    self.combined_src_urls = []
    self.has_file_dst = False
    self.has_cloud_dst = False
    self.provider_types = set()
    # Because cp may have multiple source URLs and multiple destinations, we
    # wrap the name expansion iterator in order to collect analytics.
    name_expansion_iterator = CopyObjectsIterator(
        self._ConstructNameExpansionIteratorDstTupleIterator(
            src_url_strs, dst_url_strs),
        copy_helper_opts.daisy_chain,
    )

    process_count, thread_count = self._GetProcessAndThreadCount(
        process_count=None,
        thread_count=None,
        parallel_operations_override=None,
        print_macos_warning=False)
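    # Surface any reauthentication challenge for the destination provider in
    # the main process, before parallel workers begin issuing requests.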
    copy_helper.TriggerReauthForDestinationProviderIfNecessary(
        dst_url, self.gsutil_api, process_count * thread_count)

    seek_ahead_iterator = None
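    # The seek-ahead iterator expands names ahead of the main iterator so the
    # progress/estimation machinery can report expected totals up front.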
    # Cannot seek ahead with stdin args, since we can only iterate them
    # once without buffering in memory.
    if not copy_helper_opts.read_args_from_stdin:
      seek_ahead_iterator = SeekAheadNameExpansionIterator(
          self.command_name,
          self.debug,
          self.GetSeekAheadGsutilApi(),
          self.combined_src_urls,
          self.recursion_requested or copy_helper_opts.perform_mv,
          all_versions=self.all_versions,
          project_id=self.project_id,
          ignore_symlinks=self.exclude_symlinks,
          file_size_will_change=self.use_stet)

    # Use a lock to ensure accurate statistics in the face of
    # multi-threading/multi-processing.
    self.stats_lock = parallelism_framework_util.CreateLock()

    # Tracks if any copies failed.
    self.op_failure_count = 0

    # Start the clock.
    start_time = time.time()

    # Tuple of attributes to share/manage across multiple processes in
    # parallel (-m) mode.
    shared_attrs = ('op_failure_count', 'total_bytes_transferred')

    # Perform copy requests in parallel (-m) mode, if requested, using
    # configured number of parallel processes and threads. Otherwise,
    # perform requests with sequential function calls in current process.
    self.Apply(_CopyFuncWrapper,
               name_expansion_iterator,
               _CopyExceptionHandler,
               shared_attrs,
               fail_on_error=(not self.continue_on_error),
               seek_ahead_iterator=seek_ahead_iterator)
    self.logger.debug('total_bytes_transferred: %d',
                      self.total_bytes_transferred)

    end_time = time.time()
    self.total_elapsed_time = end_time - start_time
    self.total_bytes_per_second = CalculateThroughput(
        self.total_bytes_transferred, self.total_elapsed_time)
    LogPerformanceSummaryParams(
        has_file_dst=self.has_file_dst,
        has_cloud_dst=self.has_cloud_dst,
        avg_throughput=self.total_bytes_per_second,
        total_bytes_transferred=self.total_bytes_transferred,
        total_elapsed_time=self.total_elapsed_time,
        uses_fan=self.parallel_operations,
        is_daisy_chain=copy_helper_opts.daisy_chain,
        provider_types=list(self.provider_types))

    if self.debug >= DEBUGLEVEL_DUMP_REQUESTS:
      # Note that this only counts the actual GET and PUT bytes for the copy
      # - not any transfers for doing wildcard expansion, the initial
      # HEAD/GET request performed to get the object metadata, etc.
      if self.total_bytes_transferred != 0:
        self.logger.info(
            'Total bytes copied=%d, total elapsed time=%5.3f secs (%sps)',
            self.total_bytes_transferred, self.total_elapsed_time,
            MakeHumanReadable(self.total_bytes_per_second))
    if self.op_failure_count:
      plural_str = 's' if self.op_failure_count > 1 else ''
      raise CommandException('{count} file{pl}/object{pl} could '
                             'not be transferred.'.format(
                                 count=self.op_failure_count, pl=plural_str))

    return 0
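
A few of the steps above lean on helpers whose behavior is worth
illustrating. First, the dst_url.IsFifo() branch: whether a local
destination is a named pipe is a property of the file on disk. A minimal
sketch of such a check using only the standard library (illustrative, not
gsutil's StorageUrl implementation):

import os
import stat


def is_fifo(path):
  # A FIFO (named pipe) is identified by the file's mode bits; a path
  # that does not exist yet is simply not a FIFO.
  try:
    return stat.S_ISFIFO(os.stat(path).st_mode)
  except OSError:
    return False

When the destination is a FIFO, RunCommand opens it with
GetStreamFromFileUrl(dst_url, mode='wb') and hands the stream to CatHelper,
so the sources are concatenated into the pipe rather than copied as files.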
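
Second, the comment about StdinIteratorCls points at a real Python 3
constraint: generator objects cannot be pickled, so they cannot be shipped
to worker processes, whereas a plain object whose __iter__ is lazy pickles
fine. A minimal sketch (StdinLines is a hypothetical stand-in, not gsutil's
StdinIteratorCls):

import pickle
import sys


def stdin_lines_gen():
  # Generator function: lazy, but the generator object it returns
  # cannot be pickled.
  for line in sys.stdin:
    yield line.rstrip('\n')


class StdinLines(object):
  # The instance carries no generator state, so it pickles cleanly;
  # iteration only begins when a worker calls iter() on it.

  def __iter__(self):
    for line in sys.stdin:
      yield line.rstrip('\n')


pickle.dumps(StdinLines())  # Fine: an ordinary, picklable object.
try:
  pickle.dumps(stdin_lines_gen())
except TypeError as e:
  print(e)  # cannot pickle 'generator' object

The one-pass nature of stdin is also why seek_ahead_iterator stays None
under -I: the arguments cannot be iterated a second time to estimate total
work without buffering them in memory.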
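
Third, the stats_lock/shared_attrs pairing: op_failure_count and
total_bytes_transferred are read-modify-write counters, and such updates
are not atomic across threads. A minimal sketch of the fix, using plain
threading rather than gsutil's parallelism framework:

import threading


class CopyStats(object):
  # Toy stand-in for the command object's shared counters.

  def __init__(self):
    self.stats_lock = threading.Lock()
    self.total_bytes_transferred = 0
    self.op_failure_count = 0

  def record(self, num_bytes, failed=False):
    # Every read-modify-write on the shared counters happens under the
    # lock, so concurrent updates cannot interleave and lose increments.
    with self.stats_lock:
      if failed:
        self.op_failure_count += 1
      else:
        self.total_bytes_transferred += num_bytes


stats = CopyStats()
threads = [threading.Thread(target=stats.record, args=(1024,))
           for _ in range(8)]
for t in threads:
  t.start()
for t in threads:
  t.join()
assert stats.total_bytes_transferred == 8 * 1024

Under -m with multiple processes, the framework must additionally merge
each worker process's view of the attributes named in shared_attrs back
into the parent command object, which is presumably why they are declared
by name rather than simply mutated.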
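
Finally, the summary math: throughput is total bytes over elapsed wall
time, rendered with binary units for the '(...ps)' suffix in the log line.
A sketch of equivalent logic (an assumption about behavior, not the actual
CalculateThroughput/MakeHumanReadable implementations):

def calculate_throughput(total_bytes, elapsed_seconds):
  # Floor the elapsed time to avoid dividing by (near-)zero on very
  # fast transfers.
  return float(total_bytes) / max(elapsed_seconds, 0.01)


def make_human_readable(num_bytes):
  # Binary (1024-based) units, as is typical for transfer-rate output.
  for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'):
    if num_bytes < 1024.0:
      return '%.2f %s' % (num_bytes, unit)
    num_bytes /= 1024.0
  return '%.2f PiB' % num_bytes


# 3 GiB copied in 120 seconds reports roughly '25.60 MiB' per second.
print(make_human_readable(calculate_throughput(3 * 1024 ** 3, 120.0)))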