def _Run()

in perfkitbenchmarker/linux_benchmarks/speccpu2017_benchmark.py


def _Run(vm):
  """See base method.

  Args:
    vm: The vm to run the benchmark on.

  Returns:
    A list of sample.Sample objects.
  """

  # Apply changes (e.g. compiler flags) to the SPEC config file.
  if 'gcc' in FLAGS.runspec_config:
    _OverwriteGccO3(vm)
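    # _OverwriteGccO3 presumably replaces the config's default -O3
    # optimization flags with the ones requested via --spec17_gcc_flags.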

  # Tune the system before the run: swap only when strictly necessary
  # (swappiness=1), reclaim local NUMA node memory before falling back to
  # remote nodes (zone_reclaim_mode=1), drop the page, dentry, and inode
  # caches, and remove the stack size limit.
  # Consider also setting the enable_transparent_hugepages flag to true.
  cmd = (
      'echo 1 | sudo tee /proc/sys/vm/swappiness && '
      'echo 1 | sudo tee /proc/sys/vm/zone_reclaim_mode && '
      'sync ; echo 3 | sudo tee /proc/sys/vm/drop_caches && '
      'ulimit -s unlimited && '
  )
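  # The settings above are chained onto runcpu with '&&' so they take effect
  # in the same remote shell that launches the benchmark; a separate
  # RemoteCommand would apply ulimit to a different shell.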

  cmd += 'runcpu '
  if FLAGS.spec17_build_only:
    cmd += '--action build '
  if FLAGS.spec17_rebuild:
    cmd += '--rebuild '
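  # runcpu --action build compiles the benchmarks without running them;
  # --rebuild forces recompilation even if binaries already exist.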

  version_specific_parameters = []
  copies = FLAGS.spec17_copies or vm.NumCpusForBenchmark()
  version_specific_parameters.append(f' --copies={copies} ')
  threads = FLAGS.spec17_threads or vm.NumCpusForBenchmark()
  version_specific_parameters.append(f' --threads={threads} ')
  version_specific_parameters.append(
      f' --define build_ncpus={vm.NumCpusForBenchmark()} '
  )
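  # runcpu semantics: --copies sets the number of concurrent copies for rate
  # runs, --threads sets the OpenMP thread count for speed runs, and
  # build_ncpus is a config-file define that parallelizes compilation.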

  if FLAGS.spec17_fdo:
    version_specific_parameters.append('--feedback ')
    vm.RemoteCommand('cd /scratch/cpu2017; mkdir fdo_profiles')
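    # --feedback enables feedback-directed optimization: an instrumented
    # build is trained first, then the final binaries are rebuilt using the
    # collected profiles, which the SPEC config presumably reads from
    # fdo_profiles.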

  start_time = time.time()
  stdout, _ = speccpu.Run(
      vm, cmd, ' '.join(FLAGS.spec17_subset), version_specific_parameters
  )

  if not FLAGS.spec17_best_effort:
    if 'Error' in stdout and 'Please review this file' in stdout:
      raise errors.Benchmarks.RunError('Error during SPEC compilation.')

  if FLAGS.spec17_build_only:
    return [
        sample.Sample(
            'compilation_time',
            time.time() - start_time,
            's',
            {
                'spec17_subset': FLAGS.spec17_subset,
                'gcc_version': build_tools.GetVersion(vm, 'gcc'),
            },
        )
    ]

  partial_results = True
  # Do not allow partial results if any requested subset is a full suite: a
  # suite score is only valid when every component benchmark completes.
  for benchmark_subset in FLAGS.spec17_subset:
    if benchmark_subset in ['intspeed', 'fpspeed', 'intrate', 'fprate']:
      partial_results = False

  log_files = set()
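  # runcpu writes one result report per suite, so an individual test maps to
  # the log file of the suite it belongs to.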
  for test in FLAGS.spec17_subset:
    if test in LOG_FILENAME:
      log_files.add(LOG_FILENAME[test])
    elif test in INTSPEED_SUITE:
      log_files.add(LOG_FILENAME['intspeed'])
    elif test in INTRATE_SUITE:
      log_files.add(LOG_FILENAME['intrate'])
    elif test in FPSPEED_SUITE:
      log_files.add(LOG_FILENAME['fpspeed'])
    elif test in FPRATE_SUITE:
      log_files.add(LOG_FILENAME['fprate'])

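  # Stage each suite log in the home directory and pull it into the local
  # run temp dir so the raw reports are preserved alongside the parsed
  # samples.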
  for log_file in log_files:
    vm.RemoteCommand(
        f'cp {vm.GetScratchDir()}/cpu2017/result/{log_file} ~/{log_file}.log'
    )
    vm.PullFile(vm_util.GetTempDir(), f'~/{log_file}.log')

  samples = speccpu.ParseOutput(vm, log_files, partial_results, None)
  for item in samples:
    item.metadata['vm_name'] = vm.name
    item.metadata['spec17_gcc_flags'] = FLAGS.spec17_gcc_flags
    item.metadata['spec17_numa_bind_config'] = FLAGS.spec17_numa_bind_config
    item.metadata['spec17_copies'] = copies

  return samples
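
For reference, _Run depends on module-level constants (the four suite lists
and LOG_FILENAME) defined elsewhere in speccpu2017_benchmark.py. A minimal
sketch of their shape follows; the suite members are the standard SPEC
CPU2017 benchmarks, but the exact log file names are assumptions for
illustration only.

INTSPEED_SUITE = [
    '600.perlbench_s', '602.gcc_s', '605.mcf_s', '620.omnetpp_s',
    '623.xalancbmk_s', '625.x264_s', '631.deepsjeng_s', '641.leela_s',
    '648.exchange2_s', '657.xz_s',
]
INTRATE_SUITE = [
    '500.perlbench_r', '502.gcc_r', '505.mcf_r', '520.omnetpp_r',
    '523.xalancbmk_r', '525.x264_r', '531.deepsjeng_r', '541.leela_r',
    '548.exchange2_r', '557.xz_r',
]
# FPSPEED_SUITE and FPRATE_SUITE list the fpspeed/fprate benchmarks in the
# same style.

# Maps a suite name to the result report that runcpu writes under result/
# (illustrative filenames).
LOG_FILENAME = {
    'intspeed': 'CPU2017.001.intspeed.refspeed.txt',
    'intrate': 'CPU2017.001.intrate.refrate.txt',
    'fpspeed': 'CPU2017.001.fpspeed.refspeed.txt',
    'fprate': 'CPU2017.001.fprate.refrate.txt',
}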