in sweet/cmd/sweet/benchmark.go [159:304]
func (b *benchmark) execute(cfgs []*common.Config, r *runCfg) error {
log.Printf("Setting up benchmark: %s", b.name)
// Compute top-level directories for this benchmark to work in.
topAssetsDir := filepath.Join(r.assetsDir, b.name)
benchDir := filepath.Join(r.benchDir, b.name)
topDir := filepath.Join(r.workDir, b.name)
srcDir := filepath.Join(topDir, "src")
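// Benchmarks may ship a read-only assets directory; if it exists, its
// contents are staged into a fresh per-config assets dir before every run
// and cleaned up afterwards.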
hasAssets, err := fileutil.FileExists(topAssetsDir)
if err != nil {
return err
}
// Retrieve the benchmark's source.
if err := b.harness.Get(srcDir); err != nil {
return fmt.Errorf("retrieving source for %s: %v", b.name, err)
}
// Create the results directory for the benchmark.
resultsDir := filepath.Join(r.resultsDir, b.name)
if err := mkdirAll(resultsDir); err != nil {
return fmt.Errorf("creating results directory for %s: %v", b.name, err)
}
// Perform a setup step for each config for the benchmark.
setups := make([]common.RunConfig, 0, len(cfgs))
for _, cfg := range cfgs {
// Create directory hierarchy for benchmarks.
workDir := filepath.Join(topDir, cfg.Name)
binDir := filepath.Join(workDir, "bin")
tmpDir := filepath.Join(workDir, "tmp")
assetsDir := filepath.Join(workDir, "assets")
if err := mkdirAll(binDir); err != nil {
return fmt.Errorf("create %s bin for %s: %v", b.name, cfg.Name, err)
}
if err := mkdirAll(srcDir); err != nil {
return fmt.Errorf("create %s src for %s: %v", b.name, cfg.Name, err)
}
if err := mkdirAll(tmpDir); err != nil {
return fmt.Errorf("create %s tmp for %s: %v", b.name, cfg.Name, err)
}
if hasAssets {
if err := mkdirAll(assetsDir); err != nil {
return fmt.Errorf("create %s assets dir for %s: %v", b.name, cfg.Name, err)
}
}
// Build the benchmark (application and any other necessary components).
bcfg := common.BuildConfig{
BinDir: binDir,
SrcDir: srcDir,
BenchDir: benchDir,
}
if err := b.harness.Build(cfg, &bcfg); err != nil {
return fmt.Errorf("build %s for %s: %v", b.name, cfg.Name, err)
}
// Generate any args to funnel through to benchmarks.
args := []string{}
if r.dumpCore {
// Create a directory for the core files to live in.
resultsCoresDir := filepath.Join(resultsDir, "core")
if err := mkdirAll(resultsCoresDir); err != nil {
return fmt.Errorf("create %s cores dir for %s: %v", b.name, cfg.Name, err)
}
// We need to pass an argument to the benchmark binary to generate
// a core file. See benchmarks/internal/driver for details.
args = append(args, "-dump-cores", resultsCoresDir)
// Copy the bin directory so that the binaries may be used to analyze
// the core dump.
resultsBinDir := filepath.Join(resultsDir, "bin")
if err := mkdirAll(resultsBinDir); err != nil {
return fmt.Errorf("create %s bin results dir for %s: %v", b.name, cfg.Name, err)
}
if err := copyDirContents(resultsBinDir, binDir); err != nil {
return fmt.Errorf("copy %s binaries for %s: %v", b.name, cfg.Name, err)
}
}
if r.cpuProfile || r.memProfile || r.perf {
// Create a directory for any profile files to live in.
resultsProfilesDir := filepath.Join(resultsDir, fmt.Sprintf("%s.debug", cfg.Name))
if err := mkdirAll(resultsProfilesDir); err != nil {
return fmt.Errorf("create %s profiles dir for %s: %v", b.name, cfg.Name, err)
}
// We need to pass arguments to the benchmark binary to generate
// profiles. See benchmarks/internal/driver for details.
if r.cpuProfile {
args = append(args, "-cpuprofile", resultsProfilesDir)
}
if r.memProfile {
args = append(args, "-memprofile", resultsProfilesDir)
}
if r.perf {
args = append(args, "-perf", resultsProfilesDir)
if r.perfFlags != "" {
args = append(args, "-perf-flags", r.perfFlags)
}
}
}
results, err := os.Create(filepath.Join(resultsDir, fmt.Sprintf("%s.results", cfg.Name)))
if err != nil {
return fmt.Errorf("create %s results file for %s: %v", b.name, cfg.Name, err)
}
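// Keep the file open for the run loop below; these deferred Closes fire
// only when execute returns.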
defer results.Close()
setups = append(setups, common.RunConfig{
BinDir: binDir,
TmpDir: tmpDir,
AssetsDir: assetsDir,
Args: args,
Results: results,
})
}
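// Run phase: each iteration below runs every configuration once, so runs
// of different configurations are interleaved in time rather than batched
// per config.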
for j := 0; j < r.count; j++ {
// Execute the benchmark for each configuration.
for i, setup := range setups {
if hasAssets {
// Set up assets directory for test run.
if err := copyDirContents(setup.AssetsDir, topAssetsDir); err != nil {
return err
}
}
log.Printf("Running benchmark %s for %s: run %d", b.name, cfgs[i].Name, j+1)
// Force a GC now because we're about to turn it off.
runtime.GC()
// Hold your breath: we're turning off GC for the duration of the
// run so that the suite's GC doesn't start blasting on all Ps,
// introducing undue noise into the experiments.
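// SetGCPercent(-1) disables the collector entirely and returns the
// previous GOGC setting so it can be restored once the run completes.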
gogc := debug.SetGCPercent(-1)
if err := b.harness.Run(cfgs[i], &setup); err != nil {
debug.SetGCPercent(gogc)
setup.Results.Close()
return fmt.Errorf("run benchmark %s for config %s: %v", b.name, cfgs[i].Name, err)
}
debug.SetGCPercent(gogc)
// Clean up tmp directory so benchmarks may assume it's empty.
if err := rmDirContents(setup.TmpDir); err != nil {
return err
}
if hasAssets {
// Clean up assets directory just in case any of the files were written to.
if err := rmDirContents(setup.AssetsDir); err != nil {
return err
}
}
}
}
return nil
}
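// The helpers used above (mkdirAll, copyDirContents, rmDirContents) are
// defined elsewhere in this package, and fileutil.FileExists comes from
// sweet/common/fileutil; none of them appear in this excerpt. The sketches
// below are assumptions inferred only from how execute calls them (note
// that copyDirContents takes (dst, src), per the calls above) and are not
// the real implementations.

// mkdirAll creates dir and any missing parents (sketch).
func mkdirAll(dir string) error {
	return os.MkdirAll(dir, 0755)
}

// copyDirContents recursively copies the entries of src into dst (sketch;
// the real helper may treat symlinks and permissions differently).
func copyDirContents(dst, src string) error {
	entries, err := os.ReadDir(src)
	if err != nil {
		return err
	}
	for _, e := range entries {
		sp := filepath.Join(src, e.Name())
		dp := filepath.Join(dst, e.Name())
		if e.IsDir() {
			if err := os.MkdirAll(dp, 0755); err != nil {
				return err
			}
			if err := copyDirContents(dp, sp); err != nil {
				return err
			}
			continue
		}
		data, err := os.ReadFile(sp)
		if err != nil {
			return err
		}
		if err := os.WriteFile(dp, data, 0644); err != nil {
			return err
		}
	}
	return nil
}

// rmDirContents removes everything inside dir while keeping dir itself
// (sketch), so benchmarks can assume an existing but empty directory.
func rmDirContents(dir string) error {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if err := os.RemoveAll(filepath.Join(dir, e.Name())); err != nil {
			return err
		}
	}
	return nil
}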