in go/analysis/unitchecker/unitchecker.go [192:391]
func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]result, error) {
// Load, parse, typecheck.
var files []*ast.File
for _, name := range cfg.GoFiles {
f, err := parser.ParseFile(fset, name, nil, parser.ParseComments)
if err != nil {
if cfg.SucceedOnTypecheckFailure {
// Silently succeed; let the compiler
// report parse errors.
err = nil
}
return nil, err
}
files = append(files, f)
}
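// The compiler importer loads each dependency's export data from the
// file listed in cfg.PackageFile, keyed by resolved package path.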
compilerImporter := importerForCompiler(fset, cfg.Compiler, func(path string) (io.ReadCloser, error) {
// path is a resolved package path, not an import path.
file, ok := cfg.PackageFile[path]
if !ok {
if cfg.Compiler == "gccgo" && cfg.Standard[path] {
return nil, nil // fall back to default gccgo lookup
}
return nil, fmt.Errorf("no package file for %q", path)
}
return os.Open(file)
})
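// The importer seen by the type checker first maps import paths as
// written in source to resolved package paths via cfg.ImportMap
// (accounting for vendoring), then delegates to compilerImporter.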
importer := importerFunc(func(importPath string) (*types.Package, error) {
path, ok := cfg.ImportMap[importPath] // resolve vendoring, etc
if !ok {
return nil, fmt.Errorf("can't resolve import %q", path)
}
return compilerImporter.Import(path)
})
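// Type-check the parsed files, recording complete type information
// for use by the analyzers.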
tc := &types.Config{
Importer: importer,
Sizes: types.SizesFor("gc", build.Default.GOARCH), // assume gccgo ≡ gc?
}
info := &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
Uses: make(map[*ast.Ident]types.Object),
Implicits: make(map[ast.Node]types.Object),
Scopes: make(map[ast.Node]*types.Scope),
Selections: make(map[*ast.SelectorExpr]*types.Selection),
}
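// Record instantiations of generic types and functions where the
// toolchain supports type parameters (a no-op otherwise).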
typeparams.InitInstanceInfo(info)
pkg, err := tc.Check(cfg.ImportPath, fset, files, info)
if err != nil {
if cfg.SucceedOnTypecheckFailure {
// Silently succeed; let the compiler
// report type errors.
err = nil
}
return nil, err
}
// Register fact types with gob.
// In VetxOnly mode, analyzers are only for their facts,
// so we can skip any analysis that neither produces facts
// nor depends on any analysis that produces facts.
// Also build a map to hold working state and result.
type action struct {
once sync.Once
result interface{}
err error
usesFacts bool // (transitively uses)
diagnostics []analysis.Diagnostic
}
actions := make(map[*analysis.Analyzer]*action)
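// registerFacts reports whether a, or any analyzer it transitively
// requires, declares fact types, registering each fact type with gob
// along the way so that facts can be serialized.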
var registerFacts func(a *analysis.Analyzer) bool
registerFacts = func(a *analysis.Analyzer) bool {
act, ok := actions[a]
if !ok {
act = new(action)
var usesFacts bool
for _, f := range a.FactTypes {
usesFacts = true
gob.Register(f)
}
for _, req := range a.Requires {
if registerFacts(req) {
usesFacts = true
}
}
act.usesFacts = usesFacts
actions[a] = act
}
return act.usesFacts
}
var filtered []*analysis.Analyzer
for _, a := range analyzers {
if registerFacts(a) || !cfg.VetxOnly {
filtered = append(filtered, a)
}
}
analyzers = filtered
// Read facts from imported packages.
read := func(path string) ([]byte, error) {
if vetx, ok := cfg.PackageVetx[path]; ok {
return ioutil.ReadFile(vetx)
}
return nil, nil // no .vetx file, no facts
}
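// Decode the facts of each dependency, fetching its .vetx contents via read.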
facts, err := facts.Decode(pkg, read)
if err != nil {
return nil, err
}
// In parallel, execute the DAG of analyzers.
var exec func(a *analysis.Analyzer) *action
var execAll func(analyzers []*analysis.Analyzer)
exec = func(a *analysis.Analyzer) *action {
act := actions[a]
act.once.Do(func() {
execAll(a.Requires) // prefetch dependencies in parallel
// The inputs to this analysis are the
// results of its prerequisites.
inputs := make(map[*analysis.Analyzer]interface{})
var failed []string
for _, req := range a.Requires {
reqact := exec(req)
if reqact.err != nil {
failed = append(failed, req.String())
continue
}
inputs[req] = reqact.result
}
// Report an error if any dependency failed.
if failed != nil {
sort.Strings(failed)
act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
return
}
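// An analyzer may see only facts of the types it declares.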
factFilter := make(map[reflect.Type]bool)
for _, f := range a.FactTypes {
factFilter[reflect.TypeOf(f)] = true
}
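// The pass gives the analyzer access to the syntax, type information,
// results of its prerequisites, and the facts store; diagnostics are
// accumulated on this action.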
pass := &analysis.Pass{
Analyzer: a,
Fset: fset,
Files: files,
OtherFiles: cfg.NonGoFiles,
IgnoredFiles: cfg.IgnoredFiles,
Pkg: pkg,
TypesInfo: info,
TypesSizes: tc.Sizes,
ResultOf: inputs,
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
ImportObjectFact: facts.ImportObjectFact,
ExportObjectFact: facts.ExportObjectFact,
AllObjectFacts: func() []analysis.ObjectFact { return facts.AllObjectFacts(factFilter) },
ImportPackageFact: facts.ImportPackageFact,
ExportPackageFact: facts.ExportPackageFact,
AllPackageFacts: func() []analysis.PackageFact { return facts.AllPackageFacts(factFilter) },
}
t0 := time.Now()
act.result, act.err = a.Run(pass)
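// Debug timing log, disabled by default.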
if false {
log.Printf("analysis %s = %s", pass, time.Since(t0))
}
})
return act
}
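// execAll starts each analyzer in its own goroutine and waits for all
// to finish; exec's sync.Once ensures each analysis runs at most once
// even when requested by multiple dependents.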
execAll = func(analyzers []*analysis.Analyzer) {
var wg sync.WaitGroup
for _, a := range analyzers {
wg.Add(1)
go func(a *analysis.Analyzer) {
_ = exec(a)
wg.Done()
}(a)
}
wg.Wait()
}
execAll(analyzers)
// Return diagnostics and errors from root analyzers.
results := make([]result, len(analyzers))
for i, a := range analyzers {
act := actions[a]
results[i].a = a
results[i].err = act.err
results[i].diagnostics = act.diagnostics
}
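// Export this package's facts to the .vetx file named by cfg.VetxOutput,
// for use when checking packages that import this one.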
data := facts.Encode()
if err := ioutil.WriteFile(cfg.VetxOutput, data, 0666); err != nil {
return nil, fmt.Errorf("failed to write analysis facts: %v", err)
}
return results, nil
}