in cmd/gazelle/update-repos.go [124:306]
// updateRepos implements the update-repos command: it generates external
// repository rules (from command-line arguments or an imported lock/manifest
// file) and merges them into the WORKSPACE file or a -to_macro macro file.
//
// wd is the working directory; args are the remaining command-line arguments.
// The error return is named so the deferred remote-cache cleanup below can
// report a failure without masking an earlier error.
func updateRepos(wd string, args []string) (err error) {
	// Build configuration with all languages: collect each language's
	// configurer, load statements, and rule kinds.
	cexts := make([]config.Configurer, 0, len(languages)+2)
	cexts = append(cexts, &config.CommonConfigurer{}, &updateReposConfigurer{})
	kinds := make(map[string]rule.KindInfo)
	loads := []rule.LoadInfo{}
	for _, lang := range languages {
		cexts = append(cexts, lang)
		loads = append(loads, lang.Loads()...)
		for kind, info := range lang.Kinds() {
			kinds[kind] = info
		}
	}
	c, err := newUpdateReposConfiguration(wd, args, cexts)
	if err != nil {
		return err
	}
	uc := getUpdateReposConfig(c)

	// TODO(jayconrod): move Go-specific RemoteCache logic to language/go.
	// Seed the remote cache with go_repository rules already declared in the
	// workspace so their remote/VCS settings can be reused during resolution.
	var knownRepos []repo.Repo
	for _, r := range c.Repos {
		if r.Kind() == "go_repository" {
			knownRepos = append(knownRepos, repo.Repo{
				Name:     r.Name(),
				GoPrefix: r.AttrString("importpath"),
				Remote:   r.AttrString("remote"),
				VCS:      r.AttrString("vcs"),
			})
		}
	}
	rc, cleanup := repo.NewRemoteCache(knownRepos)
	// Surface a cleanup failure through the named return, but never let it
	// overwrite an error that is already being returned.
	defer func() {
		if cerr := cleanup(); err == nil && cerr != nil {
			err = cerr
		}
	}()

	// Fix the workspace file with each language.
	for _, lang := range filterLanguages(c, languages) {
		lang.Fix(c, uc.workspace)
	}

	// Generate rules from command language arguments or by importing a file.
	// importRepos may also produce "empty" rules, i.e. rules to be deleted.
	var gen, empty []*rule.Rule
	if uc.repoFilePath == "" {
		gen, err = updateRepoImports(c, rc)
	} else {
		gen, empty, err = importRepos(c, rc)
	}
	if err != nil {
		return err
	}

	// Organize generated and empty rules by file. A rule should go into the file
	// it came from (by name). New rules should go into WORKSPACE or the file
	// specified with -to_macro.
	var newGen []*rule.Rule
	genForFiles := make(map[*rule.File][]*rule.Rule)
	emptyForFiles := make(map[*rule.File][]*rule.Rule)
	genNames := make(map[string]*rule.Rule)
	for _, r := range gen {
		// Two distinct import paths must not collapse to the same repository
		// rule name; that would silently drop one of them.
		if existingRule := genNames[r.Name()]; existingRule != nil {
			import1 := existingRule.AttrString("importpath")
			import2 := r.AttrString("importpath")
			return fmt.Errorf("imports %s and %s resolve to the same repository rule name %s",
				import1, import2, r.Name())
		} else {
			genNames[r.Name()] = r
		}
		f := uc.repoFileMap[r.Name()]
		if f != nil {
			genForFiles[f] = append(genForFiles[f], r)
		} else {
			newGen = append(newGen, r)
		}
	}
	for _, r := range empty {
		f := uc.repoFileMap[r.Name()]
		if f == nil {
			// Empty rules are derived from rules that already exist, so a
			// missing file mapping is an internal invariant violation, not a
			// user error.
			panic(fmt.Sprintf("empty rule %q for deletion that was not found", r.Name()))
		}
		emptyForFiles[f] = append(emptyForFiles[f], r)
	}

	// Pick the file that brand-new rules are appended to: the WORKSPACE file
	// by default, or the -to_macro file/def. Reuse a file we already have in
	// hand if it matches; note && binds tighter than || in the condition.
	var newGenFile *rule.File
	var macroPath string
	if uc.macroFileName != "" {
		macroPath = filepath.Join(c.RepoRoot, filepath.Clean(uc.macroFileName))
	}
	for f := range genForFiles {
		if macroPath == "" && wspace.IsWORKSPACE(f.Path) ||
			macroPath != "" && f.Path == macroPath && f.DefName == uc.macroDefName {
			newGenFile = f
			break
		}
	}
	if newGenFile == nil {
		if uc.macroFileName == "" {
			newGenFile = uc.workspace
		} else {
			// Load the macro file from disk, creating an empty one if it
			// doesn't exist yet.
			var err error
			newGenFile, err = rule.LoadMacroFile(macroPath, "", uc.macroDefName)
			if os.IsNotExist(err) {
				newGenFile, err = rule.EmptyMacroFile(macroPath, "", uc.macroDefName)
				if err != nil {
					return fmt.Errorf("error creating %q: %v", macroPath, err)
				}
			} else if err != nil {
				return fmt.Errorf("error loading %q: %v", macroPath, err)
			}
		}
	}
	genForFiles[newGenFile] = append(genForFiles[newGenFile], newGen...)

	// Tag rules destined for WORKSPACE with a fixed insertion index
	// (presumably so they land after the load statements they need —
	// determined by findWorkspaceInsertIndex).
	workspaceInsertIndex := findWorkspaceInsertIndex(uc.workspace, kinds, loads)
	for _, r := range genForFiles[uc.workspace] {
		r.SetPrivateAttr(merger.UnstableInsertIndexKey, workspaceInsertIndex)
	}

	// Merge rules and fix loads in each file. Collect the union of files from
	// both maps, deduplicated, then sort by path and def name so processing
	// (and output) is deterministic despite map iteration order.
	seenFile := make(map[*rule.File]bool)
	sortedFiles := make([]*rule.File, 0, len(genForFiles))
	for f := range genForFiles {
		if !seenFile[f] {
			seenFile[f] = true
			sortedFiles = append(sortedFiles, f)
		}
	}
	for f := range emptyForFiles {
		if !seenFile[f] {
			seenFile[f] = true
			sortedFiles = append(sortedFiles, f)
		}
	}
	// If the macro must also be referenced from WORKSPACE (per
	// ensureMacroInWorkspace), make sure the workspace file itself gets
	// processed even when no rules landed in it.
	if ensureMacroInWorkspace(uc, workspaceInsertIndex) {
		if !seenFile[uc.workspace] {
			seenFile[uc.workspace] = true
			sortedFiles = append(sortedFiles, uc.workspace)
		}
	}
	sort.Slice(sortedFiles, func(i, j int) bool {
		if cmp := strings.Compare(sortedFiles[i].Path, sortedFiles[j].Path); cmp != 0 {
			return cmp < 0
		}
		return sortedFiles[i].DefName < sortedFiles[j].DefName
	})

	// Several *rule.File values can share one physical path (e.g. multiple
	// macro defs in the same .bzl file). Keep one representative per path in
	// updatedFiles and fold later files into it with SyncMacroFile so each
	// path is written exactly once.
	updatedFiles := make(map[string]*rule.File)
	for _, f := range sortedFiles {
		merger.MergeFile(f, emptyForFiles[f], genForFiles[f], merger.PreResolve, kinds)
		merger.FixLoads(f, loads)
		if f == uc.workspace {
			if err := merger.CheckGazelleLoaded(f); err != nil {
				return err
			}
		}
		f.Sync()
		if uf, ok := updatedFiles[f.Path]; ok {
			uf.SyncMacroFile(f)
		} else {
			updatedFiles[f.Path] = f
		}
	}

	// Write updated files to disk, skipping any whose formatted content is
	// unchanged. Deleting the map entry after saving guarantees each path is
	// saved at most once even though sortedFiles may visit it repeatedly.
	for _, f := range sortedFiles {
		if uf := updatedFiles[f.Path]; uf != nil {
			if f.DefName != "" {
				// A non-empty DefName marks a macro file; keep it sorted.
				uf.SortMacro()
			}
			newContent := f.Format()
			if !bytes.Equal(f.Content, newContent) {
				if err := uf.Save(uf.Path); err != nil {
					return err
				}
			}
			delete(updatedFiles, f.Path)
		}
	}
	return nil
}