in astro/sessions.go [248:330]
// plan runs "terraform plan" for every bound execution, in parallel.
//
// It returns a status channel carrying human-readable progress lines, a
// results channel that receives exactly one *Result per execution and is
// closed once all executions have finished, and an error (currently always
// nil). When detach is true, each execution's remote state is disconnected
// after init and before planning. A signal received on s.signalChan cancels
// all in-flight operations.
func (s *Session) plan(boundExecutions []*boundExecution, detach bool) (<-chan string, <-chan *Result, error) {
	logger.Trace.Println("astro session: running plan")

	numberOfExecutions := len(boundExecutions)

	// Needs to be big enough to buffer log lines from below for tests that
	// don't consume from the channel.
	status := make(chan string, numberOfExecutions*10)
	results := make(chan *Result, numberOfExecutions)

	logger.Trace.Printf("astro: %d executions to plan\n", numberOfExecutions)

	// Create plan functions
	fns := []func(){}
	for _, e := range boundExecutions {
		b := e // per-iteration copy for the closure (required pre-Go 1.22)
		fns = append(fns, func() {
			terraform, err := s.newTerraformSession(b)
			if err != nil {
				results <- &Result{
					id:  b.ID(),
					err: err,
				}
				return
			}

			// Run user-configured PreModuleRun hooks before init/plan.
			// BUGFIX: range over b, not the loop variable e — on pre-1.22
			// Go every closure would otherwise see the last iteration's
			// execution.
			for _, hook := range b.ModuleConfig().Hooks.PreModuleRun {
				status <- fmt.Sprintf("[%s] Running PreModuleRun hook...", b.ID())
				// NOTE(review): "Commandk" looks like a typo in the helper's
				// name — confirm against its definition before renaming.
				if err := runCommandkAndSetEnvironment(s.path, hook); err != nil {
					results <- &Result{
						id:  b.ID(),
						err: fmt.Errorf("error running PreModuleRun hook: %v", err),
					}
					return
				}
			}

			status <- fmt.Sprintf("[%s] Initializing...", b.ID())
			if result, err := terraform.Init(); err != nil {
				results <- &Result{
					id:              b.ID(),
					terraformResult: result,
					err:             err,
				}
				return
			}

			if detach {
				status <- fmt.Sprintf("[%s] Disconnecting remote state...", b.ID())
				if result, err := terraform.Detach(); err != nil {
					results <- &Result{
						id:              b.ID(),
						terraformResult: result,
						err:             err,
					}
					return
				}
			}

			status <- fmt.Sprintf("[%s] Planning...", b.ID())
			result, err := terraform.Plan()
			results <- &Result{
				id:              b.ID(),
				terraformResult: result,
				err:             err,
			}
		})
	}

	ctx, cancel := context.WithCancel(context.Background())

	// Cancel all operations if a signal arrives. Selecting on ctx.Done()
	// lets this goroutine exit after a normal run instead of leaking,
	// blocked on signalChan forever.
	go func() {
		select {
		case sig := <-s.signalChan:
			fmt.Printf("\nReceived signal: %s, cancelling all operations...\n", sig)
			cancel()
		case <-ctx.Done():
		}
	}()

	// Run plans in parallel
	go func() {
		defer close(results) // signals the end of all executions
		defer cancel()       // release ctx and unblock the signal goroutine
		utils.Parallel(ctx, 10, fns...)
	}()

	return status, results, nil
}