in internal/search/search.go [124:195]
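// multiNodeSearch fans a search request out to every endpoint in
// s.ForwardTo, merges the responses, and trims the combined result to the
// limits configured in s.Options.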
func (searcher *Searcher) multiNodeSearch(ctx context.Context, s *SearchRequest) (*SearchResult, error) {
	var wg sync.WaitGroup
	responses := make(chan ZoektResponse, len(s.ForwardTo))
	timeoutCtx, cancel := context.WithTimeout(ctx, s.Timeout)
	defer cancel()
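	// Fan out one goroutine per endpoint. The channel is buffered to
	// len(s.ForwardTo), so sends normally never block; the Done case is a
	// safeguard if the context is cancelled before a send completes.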
	for i := range s.ForwardTo {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			resp := searcher.DoSearch(timeoutCtx, s, &s.ForwardTo[i])
			select {
			case responses <- resp:
			case <-timeoutCtx.Done():
			}
		}(i)
	}
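	// Close the channel once every worker has finished so the consumer
	// knows no more responses are coming.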
	go func() {
		wg.Wait()
		close(responses)
	}()
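	// combineResults drains the channel and merges the per-endpoint
	// responses into a single result. A timeout that produced no file
	// matches at all is reported as an error.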
	result := combineResults(timeoutCtx, responses, s, cancel)
	if result.TimedOut && len(result.Result.Files) == 0 {
		return nil, fmt.Errorf("search timed out")
	}
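	// If every endpoint failed, surface the combined per-endpoint errors
	// instead of returning an empty result.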
	if len(result.Failures) == len(s.ForwardTo) {
		var errMsgs []error
		for _, failure := range result.Failures {
			errMsgs = append(errMsgs, fmt.Errorf("%s: %s", failure.Endpoint, failure.Error))
		}
		combinedErr := errors.Join(errMsgs...)
		return nil, fmt.Errorf("all searches failed: %w", combinedErr)
	}
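	// Order the merged files, then apply the file-count cap if one is
	// configured.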
	result.sort()
	if s.Options.MaxFileMatchResults > 0 && len(result.Result.Files) > int(s.Options.MaxFileMatchResults) {
		result.Result.Files = result.Result.Files[:s.Options.MaxFileMatchResults]
	}
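	// Enforce the line-match budgets: trim each file to the per-file cap,
	// then stop once the global cap is exhausted, dropping any later files.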
	if s.Options.MaxLineMatchResults > 0 {
		remaining := int(s.Options.MaxLineMatchResults)
		maxPerFile := int(s.Options.MaxLineMatchResultsPerFile)
		for i := range result.Result.Files {
			lines := result.Result.Files[i].LineMatches
			if maxPerFile > 0 && len(lines) > maxPerFile {
				lines = lines[:maxPerFile]
			}
			if len(lines) > remaining {
				lines = lines[:remaining]
			}
			result.Result.Files[i].LineMatches = lines
			remaining -= len(lines)
			if remaining <= 0 {
				result.Result.Files = result.Result.Files[:i+1]
				break
			}
		}
	}
	return result, nil
}