in go/cmd/aggregate-crls/aggregate-crls.go [308:346]
// downloadCRLs queues one download job per issuer in issuerToUrls and fans
// the work out to *ctconfig.NumThreads crlFetchWorker goroutines. Each
// issuer's CRL URLs are trimmed and parsed; unparseable URLs are logged and
// skipped, and issuers with no valid URL are dropped entirely. The method
// blocks until every worker has finished, then returns the closed result
// channel together with the number of issuers that were queued, so the
// caller can drain resultChan without any further synchronization.
func (ae *AggregateEngine) downloadCRLs(ctx context.Context, issuerToUrls types.IssuerCrlMap) (<-chan types.IssuerCrlUrlPaths, int64) {
	var wg sync.WaitGroup

	// At most one work item is sent per issuer, so a buffer of
	// len(issuerToUrls) guarantees the sends below never block even though
	// the workers have not started yet. (A fixed 16M-entry buffer, as used
	// previously, wastes memory for no benefit.)
	crlChan := make(chan types.IssuerCrlUrls, len(issuerToUrls))

	var count int64
	for issuer, crlMap := range issuerToUrls {
		urls := make([]url.URL, 0, len(crlMap))
		for iUrl := range crlMap {
			urlObj, err := url.Parse(strings.TrimSpace(iUrl))
			if err != nil {
				glog.Warningf("Ignoring URL %s: %s", iUrl, err)
				continue
			}
			urls = append(urls, *urlObj)
		}
		if len(urls) > 0 {
			crlChan <- types.IssuerCrlUrls{
				Issuer: types.NewIssuerFromString(issuer),
				Urls:   urls,
			}
			count++
		}
	}
	// Close the channel before starting workers: all jobs are already
	// buffered, and the close is the workers' termination signal.
	close(crlChan)

	// Each worker emits exactly one result per job, so count is the exact
	// capacity needed for the workers' sends to complete without a reader.
	resultChan := make(chan types.IssuerCrlUrlPaths, count)

	// Start the workers; each drains crlChan until it is exhausted.
	for t := 0; t < *ctconfig.NumThreads; t++ {
		wg.Add(1)
		go ae.crlFetchWorker(ctx, &wg, crlChan, resultChan)
	}
	wg.Wait()
	close(resultChan)
	return resultChan, count
}