in collector/scraper.go [216:287]
func (s *Scraper) scrapeTargets(ctx context.Context) {
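	// Snapshot the current scrape targets and record the scrape time in
	// milliseconds; samples that arrive without a timestamp are stamped with it below.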
	targets := s.Targets()
	scrapeTime := time.Now().UnixNano() / 1e6
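	// Accumulate series into a pooled WriteRequest so each scrape pass avoids
	// allocating a fresh batch; the deferred Put returns it to the pool.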
	wr := prompb.WriteRequestPool.Get()
	defer prompb.WriteRequestPool.Put(wr)
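	// Scrape each target in turn; a failure to reach one endpoint is logged and
	// skipped so it does not stall the rest of the pass.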
	for _, target := range targets {
		logger.Infof("Scraping %s", target.String())
		iter, err := s.scrapeClient.FetchMetricsIterator(target.Addr)
		if err != nil {
			logger.Warnf("Failed to create scrape iterator %s/%s/%s at %s: %s",
				target.Namespace, target.Pod, target.Container, target.Addr, err.Error())
			continue
		}
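		// Decode the scraped payload one time series at a time, reusing pooled
		// TimeSeries values to keep allocations down.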
		for iter.Next() {
			pt := prompb.TimeSeriesPool.Get()
			ts, err := iter.TimeSeriesInto(pt)
			if err != nil {
				logger.Warnf("Failed to parse series %s/%s/%s at %s: %s",
					target.Namespace, target.Pod, target.Container, target.Addr, err.Error())
				continue
			}
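			// Drop series that match the configured drop rules before any further
			// processing, returning the pooled TimeSeries and counting the drop.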
			name := prompb.MetricName(ts)
			if s.requestTransformer.ShouldDropMetric(ts, name) {
				prompb.TimeSeriesPool.Put(ts)
				metrics.MetricsDroppedTotal.WithLabelValues(string(name)).Add(1)
				continue
			}
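			// Samples scraped without an explicit timestamp are stamped with the
			// scrape time captured at the start of this pass.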
			for i, sample := range ts.Samples {
				if sample.Timestamp == 0 {
					sample.Timestamp = scrapeTime
				}
				ts.Samples[i] = sample
			}
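			// Tag the series with the target's namespace, pod, and container so the
			// source of every sample is identifiable downstream.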
			if target.Namespace != "" {
				ts.AppendLabelString("adxmon_namespace", target.Namespace)
			}
			if target.Pod != "" {
				ts.AppendLabelString("adxmon_pod", target.Pod)
			}
			if target.Container != "" {
				ts.AppendLabelString("adxmon_container", target.Container)
			}
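			// Sort the labels, apply the configured transformations, and append the
			// series to the in-flight batch.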
			prompb.Sort(ts.Labels)
			ts = s.requestTransformer.TransformTimeSeries(ts)
			wr.Timeseries = append(wr.Timeseries, ts)
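			// flushBatchIfNecessary sends the batch once it grows large enough and
			// returns the WriteRequest to keep appending to.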
			wr = s.flushBatchIfNecessary(ctx, wr)
		}
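		// Report any error the iterator hit mid-stream, then release it.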
		if err := iter.Err(); err != nil {
			logger.Warnf("Failed to scrape %s/%s/%s at %s: %s", target.Namespace, target.Pod, target.Container, target.Addr, err.Error())
		}
		if err := iter.Close(); err != nil {
			logger.Errorf("Failed to close iterator for %s/%s/%s: %s", target.Namespace, target.Pod, target.Container, err.Error())
		}
		wr = s.flushBatchIfNecessary(ctx, wr)
	}
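	// Send whatever remains in the batch, then reset the slice so the pooled
	// WriteRequest is returned empty by the deferred Put.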
	if err := s.sendBatch(ctx, wr); err != nil {
		logger.Errorf(err.Error())
	}
	wr.Timeseries = wr.Timeseries[:0]
}