in exporter/splunkhecexporter/metricdata_to_splunk.go [54:220]
// mapMetricToSplunkEvent converts one OTLP metric into the Splunk HEC events
// that represent it. Resource attributes matching the configured HEC keys
// resolve the event routing values (host, source, sourcetype, index); all
// other resource attributes are copied into every event's fields. Each
// number data point becomes one event; histogram points expand into
// sum/count/cumulative-bucket events and summary points into
// sum/count/quantile events. Returns nil for unsupported or empty metric
// types (a warning is logged for unsupported ones).
func mapMetricToSplunkEvent(res pcommon.Resource, m pmetric.Metric, config *Config, logger *zap.Logger) []*splunk.Event {
	sourceKey := config.HecToOtelAttrs.Source
	sourceTypeKey := config.HecToOtelAttrs.SourceType
	indexKey := config.HecToOtelAttrs.Index
	hostKey := config.HecToOtelAttrs.Host
	host := unknownHostName
	source := config.Source
	sourceType := config.SourceType
	index := config.Index
	commonFields := map[string]any{}
	for k, v := range res.Attributes().All() {
		switch k {
		case hostKey:
			host = v.Str()
		case sourceKey:
			source = v.Str()
		case sourceTypeKey:
			sourceType = v.Str()
		case indexKey:
			index = v.Str()
		case splunk.HecTokenLabel:
			// The HEC token must never leak into event fields; drop it.
		default:
			commonFields[k] = v.AsString()
		}
	}
	metricFieldName := splunkMetricValue + ":" + m.Name()

	// newFields seeds a fresh field map with the shared resource fields,
	// the data point attributes, and the metric-type tag.
	newFields := func(attrs pcommon.Map, metricType string) map[string]any {
		fields := cloneMap(commonFields)
		populateAttributes(fields, attrs)
		fields[splunkMetricTypeKey] = metricType
		return fields
	}
	// newEvent wraps createEvent with the routing values resolved above.
	newEvent := func(ts pcommon.Timestamp, fields map[string]any) *splunk.Event {
		return createEvent(ts, host, source, sourceType, index, fields)
	}
	// numberEvents renders a slice of number data points; it is shared by
	// the Gauge and Sum branches, which differ only in the type tag. A data
	// point with an empty value type still produces an event, just without
	// the metric value field.
	numberEvents := func(pts pmetric.NumberDataPointSlice, metricType string) []*splunk.Event {
		events := make([]*splunk.Event, pts.Len())
		for i := 0; i < pts.Len(); i++ {
			dataPt := pts.At(i)
			fields := newFields(dataPt.Attributes(), metricType)
			switch dataPt.ValueType() {
			case pmetric.NumberDataPointValueTypeInt:
				fields[metricFieldName] = dataPt.IntValue()
			case pmetric.NumberDataPointValueTypeDouble:
				fields[metricFieldName] = sanitizeFloat(dataPt.DoubleValue())
			}
			events[i] = newEvent(dataPt.Timestamp(), fields)
		}
		return events
	}

	//exhaustive:enforce
	switch m.Type() {
	case pmetric.MetricTypeGauge:
		return numberEvents(m.Gauge().DataPoints(), pmetric.MetricTypeGauge.String())
	case pmetric.MetricTypeSum:
		return numberEvents(m.Sum().DataPoints(), pmetric.MetricTypeSum.String())
	case pmetric.MetricTypeHistogram:
		histType := pmetric.MetricTypeHistogram.String()
		pts := m.Histogram().DataPoints()
		var splunkMetrics []*splunk.Event
		for i := 0; i < pts.Len(); i++ {
			dataPt := pts.At(i)
			bounds := dataPt.ExplicitBounds()
			counts := dataPt.BucketCounts()
			// One event for the sum, skipped when absent or NaN ...
			if dataPt.HasSum() && !math.IsNaN(dataPt.Sum()) {
				fields := newFields(dataPt.Attributes(), histType)
				fields[metricFieldName+sumSuffix] = dataPt.Sum()
				splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
			}
			// ... and one for the count, which is always present.
			{
				fields := newFields(dataPt.Attributes(), histType)
				fields[metricFieldName+countSuffix] = dataPt.Count()
				splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
			}
			// Spec says counts is optional but if present it must have one
			// more element than the bounds array.
			if counts.Len() == 0 || counts.Len() != bounds.Len()+1 {
				continue
			}
			// Emit cumulative bucket counts, one event per explicit bound.
			cumulative := uint64(0)
			for bi := 0; bi < bounds.Len(); bi++ {
				cumulative += counts.At(bi)
				fields := newFields(dataPt.Attributes(), histType)
				fields["le"] = float64ToDimValue(bounds.At(bi))
				fields[metricFieldName+bucketSuffix] = cumulative
				splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
			}
			// Final +Inf bucket: all prior buckets plus the overflow count,
			// i.e. the total observation count.
			fields := newFields(dataPt.Attributes(), histType)
			fields["le"] = float64ToDimValue(math.Inf(1))
			fields[metricFieldName+bucketSuffix] = cumulative + counts.At(counts.Len()-1)
			splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
		}
		return splunkMetrics
	case pmetric.MetricTypeSummary:
		summaryType := pmetric.MetricTypeSummary.String()
		pts := m.Summary().DataPoints()
		var splunkMetrics []*splunk.Event
		for i := 0; i < pts.Len(); i++ {
			dataPt := pts.At(i)
			// One event for the sum, skipped when NaN ...
			if !math.IsNaN(dataPt.Sum()) {
				fields := newFields(dataPt.Attributes(), summaryType)
				fields[metricFieldName+sumSuffix] = dataPt.Sum()
				splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
			}
			// ... and one for the count, which is always present.
			{
				fields := newFields(dataPt.Attributes(), summaryType)
				fields[metricFieldName+countSuffix] = dataPt.Count()
				splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
			}
			// One event per quantile, keyed by the quantile value itself.
			for qi := 0; qi < dataPt.QuantileValues().Len(); qi++ {
				qv := dataPt.QuantileValues().At(qi)
				fields := newFields(dataPt.Attributes(), summaryType)
				fields["qt"] = float64ToDimValue(qv.Quantile())
				fields[metricFieldName+"_"+strconv.FormatFloat(qv.Quantile(), 'f', -1, 64)] = sanitizeFloat(qv.Value())
				splunkMetrics = append(splunkMetrics, newEvent(dataPt.Timestamp(), fields))
			}
		}
		return splunkMetrics
	case pmetric.MetricTypeExponentialHistogram:
		logger.Warn(
			"Point with unsupported type ExponentialHistogram",
			zap.Any("metric", m))
		return nil
	case pmetric.MetricTypeEmpty:
		return nil
	default:
		logger.Warn(
			"Point with unsupported type",
			zap.Any("metric", m))
		return nil
	}
}