diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0b81d70ccdc..3c21c4726cc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -42,6 +42,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
 - Cumulative metrics from the OpenCensus bridge (`go.opentelemetry.io/otel/bridge/opencensus`) are defined as monotonic sums, instead of non-monotonic. (#3389)
 - Asynchronous counters (`Counter` and `UpDownCounter`) from the metric SDK now produce delta sums when configured with delta temporality. (#3398)
 - Exported `Status` codes in the `go.opentelemetry.io/otel/exporters/zipkin` exporter are now exported as all upper case values. (#3340)
+- `Aggregation`s from `go.opentelemetry.io/otel/sdk/metric` with no data are not exported. (#3394, #3436)
 - Reenabled Attribute Filters in the Metric SDK. (#3396)
 - Do not report empty partial-success responses in the `go.opentelemetry.io/otel/exporters/otlp` exporters. (#3438, #3432)
 - Handle partial success responses in `go.opentelemetry.io/otel/exporters/otlp/otlpmetric` exporters. (#3162, #3440)
diff --git a/sdk/metric/internal/histogram.go b/sdk/metric/internal/histogram.go
index b04b71baf12..058af8419b1 100644
--- a/sdk/metric/internal/histogram.go
+++ b/sdk/metric/internal/histogram.go
@@ -130,20 +130,21 @@ type deltaHistogram[N int64 | float64] struct {
 }
 
 func (s *deltaHistogram[N]) Aggregation() metricdata.Aggregation {
-	h := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
-
 	s.valuesMu.Lock()
 	defer s.valuesMu.Unlock()
 
 	if len(s.values) == 0 {
-		return h
+		return nil
 	}
 
+	t := now()
 	// Do not allow modification of our copy of bounds.
 	bounds := make([]float64, len(s.bounds))
 	copy(bounds, s.bounds)
-	t := now()
-	h.DataPoints = make([]metricdata.HistogramDataPoint, 0, len(s.values))
+	h := metricdata.Histogram{
+		Temporality: metricdata.DeltaTemporality,
+		DataPoints:  make([]metricdata.HistogramDataPoint, 0, len(s.values)),
+	}
 	for a, b := range s.values {
 		hdp := metricdata.HistogramDataPoint{
 			Attributes: a,
@@ -192,20 +193,21 @@ type cumulativeHistogram[N int64 | float64] struct {
 }
 
 func (s *cumulativeHistogram[N]) Aggregation() metricdata.Aggregation {
-	h := metricdata.Histogram{Temporality: metricdata.CumulativeTemporality}
-
 	s.valuesMu.Lock()
 	defer s.valuesMu.Unlock()
 
 	if len(s.values) == 0 {
-		return h
+		return nil
 	}
 
+	t := now()
 	// Do not allow modification of our copy of bounds.
 	bounds := make([]float64, len(s.bounds))
 	copy(bounds, s.bounds)
-	t := now()
-	h.DataPoints = make([]metricdata.HistogramDataPoint, 0, len(s.values))
+	h := metricdata.Histogram{
+		Temporality: metricdata.CumulativeTemporality,
+		DataPoints:  make([]metricdata.HistogramDataPoint, 0, len(s.values)),
+	}
 	for a, b := range s.values {
 		// The HistogramDataPoint field values returned need to be copies of
 		// the buckets value as we will keep updating them.
diff --git a/sdk/metric/internal/histogram_test.go b/sdk/metric/internal/histogram_test.go
index 5b18d8eee4a..fb2968fc325 100644
--- a/sdk/metric/internal/histogram_test.go
+++ b/sdk/metric/internal/histogram_test.go
@@ -169,17 +169,17 @@ func TestCumulativeHistogramImutableCounts(t *testing.T) {
 func TestDeltaHistogramReset(t *testing.T) {
 	t.Cleanup(mockTime(now))
 
-	expect := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
 	a := NewDeltaHistogram[int64](histConf)
-	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
+	assert.Nil(t, a.Aggregation())
 
 	a.Aggregate(1, alice)
+	expect := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
 	expect.DataPoints = []metricdata.HistogramDataPoint{hPoint(alice, 1, 1)}
 	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
 
 	// The attr set should be forgotten once Aggregations is called.
 	expect.DataPoints = nil
-	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
+	assert.Nil(t, a.Aggregation())
 
 	// Aggregating another set should not affect the original (alice).
 	a.Aggregate(1, bob)
@@ -187,6 +187,13 @@ func TestDeltaHistogramReset(t *testing.T) {
 	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
 }
 
+func TestEmptyHistogramNilAggregation(t *testing.T) {
+	assert.Nil(t, NewCumulativeHistogram[int64](histConf).Aggregation())
+	assert.Nil(t, NewCumulativeHistogram[float64](histConf).Aggregation())
+	assert.Nil(t, NewDeltaHistogram[int64](histConf).Aggregation())
+	assert.Nil(t, NewDeltaHistogram[float64](histConf).Aggregation())
+}
+
 func BenchmarkHistogram(b *testing.B) {
 	b.Run("Int64", benchmarkHistogram[int64])
 	b.Run("Float64", benchmarkHistogram[float64])
diff --git a/sdk/metric/internal/lastvalue.go b/sdk/metric/internal/lastvalue.go
index d5ee3b009ba..16ee77362c4 100644
--- a/sdk/metric/internal/lastvalue.go
+++ b/sdk/metric/internal/lastvalue.go
@@ -49,16 +49,16 @@ func (s *lastValue[N]) Aggregate(value N, attr attribute.Set) {
 }
 
 func (s *lastValue[N]) Aggregation() metricdata.Aggregation {
-	gauge := metricdata.Gauge[N]{}
-
 	s.Lock()
 	defer s.Unlock()
 
 	if len(s.values) == 0 {
-		return gauge
+		return nil
 	}
 
-	gauge.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.values))
+	gauge := metricdata.Gauge[N]{
+		DataPoints: make([]metricdata.DataPoint[N], 0, len(s.values)),
+	}
 	for a, v := range s.values {
 		gauge.DataPoints = append(gauge.DataPoints, metricdata.DataPoint[N]{
 			Attributes: a,
diff --git a/sdk/metric/internal/lastvalue_test.go b/sdk/metric/internal/lastvalue_test.go
index 3f84af14304..4d78373c328 100644
--- a/sdk/metric/internal/lastvalue_test.go
+++ b/sdk/metric/internal/lastvalue_test.go
@@ -17,6 +17,8 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal"
 import (
 	"testing"
 
+	"github.com/stretchr/testify/assert"
+
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
 )
@@ -52,20 +54,21 @@ func testLastValueReset[N int64 | float64](t *testing.T) {
 	t.Cleanup(mockTime(now))
 
 	a := NewLastValue[N]()
-	expect := metricdata.Gauge[N]{}
-	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
+	assert.Nil(t, a.Aggregation())
 
 	a.Aggregate(1, alice)
-	expect.DataPoints = []metricdata.DataPoint[N]{{
-		Attributes: alice,
-		Time:       now(),
-		Value:      1,
-	}}
+	expect := metricdata.Gauge[N]{
+		DataPoints: []metricdata.DataPoint[N]{{
+			Attributes: alice,
+			Time:       now(),
+			Value:      1,
+		}},
+	}
 	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
 
 	// The attr set should be forgotten once Aggregations is called.
 	expect.DataPoints = nil
-	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
+	assert.Nil(t, a.Aggregation())
 
 	// Aggregating another set should not affect the original (alice).
 	a.Aggregate(1, bob)
@@ -82,6 +85,11 @@ func TestLastValueReset(t *testing.T) {
 	t.Run("Float64", testLastValueReset[float64])
 }
 
+func TestEmptyLastValueNilAggregation(t *testing.T) {
+	assert.Nil(t, NewLastValue[int64]().Aggregation())
+	assert.Nil(t, NewLastValue[float64]().Aggregation())
+}
+
 func BenchmarkLastValue(b *testing.B) {
 	b.Run("Int64", benchmarkAggregator(NewLastValue[int64]))
 	b.Run("Float64", benchmarkAggregator(NewLastValue[float64]))
diff --git a/sdk/metric/internal/sum.go b/sdk/metric/internal/sum.go
index 210d2370120..ecf22f44b6d 100644
--- a/sdk/metric/internal/sum.go
+++ b/sdk/metric/internal/sum.go
@@ -76,20 +76,19 @@ type deltaSum[N int64 | float64] struct {
 }
 
 func (s *deltaSum[N]) Aggregation() metricdata.Aggregation {
-	out := metricdata.Sum[N]{
-		Temporality: metricdata.DeltaTemporality,
-		IsMonotonic: s.monotonic,
-	}
-
 	s.Lock()
 	defer s.Unlock()
 
 	if len(s.values) == 0 {
-		return out
+		return nil
 	}
 
 	t := now()
-	out.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.values))
+	out := metricdata.Sum[N]{
+		Temporality: metricdata.DeltaTemporality,
+		IsMonotonic: s.monotonic,
+		DataPoints:  make([]metricdata.DataPoint[N], 0, len(s.values)),
+	}
 	for attr, value := range s.values {
 		out.DataPoints = append(out.DataPoints, metricdata.DataPoint[N]{
 			Attributes: attr,
@@ -137,20 +136,19 @@ type cumulativeSum[N int64 | float64] struct {
 }
 
 func (s *cumulativeSum[N]) Aggregation() metricdata.Aggregation {
-	out := metricdata.Sum[N]{
-		Temporality: metricdata.CumulativeTemporality,
-		IsMonotonic: s.monotonic,
-	}
-
 	s.Lock()
 	defer s.Unlock()
 
 	if len(s.values) == 0 {
-		return out
+		return nil
 	}
 
 	t := now()
-	out.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.values))
+	out := metricdata.Sum[N]{
+		Temporality: metricdata.CumulativeTemporality,
+		IsMonotonic: s.monotonic,
+		DataPoints:  make([]metricdata.DataPoint[N], 0, len(s.values)),
+	}
 	for attr, value := range s.values {
 		out.DataPoints = append(out.DataPoints, metricdata.DataPoint[N]{
 			Attributes: attr,
@@ -204,20 +202,19 @@ func (s *precomputedDeltaSum[N]) Aggregate(value N, attr attribute.Set) {
 }
 
 func (s *precomputedDeltaSum[N]) Aggregation() metricdata.Aggregation {
-	out := metricdata.Sum[N]{
-		Temporality: metricdata.DeltaTemporality,
-		IsMonotonic: s.monotonic,
-	}
-
 	s.Lock()
 	defer s.Unlock()
 
 	if len(s.recorded) == 0 {
-		return out
+		return nil
 	}
 
 	t := now()
-	out.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.recorded))
+	out := metricdata.Sum[N]{
+		Temporality: metricdata.DeltaTemporality,
+		IsMonotonic: s.monotonic,
+		DataPoints:  make([]metricdata.DataPoint[N], 0, len(s.recorded)),
+	}
 	for attr, recorded := range s.recorded {
 		value := recorded - s.reported[attr]
 		out.DataPoints = append(out.DataPoints, metricdata.DataPoint[N]{
diff --git a/sdk/metric/internal/sum_test.go b/sdk/metric/internal/sum_test.go
index 92e3b394675..359b73aa50a 100644
--- a/sdk/metric/internal/sum_test.go
+++ b/sdk/metric/internal/sum_test.go
@@ -17,6 +17,8 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal"
 import (
 	"testing"
 
+	"github.com/stretchr/testify/assert"
+
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata"
 	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
@@ -138,17 +140,17 @@ func point[N int64 | float64](a attribute.Set, v N) metricdata.DataPoint[N] {
 func testDeltaSumReset[N int64 | float64](t *testing.T) {
 	t.Cleanup(mockTime(now))
 
-	expect := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality}
 	a := NewDeltaSum[N](false)
-	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
+	assert.Nil(t, a.Aggregation())
 
 	a.Aggregate(1, alice)
+	expect := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality}
 	expect.DataPoints = []metricdata.DataPoint[N]{point[N](alice, 1)}
 	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
 
 	// The attr set should be forgotten once Aggregations is called.
 	expect.DataPoints = nil
-	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
+	assert.Nil(t, a.Aggregation())
 
 	// Aggregating another set should not affect the original (alice).
 	a.Aggregate(1, bob)
@@ -161,6 +163,25 @@ func TestDeltaSumReset(t *testing.T) {
 	t.Run("Float64", testDeltaSumReset[float64])
 }
 
+func TestEmptySumNilAggregation(t *testing.T) {
+	assert.Nil(t, NewCumulativeSum[int64](true).Aggregation())
+	assert.Nil(t, NewCumulativeSum[int64](false).Aggregation())
+	assert.Nil(t, NewCumulativeSum[float64](true).Aggregation())
+	assert.Nil(t, NewCumulativeSum[float64](false).Aggregation())
+	assert.Nil(t, NewDeltaSum[int64](true).Aggregation())
+	assert.Nil(t, NewDeltaSum[int64](false).Aggregation())
+	assert.Nil(t, NewDeltaSum[float64](true).Aggregation())
+	assert.Nil(t, NewDeltaSum[float64](false).Aggregation())
+	assert.Nil(t, NewPrecomputedCumulativeSum[int64](true).Aggregation())
+	assert.Nil(t, NewPrecomputedCumulativeSum[int64](false).Aggregation())
+	assert.Nil(t, NewPrecomputedCumulativeSum[float64](true).Aggregation())
+	assert.Nil(t, NewPrecomputedCumulativeSum[float64](false).Aggregation())
+	assert.Nil(t, NewPrecomputedDeltaSum[int64](true).Aggregation())
+	assert.Nil(t, NewPrecomputedDeltaSum[int64](false).Aggregation())
+	assert.Nil(t, NewPrecomputedDeltaSum[float64](true).Aggregation())
+	assert.Nil(t, NewPrecomputedDeltaSum[float64](false).Aggregation())
+}
+
 func BenchmarkSum(b *testing.B) {
 	b.Run("Int64", benchmarkSum[int64])
 	b.Run("Float64", benchmarkSum[float64])
diff --git a/sdk/metric/pipeline_test.go b/sdk/metric/pipeline_test.go
index 75cc06d4878..678d173c4e6 100644
--- a/sdk/metric/pipeline_test.go
+++ b/sdk/metric/pipeline_test.go
@@ -167,6 +167,9 @@ func testDefaultViewImplicit[N int64 | float64]() func(t *testing.T) {
 				got, err := i.Instrument(inst)
 				require.NoError(t, err)
 				assert.Len(t, got, 1, "default view not applied")
+				for _, a := range got {
+					a.Aggregate(1, *attribute.EmptySet())
+				}
 
 				out, err := test.pipe.produce(context.Background())
 				require.NoError(t, err)
@@ -180,6 +183,7 @@ func testDefaultViewImplicit[N int64 | float64]() func(t *testing.T) {
 					Data: metricdata.Sum[N]{
 						Temporality: metricdata.CumulativeTemporality,
 						IsMonotonic: true,
+						DataPoints:  []metricdata.DataPoint[N]{{Value: N(1)}},
 					},
 				}, sm.Metrics[0], metricdatatest.IgnoreTimestamp())
 			})
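Illustrative note (not part of the patch): after this change the internal aggregators return a nil `metricdata.Aggregation` when no measurements were recorded, instead of an empty `Sum`/`Gauge`/`Histogram`, which is what lets the SDK avoid exporting metrics with no data (per the CHANGELOG entry above). The sketch below is a hypothetical consumer-side filter over public `metricdata` types, not the SDK's pipeline code; the `dropEmpty` helper name is invented for illustration and only the nil-check convention is taken from the patch.

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/otel/sdk/metric/metricdata"
)

// dropEmpty is a hypothetical helper mirroring the convention this patch
// introduces: an aggregation with no recorded data is represented as a nil
// metricdata.Aggregation and should be skipped rather than exported.
func dropEmpty(ms []metricdata.Metrics) []metricdata.Metrics {
	out := ms[:0]
	for _, m := range ms {
		if m.Data == nil {
			// No measurements were recorded for this instrument; drop it
			// instead of exporting an empty aggregation.
			continue
		}
		out = append(out, m)
	}
	return out
}

func main() {
	ms := []metricdata.Metrics{
		{
			Name: "requests",
			Data: metricdata.Sum[int64]{
				Temporality: metricdata.CumulativeTemporality,
				IsMonotonic: true,
				DataPoints:  []metricdata.DataPoint[int64]{{Value: 1}},
			},
		},
		{Name: "idle.instrument", Data: nil}, // aggregator returned nil: no data
	}
	fmt.Println(len(dropEmpty(ms))) // 1
}
```

Returning nil rather than a zero-valued struct lets callers distinguish "nothing was recorded" from "data points that happen to be zero", which is also why the updated tests assert `assert.Nil(t, a.Aggregation())` for freshly created aggregators.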