opentelemetry_sdk/metrics/aggregation.rs
use std::fmt;

use crate::metrics::error::{MetricError, MetricResult};
use crate::metrics::internal::{EXPO_MAX_SCALE, EXPO_MIN_SCALE};

/// The way recorded measurements are summarized.
#[derive(Clone, Debug, PartialEq)]
#[non_exhaustive]
pub enum Aggregation {
    /// An aggregation that drops all recorded data.
    Drop,

    /// An aggregation that uses the default instrument kind selection mapping to
    /// select another aggregation.
    ///
    /// A metric reader can be configured to make an aggregation selection based on
    /// instrument kind that differs from the default. This aggregation ensures the
    /// default is used.
    ///
    /// See [the spec] for information about the default
    /// instrument kind selection mapping.
    ///
    /// [the spec]: https://github.com/open-telemetry/opentelemetry-specification/blob/v1.19.0/specification/metrics/sdk.md#default-aggregation
    Default,

    /// An aggregation that summarizes a set of measurements as their arithmetic
    /// sum.
    Sum,

    /// An aggregation that summarizes a set of measurements as the last one made.
    LastValue,

    /// An aggregation that summarizes a set of measurements as a histogram with
    /// explicitly defined buckets.
    ExplicitBucketHistogram {
        /// The increasing bucket boundary values.
        ///
        /// Boundary values define bucket upper bounds. Buckets are exclusive of their
        /// lower boundary and inclusive of their upper bound (except at positive
        /// infinity). A measurement falls into the lowest-numbered bucket whose upper
        /// boundary is greater than or equal to the measurement. As an example,
        /// boundaries defined as:
        ///
        /// vec![0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 750.0,
        /// 1000.0, 2500.0, 5000.0, 7500.0, 10000.0];
        ///
        /// will define these buckets:
        ///
        /// (-∞, 0], (0, 5.0], (5.0, 10.0], (10.0, 25.0], (25.0, 50.0], (50.0,
        /// 75.0], (75.0, 100.0], (100.0, 250.0], (250.0, 500.0], (500.0,
        /// 750.0], (750.0, 1000.0], (1000.0, 2500.0], (2500.0, 5000.0],
        /// (5000.0, 7500.0], (7500.0, 10000.0], (10000.0, +∞)
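        ///
        /// A minimal construction sketch using the boundaries above (assuming
        /// the crate's usual `opentelemetry_sdk::metrics::Aggregation`
        /// re-export; wiring the value into a reader or view is out of scope
        /// here):
        ///
        /// ```
        /// use opentelemetry_sdk::metrics::Aggregation;
        ///
        /// let histogram = Aggregation::ExplicitBucketHistogram {
        ///     boundaries: vec![
        ///         0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 750.0,
        ///         1000.0, 2500.0, 5000.0, 7500.0, 10000.0,
        ///     ],
        ///     record_min_max: true,
        /// };
        /// assert_eq!(histogram.to_string(), "ExplicitBucketHistogram");
        /// ```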
        boundaries: Vec<f64>,

        /// Indicates whether to record the min and max of the distribution.
        ///
        /// By default, these values are recorded.
        ///
        /// Recording these values for cumulative data is expected to have little
        /// value, as they will represent the entire life of the instrument rather
        /// than just the current collection cycle. It is recommended to set this
        /// to `false` for that type of data to avoid computing the low-value
        /// statistics.
        record_min_max: bool,
    },

    /// An aggregation that summarizes a set of measurements as a histogram with
    /// bucket widths that grow exponentially.
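    ///
    /// A minimal construction sketch (the field values below are illustrative,
    /// not recommendations from this crate; assumes the usual
    /// `opentelemetry_sdk::metrics::Aggregation` re-export):
    ///
    /// ```
    /// use opentelemetry_sdk::metrics::Aggregation;
    ///
    /// let histogram = Aggregation::Base2ExponentialHistogram {
    ///     max_size: 160,
    ///     max_scale: 20,
    ///     record_min_max: true,
    /// };
    /// assert_eq!(histogram.to_string(), "Base2ExponentialHistogram");
    /// ```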
    Base2ExponentialHistogram {
        /// The maximum number of buckets to use for the histogram.
        max_size: u32,

        /// The maximum resolution scale to use for the histogram.
        ///
        /// The maximum value is `20`; at that scale, bucket indices can span the
        /// full range of a signed 32-bit integer.
        ///
        /// The minimum value is `-10`, at which point only two buckets are used.
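        ///
        /// For intuition, here is a sketch of the ideal base-2 index mapping
        /// from the OpenTelemetry specification (background only, not this
        /// crate's internal implementation): a positive value `v` falls in the
        /// bucket with index `ceil(log2(v) * 2^scale) - 1`.
        ///
        /// ```
        /// // Illustrative computation of the ideal mapping at scale 3.
        /// let scale: i32 = 3;
        /// let value: f64 = 7.5;
        /// let index = (value.log2() * 2f64.powi(scale)).ceil() as i32 - 1;
        /// assert_eq!(index, 23); // 7.5 lies in the bucket (2^(23/8), 2^(24/8)]
        /// ```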
        max_scale: i8,

        /// Indicates whether to record the min and max of the distribution.
        ///
        /// By default, these values are recorded.
        ///
        /// It is generally not valuable to record min and max for cumulative
        /// data, as they will represent the entire life of the instrument rather
        /// than just the current collection cycle. You can opt out by setting
        /// this value to `false`.
        record_min_max: bool,
    },
}

impl fmt::Display for Aggregation {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // used for stream id comparisons
        let name = match self {
            Aggregation::Drop => "Drop",
            Aggregation::Default => "Default",
            Aggregation::Sum => "Sum",
            Aggregation::LastValue => "LastValue",
            Aggregation::ExplicitBucketHistogram { .. } => "ExplicitBucketHistogram",
            Aggregation::Base2ExponentialHistogram { .. } => "Base2ExponentialHistogram",
        };

        f.write_str(name)
    }
}

impl Aggregation {
    /// Validate that this aggregation has a correct configuration.
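    ///
    /// A small illustration of the checks below (marked `ignore` because this
    /// method is crate-internal and not callable from a doctest):
    ///
    /// ```ignore
    /// let invalid = Aggregation::ExplicitBucketHistogram {
    ///     boundaries: vec![2.0, 1.0], // not strictly increasing
    ///     record_min_max: true,
    /// };
    /// assert!(invalid.validate().is_err());
    /// ```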
    #[allow(unused)]
    pub(crate) fn validate(&self) -> MetricResult<()> {
        match self {
            Aggregation::Drop => Ok(()),
            Aggregation::Default => Ok(()),
            Aggregation::Sum => Ok(()),
            Aggregation::LastValue => Ok(()),
            Aggregation::ExplicitBucketHistogram { boundaries, .. } => {
                // Boundaries must be strictly increasing.
                for x in boundaries.windows(2) {
                    if x[0] >= x[1] {
                        return Err(MetricError::Config(format!(
                            "aggregation: explicit bucket histogram: non-monotonic boundaries: {boundaries:?}",
                        )));
                    }
                }

                Ok(())
            }
            Aggregation::Base2ExponentialHistogram { max_scale, .. } => {
                if *max_scale > EXPO_MAX_SCALE {
                    return Err(MetricError::Config(format!(
                        "aggregation: exponential histogram: max scale ({max_scale}) is greater than {EXPO_MAX_SCALE}",
                    )));
                }
                if *max_scale < EXPO_MIN_SCALE {
                    return Err(MetricError::Config(format!(
                        "aggregation: exponential histogram: max scale ({max_scale}) is less than {EXPO_MIN_SCALE}",
                    )));
                }

                Ok(())
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::Aggregation;
    use crate::metrics::error::{MetricError, MetricResult};
    use crate::metrics::internal::{EXPO_MAX_SCALE, EXPO_MIN_SCALE};

    #[test]
    fn validate_aggregation() {
        struct TestCase {
            name: &'static str,
            input: Aggregation,
            check: Box<dyn Fn(MetricResult<()>) -> bool>,
        }
        let ok = Box::new(|result: MetricResult<()>| result.is_ok());
        let config_error = Box::new(|result| matches!(result, Err(MetricError::Config(_))));

        let test_cases: Vec<TestCase> = vec![
            TestCase {
                name: "base2 histogram with maximum max_scale",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MAX_SCALE,
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "base2 histogram with minimum max_scale",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MIN_SCALE,
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "base2 histogram with max_scale too small",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MIN_SCALE - 1,
                    record_min_max: true,
                },
                check: config_error.clone(),
            },
            TestCase {
                name: "base2 histogram with max_scale too big",
                input: Aggregation::Base2ExponentialHistogram {
                    max_size: 160,
                    max_scale: EXPO_MAX_SCALE + 1,
                    record_min_max: true,
                },
                check: config_error.clone(),
            },
            TestCase {
                name: "explicit histogram with one boundary",
                input: Aggregation::ExplicitBucketHistogram {
                    boundaries: vec![0.0],
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "explicit histogram with monotonic boundaries",
                input: Aggregation::ExplicitBucketHistogram {
                    boundaries: vec![0.0, 2.0, 4.0, 8.0],
                    record_min_max: true,
                },
                check: ok.clone(),
            },
            TestCase {
                name: "explicit histogram with non-monotonic boundaries",
                input: Aggregation::ExplicitBucketHistogram {
                    boundaries: vec![2.0, 0.0, 4.0, 8.0],
                    record_min_max: true,
                },
                check: config_error.clone(),
            },
        ];
        for test in test_cases {
            assert!((test.check)(test.input.validate()), "{}", test.name)
        }
    }
}