opentelemetry_sdk/metrics/data/mod.rs

//! Types for delivery of pre-aggregated metric time series data.

use std::{any, borrow::Cow, fmt, time::SystemTime};

use opentelemetry::{InstrumentationScope, KeyValue};

use crate::Resource;

use super::Temporality;

/// A collection of [ScopeMetrics] and the associated [Resource] that created them.
#[derive(Debug)]
pub struct ResourceMetrics {
    /// The entity that collected the metrics.
    pub resource: Resource,
    /// The collection of metrics with unique [InstrumentationScope]s.
    pub scope_metrics: Vec<ScopeMetrics>,
}

/// A collection of metrics produced by a meter.
#[derive(Default, Debug)]
pub struct ScopeMetrics {
    /// The [InstrumentationScope] that the meter was created with.
    pub scope: InstrumentationScope,
    /// The list of aggregations created by the meter.
    pub metrics: Vec<Metric>,
}

/// A collection of one or more aggregated time series from an [Instrument].
///
/// [Instrument]: crate::metrics::Instrument
#[derive(Debug)]
pub struct Metric {
    /// The name of the instrument that created this data.
    pub name: Cow<'static, str>,
    /// The description of the instrument, which can be used in documentation.
    pub description: Cow<'static, str>,
    /// The unit in which the instrument reports.
    pub unit: Cow<'static, str>,
    /// The aggregated data from an instrument.
    pub data: Box<dyn Aggregation>,
}
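
// Illustrative sketch, not part of the module's API: how exported data is
// typically traversed, walking from a `ResourceMetrics` down through each
// `ScopeMetrics` to the individual `Metric` entries. The function name and
// printed format are hypothetical.
#[allow(dead_code)]
fn print_metric_names(resource_metrics: &ResourceMetrics) {
    for scope_metrics in &resource_metrics.scope_metrics {
        for metric in &scope_metrics.metrics {
            println!("{} ({})", metric.name, metric.unit);
        }
    }
}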

/// The store of data reported by an [Instrument].
///
/// It will be one of: [Gauge], [Sum], [Histogram], or [ExponentialHistogram].
///
/// [Instrument]: crate::metrics::Instrument
pub trait Aggregation: fmt::Debug + any::Any + Send + Sync {
    /// Support downcasting
    fn as_any(&self) -> &dyn any::Any;
    /// Support downcasting during aggregation
    fn as_mut(&mut self) -> &mut dyn any::Any;
}
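
// Illustrative sketch, not part of the module's API: `as_any` lets a consumer
// such as an exporter recover the concrete aggregation type at run time. The
// concrete type depends on the instrument that produced the data; `Sum<u64>`
// below is only one possibility, and the function name is hypothetical.
#[allow(dead_code)]
fn sum_data_point_count(metric: &Metric) -> Option<usize> {
    metric
        .data
        .as_any()
        .downcast_ref::<Sum<u64>>()
        .map(|sum| sum.data_points.len())
}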

/// A single data point in a gauge time series.
#[derive(Debug, PartialEq)]
pub struct GaugeDataPoint<T> {
    /// The set of key value pairs that uniquely identify the time series.
    pub attributes: Vec<KeyValue>,
    /// The value of this data point.
    pub value: T,
    /// The sampled [Exemplar]s collected during the time series.
    pub exemplars: Vec<Exemplar<T>>,
}

impl<T: Copy> Clone for GaugeDataPoint<T> {
    fn clone(&self) -> Self {
        Self {
            attributes: self.attributes.clone(),
            value: self.value,
            exemplars: self.exemplars.clone(),
        }
    }
}

/// A measurement of the current value of an instrument.
#[derive(Debug)]
pub struct Gauge<T> {
    /// Represents individual aggregated measurements with unique attributes.
    pub data_points: Vec<GaugeDataPoint<T>>,
    /// The time when the time series was started.
    pub start_time: Option<SystemTime>,
    /// The time when the time series was recorded.
    pub time: SystemTime,
}

impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for Gauge<T> {
    fn as_any(&self) -> &dyn any::Any {
        self
    }
    fn as_mut(&mut self) -> &mut dyn any::Any {
        self
    }
}
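
// Illustrative sketch, not part of the module's API: a hand-built `Gauge`
// wrapped in the type-erased form stored in `Metric::data`. In practice the
// SDK's collection pipeline populates these values; the attribute key and
// value below are hypothetical.
#[allow(dead_code)]
fn example_gauge() -> Box<dyn Aggregation> {
    Box::new(Gauge {
        data_points: vec![GaugeDataPoint {
            attributes: vec![KeyValue::new("host", "server-a")],
            value: 42.0_f64,
            exemplars: vec![],
        }],
        start_time: None,
        time: SystemTime::now(),
    })
}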

/// A single data point in a sum time series.
#[derive(Debug, PartialEq)]
pub struct SumDataPoint<T> {
    /// The set of key value pairs that uniquely identify the time series.
    pub attributes: Vec<KeyValue>,
    /// The value of this data point.
    pub value: T,
    /// The sampled [Exemplar]s collected during the time series.
    pub exemplars: Vec<Exemplar<T>>,
}

impl<T: Copy> Clone for SumDataPoint<T> {
    fn clone(&self) -> Self {
        Self {
            attributes: self.attributes.clone(),
            value: self.value,
            exemplars: self.exemplars.clone(),
        }
    }
}

/// Represents the sum of all measurements of values from an instrument.
#[derive(Debug)]
pub struct Sum<T> {
    /// Represents individual aggregated measurements with unique attributes.
    pub data_points: Vec<SumDataPoint<T>>,
    /// The time when the time series was started.
    pub start_time: SystemTime,
    /// The time when the time series was recorded.
    pub time: SystemTime,
    /// Describes if the aggregation is reported as the change from the last report
    /// time, or the cumulative changes since a fixed start time.
    pub temporality: Temporality,
    /// Whether this aggregation only increases or decreases.
    pub is_monotonic: bool,
}

impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for Sum<T> {
    fn as_any(&self) -> &dyn any::Any {
        self
    }
    fn as_mut(&mut self) -> &mut dyn any::Any {
        self
    }
}
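
// Illustrative sketch, not part of the module's API: the shape of data a
// monotonic counter reports under cumulative temporality. A delta pipeline
// would instead use `Temporality::Delta` and report only the change since the
// previous collection. The attribute and value below are hypothetical.
#[allow(dead_code)]
fn example_counter_sum(start_time: SystemTime) -> Sum<u64> {
    Sum {
        data_points: vec![SumDataPoint {
            attributes: vec![KeyValue::new("status", "ok")],
            value: 17,
            exemplars: vec![],
        }],
        start_time,
        time: SystemTime::now(),
        temporality: Temporality::Cumulative,
        is_monotonic: true,
    }
}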

/// Represents the histogram of all measurements of values from an instrument.
#[derive(Debug)]
pub struct Histogram<T> {
    /// Individual aggregated measurements with unique attributes.
    pub data_points: Vec<HistogramDataPoint<T>>,
    /// The time when the time series was started.
    pub start_time: SystemTime,
    /// The time when the time series was recorded.
    pub time: SystemTime,
    /// Describes if the aggregation is reported as the change from the last report
    /// time, or the cumulative changes since a fixed start time.
    pub temporality: Temporality,
}

impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for Histogram<T> {
    fn as_any(&self) -> &dyn any::Any {
        self
    }
    fn as_mut(&mut self) -> &mut dyn any::Any {
        self
    }
}

/// A single histogram data point in a time series.
#[derive(Debug, PartialEq)]
pub struct HistogramDataPoint<T> {
    /// The set of key value pairs that uniquely identify the time series.
    pub attributes: Vec<KeyValue>,
    /// The number of updates this histogram has been calculated with.
    pub count: u64,
    /// The upper bounds of the histogram buckets.
    ///
    /// The last boundary is +infinity, so it is implied rather than stored here.
    pub bounds: Vec<f64>,
    /// The count of each of the buckets.
    pub bucket_counts: Vec<u64>,

    /// The minimum value recorded.
    pub min: Option<T>,
    /// The maximum value recorded.
    pub max: Option<T>,
    /// The sum of the values recorded.
    pub sum: T,

    /// The sampled [Exemplar]s collected during the time series.
    pub exemplars: Vec<Exemplar<T>>,
}

impl<T: Copy> Clone for HistogramDataPoint<T> {
    fn clone(&self) -> Self {
        Self {
            attributes: self.attributes.clone(),
            count: self.count,
            bounds: self.bounds.clone(),
            bucket_counts: self.bucket_counts.clone(),
            min: self.min,
            max: self.max,
            sum: self.sum,
            exemplars: self.exemplars.clone(),
        }
    }
}
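
// Illustrative sketch, not part of the module's API: because the final
// +infinity boundary is implied, `bucket_counts` is expected to hold one more
// entry than `bounds`. With `bounds = [0.0, 5.0, 10.0]` the four counts below
// cover (-inf, 0], (0, 5], (5, 10] and (10, +inf) respectively. All values are
// hypothetical.
#[allow(dead_code)]
fn example_histogram_point() -> HistogramDataPoint<f64> {
    HistogramDataPoint {
        attributes: vec![],
        count: 7,
        bounds: vec![0.0, 5.0, 10.0],
        bucket_counts: vec![0, 3, 3, 1],
        min: Some(1.5),
        max: Some(12.0),
        sum: 37.0,
        exemplars: vec![],
    }
}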

/// The exponential histogram of all measurements of values from an instrument.
#[derive(Debug)]
pub struct ExponentialHistogram<T> {
    /// The individual aggregated measurements with unique attributes.
    pub data_points: Vec<ExponentialHistogramDataPoint<T>>,
    /// The time when the time series was started.
    pub start_time: SystemTime,
    /// The time when the time series was recorded.
    pub time: SystemTime,
    /// Describes if the aggregation is reported as the change from the last report
    /// time, or the cumulative changes since a fixed start time.
    pub temporality: Temporality,
}

impl<T: fmt::Debug + Send + Sync + 'static> Aggregation for ExponentialHistogram<T> {
    fn as_any(&self) -> &dyn any::Any {
        self
    }
    fn as_mut(&mut self) -> &mut dyn any::Any {
        self
    }
}

/// A single exponential histogram data point in a time series.
#[derive(Debug, PartialEq)]
pub struct ExponentialHistogramDataPoint<T> {
    /// The set of key value pairs that uniquely identify the time series.
    pub attributes: Vec<KeyValue>,

    /// The number of updates this histogram has been calculated with.
    pub count: usize,
    /// The minimum value recorded.
    pub min: Option<T>,
    /// The maximum value recorded.
    pub max: Option<T>,
    /// The sum of the values recorded.
    pub sum: T,

    /// Describes the resolution of the histogram.
    ///
    /// Boundaries are located at powers of the base, where:
    ///
    ///   base = 2 ^ (2 ^ -scale)
    pub scale: i8,

    /// The number of values whose absolute value is less than or equal to
    /// `zero_threshold`.
    ///
    /// When `zero_threshold` is `0`, this is the number of values that cannot be
    /// expressed using the standard exponential formula as well as values that have
    /// been rounded to zero.
    pub zero_count: u64,

    /// The range of positive value bucket counts.
    pub positive_bucket: ExponentialBucket,
    /// The range of negative value bucket counts.
    pub negative_bucket: ExponentialBucket,

    /// The width of the zero region.
    ///
    /// The zero region is defined as the closed interval
    /// [-zero_threshold, zero_threshold].
    pub zero_threshold: f64,

    /// The sampled exemplars collected during the time series.
    pub exemplars: Vec<Exemplar<T>>,
}

impl<T: Copy> Clone for ExponentialHistogramDataPoint<T> {
    fn clone(&self) -> Self {
        Self {
            attributes: self.attributes.clone(),
            count: self.count,
            min: self.min,
            max: self.max,
            sum: self.sum,
            scale: self.scale,
            zero_count: self.zero_count,
            positive_bucket: self.positive_bucket.clone(),
            negative_bucket: self.negative_bucket.clone(),
            zero_threshold: self.zero_threshold,
            exemplars: self.exemplars.clone(),
        }
    }
}
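
// Illustrative sketch, not part of the module's API: recovering the bucket
// base from `scale` using the formula above. With `scale = 0` the base is 2
// (boundaries at ..., 1, 2, 4, 8, ...); each increment of `scale` doubles the
// resolution, e.g. `scale = 3` gives base = 2^(1/8), roughly 1.09.
#[allow(dead_code)]
fn bucket_base(scale: i8) -> f64 {
    2f64.powf(2f64.powi(-i32::from(scale)))
}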

/// A set of bucket counts, encoded in a contiguous array of counts.
#[derive(Debug, PartialEq)]
pub struct ExponentialBucket {
    /// The bucket index of the first entry in the `counts` vec.
    pub offset: i32,

    /// A vec where `counts[i]` carries the count of the bucket at index `offset + i`.
    ///
    /// `counts[i]` is the count of values greater than base^(offset+i) and less than
    /// or equal to base^(offset+i+1).
    pub counts: Vec<u64>,
}

impl Clone for ExponentialBucket {
    fn clone(&self) -> Self {
        Self {
            offset: self.offset,
            counts: self.counts.clone(),
        }
    }
}
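
// Illustrative sketch, not part of the module's API: interpreting `offset`
// and `counts`. At `scale = 0` (base = 2) a positive bucket with `offset = 1`
// and `counts = [3, 0, 2]` records 3 values in (2, 4], none in (4, 8], and
// 2 in (8, 16]. The numbers are hypothetical.
#[allow(dead_code)]
fn example_positive_bucket() -> ExponentialBucket {
    ExponentialBucket {
        offset: 1,
        counts: vec![3, 0, 2],
    }
}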

/// A measurement sampled from a time series providing a typical example.
#[derive(Debug, PartialEq)]
pub struct Exemplar<T> {
    /// The attributes recorded with the measurement but filtered out of the
    /// time series' aggregated data.
    pub filtered_attributes: Vec<KeyValue>,
    /// The time when the measurement was recorded.
    pub time: SystemTime,
    /// The measured value.
    pub value: T,
    /// The ID of the span that was active during the measurement.
    ///
    /// If no span was active, or the span was not sampled, this will be all zeros.
    pub span_id: [u8; 8],
    /// The ID of the trace the active span belonged to during the measurement.
    ///
    /// If no span was active, or the span was not sampled, this will be all zeros.
    pub trace_id: [u8; 16],
}

impl<T: Copy> Clone for Exemplar<T> {
    fn clone(&self) -> Self {
        Self {
            filtered_attributes: self.filtered_attributes.clone(),
            time: self.time,
            value: self.value,
            span_id: self.span_id,
            trace_id: self.trace_id,
        }
    }
}
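
// Illustrative sketch, not part of the module's API: assuming all-zero ids
// represent "no recorded span", checking whether an exemplar carries trace
// context amounts to comparing against the zeroed arrays. The function name
// is hypothetical.
#[allow(dead_code)]
fn has_trace_context<T>(exemplar: &Exemplar<T>) -> bool {
    exemplar.trace_id != [0u8; 16] && exemplar.span_id != [0u8; 8]
}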

#[cfg(test)]
mod tests {

    use super::{Exemplar, ExponentialHistogramDataPoint, HistogramDataPoint, SumDataPoint};

    use opentelemetry::time::now;
    use opentelemetry::KeyValue;

    #[test]
    fn validate_cloning_data_points() {
        let sum_data_point = SumDataPoint {
            attributes: vec![KeyValue::new("key", "value")],
            value: 0u32,
            exemplars: vec![Exemplar {
                filtered_attributes: vec![],
                time: now(),
                value: 0u32,
                span_id: [0; 8],
                trace_id: [0; 16],
            }],
        };
        assert_eq!(sum_data_point.clone(), sum_data_point);

        let histogram_data_point = HistogramDataPoint {
            attributes: vec![KeyValue::new("key", "value")],
            count: 0,
            bounds: vec![],
            bucket_counts: vec![],
            min: None,
            max: None,
            sum: 0u32,
            exemplars: vec![Exemplar {
                filtered_attributes: vec![],
                time: now(),
                value: 0u32,
                span_id: [0; 8],
                trace_id: [0; 16],
            }],
        };
        assert_eq!(histogram_data_point.clone(), histogram_data_point);

        let exponential_histogram_data_point = ExponentialHistogramDataPoint {
            attributes: vec![KeyValue::new("key", "value")],
            count: 0,
            min: None,
            max: None,
            sum: 0u32,
            scale: 0,
            zero_count: 0,
            positive_bucket: super::ExponentialBucket {
                offset: 0,
                counts: vec![],
            },
            negative_bucket: super::ExponentialBucket {
                offset: 0,
                counts: vec![],
            },
            zero_threshold: 0.0,
            exemplars: vec![Exemplar {
                filtered_attributes: vec![],
                time: now(),
                value: 0u32,
                span_id: [0; 8],
                trace_id: [0; 16],
            }],
        };
        assert_eq!(
            exponential_histogram_data_point.clone(),
            exponential_histogram_data_point
        );
    }
}