-use std::sync::atomic::{AtomicBool, Ordering};
+use std::collections::HashSet;
+use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
+use std::sync::Arc;
 use std::vec;
 use std::{
     collections::HashMap,
@@ -8,19 +10,21 @@ use std::{
 
 use crate::metrics::data::{self, Aggregation, DataPoint, Temporality};
 use crate::metrics::AttributeSet;
+use once_cell::sync::Lazy;
 use opentelemetry::KeyValue;
 use opentelemetry::{global, metrics::MetricsError};
 
-use super::{
-    aggregate::{is_under_cardinality_limit, STREAM_OVERFLOW_ATTRIBUTE_SET},
-    AtomicTracker, Number,
-};
+use super::{aggregate::is_under_cardinality_limit, AtomicTracker, Number};
+
+pub(crate) static STREAM_OVERFLOW_ATTRIBUTES: Lazy<Vec<KeyValue>> =
+    Lazy::new(|| vec![KeyValue::new("otel.metric.overflow", "true")]);
 
 /// The storage for sums.
 struct ValueMap<T: Number<T>> {
-    values: RwLock<HashMap<AttributeSet, T::AtomicTracker>>,
+    values: RwLock<HashMap<Vec<KeyValue>, Arc<T::AtomicTracker>>>,
     has_no_value_attribute_value: AtomicBool,
     no_attribute_value: T::AtomicTracker,
+    count: AtomicUsize,
 }
 
 impl<T: Number<T>> Default for ValueMap<T> {
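Note on the new map shape above: each attribute set is now stored under two `Vec<KeyValue>` keys, the caller's incoming order and the canonical sorted order, and both keys share a single `Arc`-wrapped tracker; `count` tracks logical trackers rather than map entries. A minimal, self-contained sketch of that layout, with `AtomicU64` standing in for `T::AtomicTracker` (all names here are illustrative, not the crate's types):

```rust
use std::collections::HashMap;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;

fn main() {
    // AtomicU64 stands in for T::AtomicTracker in this sketch.
    let mut values: HashMap<Vec<&str>, Arc<AtomicU64>> = HashMap::new();

    // One logical tracker, inserted under both the incoming and the
    // sorted key order.
    let tracker = Arc::new(AtomicU64::new(0));
    values.insert(vec!["b", "a"], tracker.clone()); // incoming order
    values.insert(vec!["a", "b"], tracker); // sorted order

    // An update through either key lands on the same counter.
    values[&["b", "a"][..]].fetch_add(5, Ordering::Relaxed);
    assert_eq!(values[&["a", "b"][..]].load(Ordering::Relaxed), 5);

    // Two map entries, but only one logical data point.
    assert_eq!(values.len(), 2);
}
```

Sharing the allocation means a measurement recorded through either key order lands on the same counter, at the cost of up to two map entries per attribute set.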
@@ -35,42 +39,59 @@ impl<T: Number<T>> ValueMap<T> {
             values: RwLock::new(HashMap::new()),
             has_no_value_attribute_value: AtomicBool::new(false),
             no_attribute_value: T::new_atomic_tracker(),
+            count: AtomicUsize::new(0),
         }
     }
 }
 
 impl<T: Number<T>> ValueMap<T> {
-    fn measure(&self, measurement: T, attrs: AttributeSet) {
+    fn measure(&self, measurement: T, attrs: &[KeyValue]) {
         if attrs.is_empty() {
             self.no_attribute_value.add(measurement);
             self.has_no_value_attribute_value
                 .store(true, Ordering::Release);
         } else if let Ok(values) = self.values.read() {
-            if let Some(value_to_update) = values.get(&attrs) {
+            // Try the incoming order first.
+            if let Some(value_to_update) = values.get(attrs) {
                 value_to_update.add(measurement);
-                return;
             } else {
-                drop(values);
-                if let Ok(mut values) = self.values.write() {
-                    // Recheck after acquiring write lock, in case another
-                    // thread has added the value.
-                    if let Some(value_to_update) = values.get(&attrs) {
-                        value_to_update.add(measurement);
-                        return;
-                    } else if is_under_cardinality_limit(values.len()) {
-                        let new_value = T::new_atomic_tracker();
-                        new_value.add(measurement);
-                        values.insert(attrs, new_value);
-                    } else if let Some(overflow_value) =
-                        values.get_mut(&STREAM_OVERFLOW_ATTRIBUTE_SET)
-                    {
-                        overflow_value.add(measurement);
-                        return;
-                    } else {
-                        let new_value = T::new_atomic_tracker();
-                        new_value.add(measurement);
-                        values.insert(STREAM_OVERFLOW_ATTRIBUTE_SET.clone(), new_value);
-                        global::handle_error(MetricsError::Other("Warning: Maximum data points for metric stream exceeded. Entry added to overflow. Subsequent overflows to same metric until next collect will not be logged.".into()));
+                // Then try the sorted order.
+                let sorted_attrs = AttributeSet::from(attrs).into_vec();
+                if let Some(value_to_update) = values.get(sorted_attrs.as_slice()) {
+                    value_to_update.add(measurement);
+                } else {
+                    // Give up the read lock before acquiring the write lock.
+                    drop(values);
+                    if let Ok(mut values) = self.values.write() {
+                        // Recheck both the incoming and sorted orders after
+                        // acquiring the write lock, in case another thread
+                        // has added the value.
+                        if let Some(value_to_update) = values.get(attrs) {
+                            value_to_update.add(measurement);
+                        } else if let Some(value_to_update) = values.get(sorted_attrs.as_slice()) {
+                            value_to_update.add(measurement);
+                        } else if is_under_cardinality_limit(self.count.load(Ordering::SeqCst)) {
+                            let new_value = T::new_atomic_tracker();
+                            new_value.add(measurement);
+                            let new_value = Arc::new(new_value);
+
+                            // Insert under the original order.
+                            values.insert(attrs.to_vec(), new_value.clone());
+
+                            // Insert under the sorted order.
+                            values.insert(sorted_attrs, new_value);
+
+                            self.count.fetch_add(1, Ordering::SeqCst);
+                        } else if let Some(overflow_value) =
+                            values.get(STREAM_OVERFLOW_ATTRIBUTES.as_slice())
+                        {
+                            overflow_value.add(measurement);
+                        } else {
+                            let new_value = T::new_atomic_tracker();
+                            new_value.add(measurement);
+                            values.insert(STREAM_OVERFLOW_ATTRIBUTES.clone(), Arc::new(new_value));
+                            global::handle_error(MetricsError::Other("Warning: Maximum data points for metric stream exceeded. Entry added to overflow. Subsequent overflows to same metric until next collect will not be logged.".into()));
+                        }
                     }
                 }
             }
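The rewritten `measure` keeps the fast path cheap: look up under the read lock first (incoming order, then sorted order), and only on a miss upgrade to the write lock, rechecking both orders before inserting, since another thread may have inserted in between. A compact sketch of this double-checked read-then-write pattern, using hypothetical `String` keys and `AtomicU64` values rather than the crate's types:

```rust
use std::collections::HashMap;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, RwLock};

// Sketch of the pattern in measure(): try under the read lock first; only
// on a miss take the write lock, and recheck before inserting because
// another thread may have won the race.
fn add(values: &RwLock<HashMap<String, Arc<AtomicU64>>>, key: &str, delta: u64) {
    if let Ok(map) = values.read() {
        if let Some(v) = map.get(key) {
            v.fetch_add(delta, Ordering::Relaxed);
            return;
        }
    } // read guard dropped here
    if let Ok(mut map) = values.write() {
        // Recheck, then insert only on a genuine miss.
        map.entry(key.to_owned())
            .or_insert_with(|| Arc::new(AtomicU64::new(0)))
            .fetch_add(delta, Ordering::Relaxed);
    }
}

fn main() {
    let values = RwLock::new(HashMap::new());
    add(&values, "route=/home", 1);
    add(&values, "route=/home", 2);
    assert_eq!(
        values.read().unwrap()["route=/home"].load(Ordering::Relaxed),
        3
    );
}
```

The recheck after `values.write()` is what makes the pattern race-free: dropping the read guard and taking the write guard is not one atomic step.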
@@ -100,7 +121,6 @@ impl<T: Number<T>> Sum<T> {
     }
 
     pub(crate) fn measure(&self, measurement: T, attrs: &[KeyValue]) {
-        let attrs: AttributeSet = attrs.into();
         self.value_map.measure(measurement, attrs)
     }
 
@@ -125,12 +145,9 @@ impl<T: Number<T>> Sum<T> {
         s_data.is_monotonic = self.monotonic;
         s_data.data_points.clear();
 
-        let mut values = match self.value_map.values.write() {
-            Ok(v) => v,
-            Err(_) => return (0, None),
-        };
-
-        let n = values.len() + 1;
+        // The max number of data points needs to account for the special
+        // cases of the no-attribute value and the overflow attribute.
+        let n = self.value_map.count.load(Ordering::SeqCst) + 2;
         if n > s_data.data_points.capacity() {
             s_data
                 .data_points
@@ -152,23 +169,29 @@ impl<T: Number<T>> Sum<T> {
             });
         }
 
+        let mut values = match self.value_map.values.write() {
+            Ok(v) => v,
+            Err(_) => return (0, None),
+        };
+
+        let mut seen = HashSet::new();
         for (attrs, value) in values.drain() {
-            s_data.data_points.push(DataPoint {
-                attributes: attrs
-                    .iter()
-                    .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
-                    .collect(),
-                start_time: Some(prev_start),
-                time: Some(t),
-                value: value.get_value(),
-                exemplars: vec![],
-            });
+            if seen.insert(Arc::as_ptr(&value)) {
+                s_data.data_points.push(DataPoint {
+                    attributes: attrs.clone(),
+                    start_time: Some(prev_start),
+                    time: Some(t),
+                    value: value.get_value(),
+                    exemplars: vec![],
+                });
+            }
         }
 
         // The delta collection cycle resets.
         if let Ok(mut start) = self.start.lock() {
             *start = t;
         }
+        self.value_map.count.store(0, Ordering::SeqCst);
 
         (
             s_data.data_points.len(),
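Because each logical tracker may sit under two keys, the delta path drains the map and deduplicates by pointer identity with a `HashSet` of `Arc::as_ptr` values, so every tracker produces exactly one data point. A small sketch of that dedup step, assuming the dual-key layout illustrated earlier:

```rust
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

fn main() {
    // Two keys (incoming and sorted order) aliasing one shared value.
    let shared = Arc::new(7u64);
    let mut values: HashMap<Vec<&str>, Arc<u64>> = HashMap::new();
    values.insert(vec!["b", "a"], shared.clone());
    values.insert(vec!["a", "b"], shared);

    // Drain and deduplicate by pointer identity, mirroring the
    // `seen.insert(Arc::as_ptr(&value))` guard in the delta path.
    let mut seen = HashSet::new();
    let mut data_points = Vec::new();
    for (attrs, value) in values.drain() {
        if seen.insert(Arc::as_ptr(&value)) {
            data_points.push((attrs, *value));
        }
    }

    // Two map entries collapse to a single exported data point.
    assert_eq!(data_points.len(), 1);
}
```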
@@ -197,12 +220,9 @@ impl<T: Number<T>> Sum<T> {
         s_data.is_monotonic = self.monotonic;
         s_data.data_points.clear();
 
-        let values = match self.value_map.values.write() {
-            Ok(v) => v,
-            Err(_) => return (0, None),
-        };
-
-        let n = values.len() + 1;
+        // The max number of data points needs to account for the special
+        // cases of the no-attribute value and the overflow attribute.
+        let n = self.value_map.count.load(Ordering::SeqCst) + 2;
         if n > s_data.data_points.capacity() {
             s_data
                 .data_points
@@ -225,16 +245,18 @@ impl<T: Number<T>> Sum<T> {
             });
         }
 
+        let values = match self.value_map.values.write() {
+            Ok(v) => v,
+            Err(_) => return (0, None),
+        };
+
         // TODO: This will use an unbounded amount of memory if there is an
         // unbounded number of attribute sets being aggregated. Attribute
         // sets that become "stale" need to be forgotten so this will not
         // overload the system.
         for (attrs, value) in values.iter() {
             s_data.data_points.push(DataPoint {
-                attributes: attrs
-                    .iter()
-                    .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
-                    .collect(),
+                attributes: attrs.clone(),
                 start_time: Some(prev_start),
                 time: Some(t),
                 value: value.get_value(),
@@ -254,7 +276,7 @@ pub(crate) struct PrecomputedSum<T: Number<T>> {
     value_map: ValueMap<T>,
     monotonic: bool,
     start: Mutex<SystemTime>,
-    reported: Mutex<HashMap<AttributeSet, T>>,
+    reported: Mutex<HashMap<Vec<KeyValue>, T>>,
 }
 
 impl<T: Number<T>> PrecomputedSum<T> {
@@ -268,7 +290,6 @@ impl<T: Number<T>> PrecomputedSum<T> {
     }
 
     pub(crate) fn measure(&self, measurement: T, attrs: &[KeyValue]) {
-        let attrs: AttributeSet = attrs.into();
         self.value_map.measure(measurement, attrs)
     }
 
@@ -294,12 +315,9 @@ impl<T: Number<T>> PrecomputedSum<T> {
         s_data.temporality = Temporality::Delta;
         s_data.is_monotonic = self.monotonic;
 
-        let mut values = match self.value_map.values.write() {
-            Ok(v) => v,
-            Err(_) => return (0, None),
-        };
-
-        let n = values.len() + 1;
+        // The max number of data points needs to account for the special
+        // cases of the no-attribute value and the overflow attribute.
+        let n = self.value_map.count.load(Ordering::SeqCst) + 2;
         if n > s_data.data_points.capacity() {
             s_data
                 .data_points
@@ -325,17 +343,19 @@ impl<T: Number<T>> PrecomputedSum<T> {
             });
         }
 
+        let mut values = match self.value_map.values.write() {
+            Ok(v) => v,
+            Err(_) => return (0, None),
+        };
+
         let default = T::default();
         for (attrs, value) in values.drain() {
             let delta = value.get_value() - *reported.get(&attrs).unwrap_or(&default);
             if delta != default {
                 new_reported.insert(attrs.clone(), value.get_value());
             }
             s_data.data_points.push(DataPoint {
-                attributes: attrs
-                    .iter()
-                    .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
-                    .collect(),
+                attributes: attrs.clone(),
                 start_time: Some(prev_start),
                 time: Some(t),
                 value: delta,
@@ -347,6 +367,7 @@ impl<T: Number<T>> PrecomputedSum<T> {
         if let Ok(mut start) = self.start.lock() {
             *start = t;
         }
+        self.value_map.count.store(0, Ordering::SeqCst);
 
         *reported = new_reported;
         drop(reported); // drop before values guard is dropped
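For the precomputed (observable) sum, the `reported` map, now keyed by `Vec<KeyValue>`, remembers the last exported value per attribute vector so the delta path emits only the change since the previous collection. A simplified sketch of that bookkeeping, with hypothetical `String` keys and `i64` values in place of the crate's types:

```rust
use std::collections::HashMap;

fn main() {
    // Simplified stand-in for reported: Mutex<HashMap<Vec<KeyValue>, T>>.
    let mut reported: HashMap<String, i64> = HashMap::new();

    // First collection: nothing reported yet, so the delta is the full value.
    let current = 10;
    let delta = current - reported.get("attrs").copied().unwrap_or_default();
    assert_eq!(delta, 10);
    reported.insert("attrs".to_owned(), current);

    // Second collection: only the increase since the last export is emitted.
    let current = 25;
    let delta = current - reported.get("attrs").copied().unwrap_or_default();
    assert_eq!(delta, 15);
}
```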
@@ -379,12 +400,9 @@ impl<T: Number<T>> PrecomputedSum<T> {
         s_data.temporality = Temporality::Cumulative;
         s_data.is_monotonic = self.monotonic;
 
-        let values = match self.value_map.values.write() {
-            Ok(v) => v,
-            Err(_) => return (0, None),
-        };
-
-        let n = values.len() + 1;
+        // The max number of data points needs to account for the special
+        // cases of the no-attribute value and the overflow attribute.
+        let n = self.value_map.count.load(Ordering::SeqCst) + 2;
         if n > s_data.data_points.capacity() {
             s_data
                 .data_points
@@ -410,17 +428,18 @@ impl<T: Number<T>> PrecomputedSum<T> {
             });
         }
 
+        let values = match self.value_map.values.write() {
+            Ok(v) => v,
+            Err(_) => return (0, None),
+        };
+
         let default = T::default();
         for (attrs, value) in values.iter() {
             let delta = value.get_value() - *reported.get(attrs).unwrap_or(&default);
             if delta != default {
                 new_reported.insert(attrs.clone(), value.get_value());
             }
             s_data.data_points.push(DataPoint {
-                attributes: attrs
-                    .iter()
-                    .map(|(k, v)| KeyValue::new(k.clone(), v.clone()))
-                    .collect(),
+                attributes: attrs.clone(),
                 start_time: Some(prev_start),
                 time: Some(t),
                 value: delta,