@@ -1,13 +1,11 @@
-use std::collections::HashSet;
-use std::sync::atomic::Ordering;
-use std::sync::Arc;
+use std::mem::take;
 use std::{sync::Mutex, time::SystemTime};
 
 use crate::metrics::data::HistogramDataPoint;
 use crate::metrics::data::{self, Aggregation, Temporality};
 use opentelemetry::KeyValue;
 
-use super::Number;
+use super::{collect_data_points_readonly, collect_data_points_reset, Number};
 use super::{AtomicTracker, AtomicallyUpdate, Operation, ValueMap};
 
 struct HistogramUpdate;
@@ -45,7 +43,6 @@ impl<T: Number<T>> AtomicallyUpdate<T> for HistogramTracker<T> {
     }
 }
 
-#[derive(Default)]
 struct Buckets<T> {
     counts: Vec<u64>,
     count: u64,
@@ -61,7 +58,8 @@ impl<T: Number<T>> Buckets<T> {
             counts: vec![0; n],
             min: T::max(),
             max: T::min(),
-            ..Default::default()
+            count: 0,
+            total: T::default(),
         }
     }
 
@@ -80,14 +78,17 @@ impl<T: Number<T>> Buckets<T> {
         }
     }
 
-    fn reset(&mut self) {
-        for item in &mut self.counts {
-            *item = 0;
-        }
-        self.count = Default::default();
-        self.total = Default::default();
-        self.min = T::max();
-        self.max = T::min();
+    fn clone_and_reset(&mut self) -> Self {
+        let n = self.counts.len();
+        let res = Buckets {
+            counts: take(&mut self.counts),
+            count: self.count,
+            total: self.total,
+            min: self.min,
+            max: self.max,
+        };
+        *self = Buckets::new(n);
+        res
     }
 }
 
@@ -155,26 +156,27 @@ impl<T: Number<T>> Histogram<T> {
         h.temporality = Temporality::Delta;
         h.data_points.clear();
 
-        // Max number of data points need to account for the special casing
-        // of the no attribute value + overflow attribute.
-        let n = self.value_map.count.load(Ordering::SeqCst) + 2;
-        if n > h.data_points.capacity() {
-            h.data_points.reserve_exact(n - h.data_points.capacity());
-        }
+        let Ok(mut trackers) = self.value_map.trackers.write() else {
+            return (0, None);
+        };
 
-        if self
-            .value_map
-            .has_no_attribute_value
-            .swap(false, Ordering::AcqRel)
-        {
-            if let Ok(ref mut b) = self.value_map.no_attribute_tracker.buckets.lock() {
-                h.data_points.push(HistogramDataPoint {
-                    attributes: vec![],
+        collect_data_points_reset(
+            &self.value_map.no_attribs_tracker,
+            &mut trackers,
+            &mut h.data_points,
+            |attributes, tracker| {
+                let b = tracker
+                    .buckets
+                    .lock()
+                    .unwrap_or_else(|err| err.into_inner())
+                    .clone_and_reset();
+                HistogramDataPoint {
+                    attributes,
                     start_time: start,
                     time: t,
                     count: b.count,
                     bounds: self.bounds.clone(),
-                    bucket_counts: b.counts.clone(),
+                    bucket_counts: b.counts,
                     sum: if self.record_sum {
                         b.total
                     } else {
@@ -191,54 +193,14 @@ impl<T: Number<T>> Histogram<T> {
                         None
                     },
                     exemplars: vec![],
-                });
-
-                b.reset();
-            }
-        }
-
-        let mut trackers = match self.value_map.trackers.write() {
-            Ok(v) => v,
-            Err(_) => return (0, None),
-        };
-
-        let mut seen = HashSet::new();
-        for (attrs, tracker) in trackers.drain() {
-            if seen.insert(Arc::as_ptr(&tracker)) {
-                if let Ok(b) = tracker.buckets.lock() {
-                    h.data_points.push(HistogramDataPoint {
-                        attributes: attrs.clone(),
-                        start_time: start,
-                        time: t,
-                        count: b.count,
-                        bounds: self.bounds.clone(),
-                        bucket_counts: b.counts.clone(),
-                        sum: if self.record_sum {
-                            b.total
-                        } else {
-                            T::default()
-                        },
-                        min: if self.record_min_max {
-                            Some(b.min)
-                        } else {
-                            None
-                        },
-                        max: if self.record_min_max {
-                            Some(b.max)
-                        } else {
-                            None
-                        },
-                        exemplars: vec![],
-                    });
                 }
-            }
-        }
+            },
+        );
 
         // The delta collection cycle resets.
         if let Ok(mut start) = self.start.lock() {
             *start = t;
         }
-        self.value_map.count.store(0, Ordering::SeqCst);
 
         (h.data_points.len(), new_agg.map(|a| Box::new(a) as Box<_>))
     }
@@ -266,21 +228,21 @@ impl<T: Number<T>> Histogram<T> {
         h.temporality = Temporality::Cumulative;
         h.data_points.clear();
 
-        // Max number of data points need to account for the special casing
-        // of the no attribute value + overflow attribute.
-        let n = self.value_map.count.load(Ordering::SeqCst) + 2;
-        if n > h.data_points.capacity() {
-            h.data_points.reserve_exact(n - h.data_points.capacity());
-        }
+        let Ok(trackers) = self.value_map.trackers.read() else {
+            return (0, None);
+        };
 
-        if self
-            .value_map
-            .has_no_attribute_value
-            .load(Ordering::Acquire)
-        {
-            if let Ok(b) = &self.value_map.no_attribute_tracker.buckets.lock() {
-                h.data_points.push(HistogramDataPoint {
-                    attributes: vec![],
+        collect_data_points_readonly(
+            &self.value_map.no_attribs_tracker,
+            &trackers,
+            &mut h.data_points,
+            |attributes, tracker| {
+                let b = tracker
+                    .buckets
+                    .lock()
+                    .unwrap_or_else(|err| err.into_inner());
+                HistogramDataPoint {
+                    attributes,
                     start_time: start,
                     time: t,
                     count: b.count,
@@ -302,50 +264,9 @@ impl<T: Number<T>> Histogram<T> {
                         None
                     },
                     exemplars: vec![],
-                });
-            }
-        }
-
-        let trackers = match self.value_map.trackers.write() {
-            Ok(v) => v,
-            Err(_) => return (0, None),
-        };
-
-        // TODO: This will use an unbounded amount of memory if there
-        // are unbounded number of attribute sets being aggregated. Attribute
-        // sets that become "stale" need to be forgotten so this will not
-        // overload the system.
-        let mut seen = HashSet::new();
-        for (attrs, tracker) in trackers.iter() {
-            if seen.insert(Arc::as_ptr(tracker)) {
-                if let Ok(b) = tracker.buckets.lock() {
-                    h.data_points.push(HistogramDataPoint {
-                        attributes: attrs.clone(),
-                        start_time: start,
-                        time: t,
-                        count: b.count,
-                        bounds: self.bounds.clone(),
-                        bucket_counts: b.counts.clone(),
-                        sum: if self.record_sum {
-                            b.total
-                        } else {
-                            T::default()
-                        },
-                        min: if self.record_min_max {
-                            Some(b.min)
-                        } else {
-                            None
-                        },
-                        max: if self.record_min_max {
-                            Some(b.max)
-                        } else {
-                            None
-                        },
-                        exemplars: vec![],
-                    });
                 }
-            }
-        }
+            },
+        );
 
         (h.data_points.len(), new_agg.map(|a| Box::new(a) as Box<_>))
     }
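Note: the new `collect_data_points_reset` and `collect_data_points_readonly` helpers come from the parent `internal` module and their definitions are not part of this diff. The sketch below is only an assumed shape, simplified for illustration: plain `String` attribute keys stand in for `KeyValue` sets, and the no-attributes tracker argument is omitted. It mirrors what the removed code did inline — the delta ("reset") variant drains the tracker map so the next cycle starts empty, the cumulative ("readonly") variant only iterates it, both deduplicate trackers shared between attribute orderings by `Arc` pointer, and both leave data-point construction to a caller-supplied closure.

```rust
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

// Assumed sketch of the delta-style helper: drain the map, skip trackers
// already exported (several attribute orderings can share one Arc), and let
// the caller turn each (attributes, tracker) pair into a data point.
fn collect_data_points_reset<Tr, Dp>(
    trackers: &mut HashMap<Vec<String>, Arc<Tr>>,
    dest: &mut Vec<Dp>,
    mut build: impl FnMut(Vec<String>, &Tr) -> Dp,
) {
    let mut seen = HashSet::new();
    for (attrs, tracker) in trackers.drain() {
        if seen.insert(Arc::as_ptr(&tracker)) {
            dest.push(build(attrs, tracker.as_ref()));
        }
    }
}

// Assumed sketch of the cumulative-style helper: same dedup, but the map is
// only read, so totals keep accumulating across collection cycles.
fn collect_data_points_readonly<Tr, Dp>(
    trackers: &HashMap<Vec<String>, Arc<Tr>>,
    dest: &mut Vec<Dp>,
    mut build: impl FnMut(Vec<String>, &Tr) -> Dp,
) {
    let mut seen = HashSet::new();
    for (attrs, tracker) in trackers {
        if seen.insert(Arc::as_ptr(tracker)) {
            dest.push(build(attrs.clone(), tracker.as_ref()));
        }
    }
}
```

Keeping the locking, dedup, and reset policy in one shared helper lets each aggregator (histogram, sum, gauge) supply only the closure that builds its own data-point type.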
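A second behavioral detail visible in the diff: the old code locked each bucket with `if let Ok(b) = tracker.buckets.lock()`, so a poisoned `Mutex` silently dropped that data point. The new closures use `unwrap_or_else(|err| err.into_inner())`, which recovers the guard carried inside the `PoisonError` and still exports the accumulated counts. A minimal, self-contained illustration of the idiom (`lock_ignoring_poison` is a hypothetical name, not part of the crate):

```rust
use std::sync::{Mutex, MutexGuard};

// Lock a mutex and keep going even if a previous holder panicked:
// the PoisonError still owns the guard, so the protected data stays usable.
fn lock_ignoring_poison<T>(m: &Mutex<T>) -> MutexGuard<'_, T> {
    m.lock().unwrap_or_else(|err| err.into_inner())
}

fn main() {
    let counts = Mutex::new(vec![0u64; 4]);
    lock_ignoring_poison(&counts)[0] += 1;
    assert_eq!(lock_ignoring_poison(&counts)[0], 1);
}
```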