 // SPDX-License-Identifier: Apache-2.0
 // Copyright Open Network Fabric Authors
+//
 
-use crate::rate::Derivative;
+use crate::rate::{HashMapSmoothing, SavitzkyGolayFilter};
 use net::packet::Packet;
 use pipeline::NetworkFunction;
@@ -11,7 +12,6 @@ use std::time::{Duration, Instant};
 use vpcmap::VpcDiscriminant;
 use vpcmap::map::VpcMapReader;
 
-use crate::rate::SavitzkyGolayFilter;
 use crate::{RegisteredVpcMetrics, Specification, VpcMetricsSpec};
 use net::buffer::PacketBufferMut;
 use rand::RngCore;
@@ -54,13 +54,11 @@ pub struct StatsCollector {
     /// metrics maps known VpcDiscriminants to their metrics
     metrics: hashbrown::HashMap<VpcDiscriminant, RegisteredVpcMetrics>,
     /// Outstanding (i.e., not yet submitted) batches. These batches will eventually be collected
-    /// in to the `submitted` filter in order to calculate rates.
+    /// into the `submitted` filter in order to calculate smoothed rates.
     outstanding: VecDeque<BatchSummary<u64>>,
-    /// Filter for batches which have been submitted to the `submitted` filter. This filter is
-    /// used to calculate rates.
+    /// Filter for batches which have been submitted; used to calculate smoothed pps/Bps.
+    /// We push *apportioned per-batch counts* here; with TIME_TICK=1s, smoothing(counts) ≈ smoothing(pps).
     submitted: SavitzkyGolayFilter<hashbrown::HashMap<VpcDiscriminant, TransmitSummary<u64>>>,
-    /// Running cumulative totals used to produce monotonic series into the SG derivative filter.
-    cumulative_totals: hashbrown::HashMap<VpcDiscriminant, TransmitSummary<u64>>,
     /// Reader for the VPC map. This reader is used to determine the VPCs that are currently
     /// known to the system.
     vpcmap_r: VpcMapReader<VpcMapName>,
@@ -104,7 +102,6 @@ impl StatsCollector {
             metrics,
             outstanding,
             submitted: SavitzkyGolayFilter::new(Self::TIME_TICK),
-            cumulative_totals: hashbrown::HashMap::new(),
             vpcmap_r,
             updates,
         };
@@ -166,7 +163,7 @@ impl StatsCollector {
         }
     }
 
-    /// Calculate updated stats and submit any expired entries to the rate filter.
+    /// Calculate updated stats and submit any expired entries to the SG filter.
     #[tracing::instrument(level = "trace")]
     fn update(&mut self, update: Option<MetricsUpdate>) {
         if let Some(update) = update {
@@ -283,7 +280,7 @@ impl StatsCollector {
         }
     }
 
-    /// Submit a concluded set of stats for inclusion in rate calculations
+    /// Submit a concluded set of stats for inclusion in smoothing calculations
    #[tracing::instrument(level = "trace")]
    fn submit_expired(&mut self, concluded: BatchSummary<u64>) {
        const CAPACITY_PADDING: usize = 16;
@@ -294,11 +291,13 @@ impl StatsCollector {
             .last()
             .unwrap_or_else(|| unreachable!())
             .planned_end;
-        let duration = Duration::from_secs(1);
+        let duration = Self::TIME_TICK;
         self.outstanding
             .push_back(BatchSummary::with_start_and_capacity(
                 start, duration, capacity,
             ));
+
+        // Update raw packet/byte COUNTS for “total” metrics (monotonic counters)
         concluded.vpc.iter().for_each(|(&src, tx_summary)| {
             let metrics = match self.metrics.get(&src) {
                 None => {
@@ -321,40 +320,18 @@ impl StatsCollector {
             });
         });
 
-        // Update cumulative totals from the *concluded* batch (apportioned already)
-        for (&src, tx_summary) in concluded.vpc.iter() {
-            let totals = self
-                .cumulative_totals
-                .entry(src)
-                .or_insert_with(TransmitSummary::new);
-
-            for (&dst, &stats) in tx_summary.dst.iter() {
-                match totals.dst.get_mut(&dst) {
-                    Some(entry) => {
-                        entry.packets = entry.packets.saturating_add(stats.packets);
-                        entry.bytes = entry.bytes.saturating_add(stats.bytes);
-                    }
-                    None => {
-                        totals.dst.insert(dst, stats);
-                    }
-                }
-            }
-        }
-
-        // Push the cumulative snapshot into the SG derivative filter
-        debug!("sg snapshot: {:?}", self.cumulative_totals);
-        self.submitted.push(self.cumulative_totals.clone());
+        // Push this *apportioned per-batch* snapshot into the SG window.
+        // With TIME_TICK=1s, smoothing these counts ≈ smoothing pps/Bps directly.
+        self.submitted.push(concluded.vpc);
 
+        // Build per-source filters and smooth.
         let filters_by_src: hashbrown::HashMap<
             VpcDiscriminant,
             TransmitSummary<SavitzkyGolayFilter<u64>>,
         > = (&self.submitted).into();
-        if let Ok(rates_by_src) =
-            <hashbrown::HashMap<_, TransmitSummary<SavitzkyGolayFilter<u64>>>>::derivative(
-                &filters_by_src,
-            )
-        {
-            rates_by_src.iter().for_each(|(&src, tx_summary)| {
+
+        if let Ok(smoothed_by_src) = filters_by_src.smooth() {
+            smoothed_by_src.iter().for_each(|(&src, tx_summary)| {
                 let metrics = match self.metrics.get(&src) {
                     None => {
                         warn!("lost metrics for src {src}");
@@ -364,14 +341,20 @@ impl StatsCollector {
                 };
                 tx_summary.dst.iter().for_each(|(dst, rate)| {
                     if let Some(action) = metrics.peering.get(dst) {
+                        // Smoothed packets-per-second / bytes-per-second (since tick=1s)
                         action.tx.packet.rate.metric.set(rate.packets);
                         action.tx.byte.rate.metric.set(rate.bytes);
-                        debug!("set rate for src {src} to dst {dst}: {:?}", rate);
+                        trace!(
+                            "smoothed rate src={:?} dst={:?}: pps={:.3} Bps={:.3}",
+                            src, dst, rate.packets, rate.bytes
+                        );
                     } else {
                         warn!("lost metrics for src {src} to dst {dst}");
                     }
                 });
            });
+        } else {
+            trace!("Not enough samples yet for smoothing");
         }
 
         // TODO: add in drop metrics
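
Why pushing apportioned per-batch counts straight into the Savitzky-Golay window works: with TIME_TICK = 1 s, each batch's packet/byte count is already a per-second rate sample, so smoothing the counts yields smoothed pps/Bps directly and no cumulative series or derivative step is needed (presumably the `.smooth()` call comes from the `HashMapSmoothing` trait imported above). The sketch below is a minimal, self-contained illustration of that idea for a single scalar series only; the names (`SgWindow`, its `push`/`smooth` methods) and the fixed 5-point window are assumptions for illustration, not the crate's `SavitzkyGolayFilter` API, which smooths whole `HashMap<VpcDiscriminant, TransmitSummary<u64>>` snapshots at once.

use std::collections::VecDeque;

/// Illustrative only: a 5-point Savitzky-Golay smoothing window over per-tick counts.
/// Not the project's SavitzkyGolayFilter; names and window length are assumed here.
struct SgWindow {
    samples: VecDeque<f64>,
}

impl SgWindow {
    /// Standard central-point coefficients for a 5-point quadratic SG fit: (-3, 12, 17, 12, -3)/35.
    const COEFFS: [f64; 5] = [-3.0, 12.0, 17.0, 12.0, -3.0];
    const NORM: f64 = 35.0;

    fn new() -> Self {
        Self {
            samples: VecDeque::with_capacity(5),
        }
    }

    /// Push one tick's packet (or byte) count. With a 1 s tick, a count is already a rate sample.
    fn push(&mut self, count: u64) {
        if self.samples.len() == 5 {
            self.samples.pop_front();
        }
        self.samples.push_back(count as f64);
    }

    /// Smoothed estimate at the window centre; `None` until the window is full,
    /// mirroring the "Not enough samples yet for smoothing" branch in the diff above.
    fn smooth(&self) -> Option<f64> {
        if self.samples.len() < 5 {
            return None;
        }
        let dot: f64 = self
            .samples
            .iter()
            .zip(Self::COEFFS.iter())
            .map(|(s, c)| s * c)
            .sum();
        Some(dot / Self::NORM)
    }
}

fn main() {
    // Per-second packet counts for one (src, dst) pair; smoothing them directly yields pps.
    let mut pps = SgWindow::new();
    for count in [1000u64, 1040, 980, 1020, 1010] {
        pps.push(count);
    }
    println!("smoothed pps = {:?}", pps.smooth()); // Some(1010.0)
}

This is also why the `cumulative_totals` map and the `derivative()` call can be deleted in the hunks above: per-tick counts are already differences, so there is no cumulative series to maintain, saturating-add, or clone on every tick.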