import logging
import time
from typing import Any

import numpy as np
import p4p
from confluent_kafka import Consumer
from confluent_kafka.cimpl import TopicPartition
from p4p.server import DynamicProvider, Server, StaticProvider

from kafka_dae_diagnostics.data import Data
from kafka_dae_diagnostics.kafka_handlers import (
    handle_event_messages,
    handle_run_info_messages,
)
from kafka_dae_diagnostics.spectrum_handlers import SpectrumHandler
from kafka_dae_diagnostics.static_pvs import StaticPVs
1720logger = logging .getLogger (__name__ )
1821
2124
def main(prefix: str = "TE:NDW2922:KDAEDIAG:") -> None:
    """Build the shared data model, publish all PVs, and serve forever.

    Sets up a ``StaticProvider`` for scalar diagnostics PVs and a
    ``DynamicProvider`` for per-spectrum PVs, then blocks inside
    :func:`consume_from_kafka_forever`.

    Args:
        prefix: EPICS PV name prefix for every served PV. Defaults to the
            NDW2922 test-machine prefix; parameterized so other instances
            can reuse this entry point without code changes.
    """
    data = Data(
        spectra=np.zeros(shape=(1, 1, 1), dtype=np.float64),
        callbacks={},
        bin_boundaries=np.linspace(0, 20_000_000, num=1001, dtype=np.int32),
    )

    static_pvs = StaticPVs(data)
    static_provider = StaticProvider()

    # Counts / throughput diagnostics.
    static_provider.add(f"{prefix}EVENTS", static_pvs.total_events)
    static_provider.add(f"{prefix}MEVENTS", static_pvs.total_mevents)
    # NOTE(review): TOTALCOUNTS serves the same PV object as EVENTS —
    # confirm this aliasing is intentional and not a copy/paste slip.
    static_provider.add(f"{prefix}TOTALCOUNTS", static_pvs.total_events)
    static_provider.add(f"{prefix}TOTAL_EVENT_MESSAGES", static_pvs.total_event_messages)
    static_provider.add(f"{prefix}TOTAL_EVENT_MEGABYTES", static_pvs.total_event_megabytes)
    static_provider.add(f"{prefix}COUNTRATE", static_pvs.count_rate)
    static_provider.add(f"{prefix}HISTMEMORY", static_pvs.histogram_memory)

    # DAE geometry.
    static_provider.add(f"{prefix}NUMPERIODS", static_pvs.num_periods)
    static_provider.add(f"{prefix}NUMSPECTRA", static_pvs.num_spectra)
    static_provider.add(f"{prefix}NUMTIMECHANNELS", static_pvs.num_time_channels)

    # Run timing / processing health.
    static_provider.add(f"{prefix}STARTTIME", static_pvs.start_time)
    static_provider.add(f"{prefix}RUNDURATION", static_pvs.run_duration)
    static_provider.add(f"{prefix}DIAGNOSTICSLAG", static_pvs.processing_lag)

    spectrum_handler = SpectrumHandler(prefix, data)
    providers = [
        DynamicProvider("spectra", handler=spectrum_handler),
        static_provider,
    ]

    # Re-publish every static PV whenever fresh Kafka data arrives.
    data.callbacks["static-callbacks"] = static_pvs.update_all

    server = Server(providers=providers)
    with server:
        consume_from_kafka_forever(data)
3563
3664
def callbacks(data: Data) -> None:
    """Invoke every registered callback with the current ``data``.

    A failing callback is logged and skipped so that one misbehaving
    subscriber cannot starve the others. The registry lock is held for
    the whole pass to keep the callback set stable against other threads.
    """
    with data.callbacks_lock:
        # Snapshot the items: a callback that registers or removes a
        # callback would otherwise raise "dictionary changed size during
        # iteration" mid-loop.
        for callback_id, cb in list(data.callbacks.items()):
            try:
                cb(data)
            except Exception as e:
                logger.warning(
                    "Callback '%s' failed, error: %s %s",
                    callback_id, e.__class__.__name__, e,
                )
4972
5073
def make_runinfo_consumer(settings: dict[str, Any]) -> Consumer:
    """Create a Kafka consumer positioned near the end of the run-info topic.

    The consumer is assigned to partition 0 of the run-info topic, starting
    two messages before the high watermark — clamped to the low watermark so
    the offset is always valid on a short or compacted topic — so that a
    just-started process replays the most recent run-info messages.

    Args:
        settings: librdkafka configuration, passed straight to ``Consumer``.

    Returns:
        A ``Consumer`` already assigned to its start offset.
    """
    # Single source of truth for the topic name (was duplicated inline).
    topic = "NDW2922_runInfo"
    runinfo_consumer = Consumer(settings)

    low, high = runinfo_consumer.get_watermark_offsets(
        TopicPartition(topic, 0), cached=False
    )
    start_offset = max(high - 2, low)
    runinfo_consumer.assign([TopicPartition(topic, 0, start_offset)])
    return runinfo_consumer
6889
@@ -92,12 +113,15 @@ def consume_from_kafka_forever(data: Data) -> None:
92113
93114 while True :
94115 run_info_messages = runinfo_consumer .consume (num_messages = 50 , timeout = 0. )
95- handle_run_info_messages (run_info_messages , data = data , event_consumer = event_consumer )
116+ if run_info_messages :
117+ handle_run_info_messages (run_info_messages , data = data , event_consumer = event_consumer )
96118
97119 event_messages = event_consumer .consume (num_messages = 1000 , timeout = 0.1 )
98- handle_event_messages (event_messages , data = data )
120+ if event_messages :
121+ start = time .time ()
122+ handle_event_messages (event_messages , data = data )
123+ logger .debug ("Handled %d event messages in %.3f ms" , len (event_messages ), ((time .time () - start ) * 1000 ))
99124
100125 if len (event_messages ) > 0 or len (run_info_messages ) > 0 :
101- # If any messages arrived, spectra may have changed - update any PVs which
102- # are connected.
103- update_spectra (data )
126+ # If any messages arrived, data may have changed - update all subscribed callbacks
127+ callbacks (data )