Commit fe7ba45

Author: Omri Sarig (committed)

scripts/logging: Handle partly read packets
The current implementation of the serial log parser runs in a loop: every 2 seconds it checks whether any data is ready to be read and, if so, reads and parses it. If the last packet is not yet fully available, the script reads only the first part of that packet and then fails while parsing, ending the run with an error. This should not be the case, as having only part of a packet in the buffer is not an error.

This commit fixes the problem: instead of failing because it does not have enough data, the parser now parses all the complete packets it has and keeps the trailing partial packet in the queue, to be parsed together with the next chunk of data. This is done by updating the log parsers to return the amount of data parsed, and by updating the calling scripts to handle the new return value. Additionally, the parser now returns quietly when it encounters a partial message, and raises an exception for any other kind of parsing error (instead of returning a boolean status code).

Beyond the partial-packet handling, this commit also makes the following minor improvements:

* parserlib now fails by raising an exception instead of exiting, so the calling code can choose a different error handling if needed.
* The dictionary and parser are now created before the parse operation. This makes the UART parser more efficient and removes the previously duplicated messages about build ID, target architecture and endianness (which were printed every time new data was received from the UART).

Signed-off-by: Omri Sarig <[email protected]>
1 parent a8a1dd1 commit fe7ba45
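
The pattern the callers adopt with this change can be sketched as follows (hypothetical names, not the actual Zephyr code; the real loops are in the diffs below): the parser reports how many bytes it consumed, and the caller buffers the unconsumed tail until more data arrives.

# Illustrative sketch only -- names are made up; parse_log_data(buf) is
# assumed to return the number of bytes it consumed.
def pump(read_chunk, parse_log_data):
    """Feed chunks from read_chunk() into parse_log_data(), keeping partial packets buffered."""
    data = b''
    while True:
        chunk = read_chunk()
        if not chunk:
            return data                  # leftover bytes if the stream ended mid-packet
        data += chunk
        consumed = parse_log_data(data)  # parses every complete packet in data
        data = data[consumed:]           # the trailing partial packet stays buffered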

File tree: 5 files changed (+100, -25 lines)


scripts/logging/dictionary/dictionary_parser/log_parser_v1.py

Lines changed: 30 additions & 4 deletions
@@ -255,6 +255,18 @@ def print_hexdump(hex_data, prefix_len, color):
         print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
                                                   hex_vals, hex_padding, chr_vals))
 
+    def get_full_msg_hdr_size(self):
+        """Get the size of the full message header"""
+        return struct.calcsize(self.fmt_msg_type) + \
+               struct.calcsize(self.fmt_msg_hdr) + \
+               struct.calcsize(self.fmt_msg_timestamp)
+
+    def get_normal_msg_size(self, logdata, offset):
+        """Get the needed size of the normal log message at offset"""
+        log_desc, source_id = struct.unpack_from(self.fmt_msg_hdr, logdata, offset)
+        pkg_len = (log_desc >> 6) & int(math.pow(2, 10) - 1)
+        data_len = (log_desc >> 16) & int(math.pow(2, 12) - 1)
+        return self.get_full_msg_hdr_size() + pkg_len + data_len
 
     def parse_one_normal_msg(self, logdata, offset):
         """Parse one normal log message and print the encoded message"""
@@ -347,27 +359,41 @@ def parse_log_data(self, logdata, debug=False):
         offset = 0
 
         while offset < len(logdata):
+            if offset + struct.calcsize(self.fmt_msg_type) > len(logdata):
+                return offset
+
             # Get message type
             msg_type = struct.unpack_from(self.fmt_msg_type, logdata, offset)[0]
-            offset += struct.calcsize(self.fmt_msg_type)
 
             if msg_type == MSG_TYPE_DROPPED:
+
+                if offset + struct.calcsize(self.fmt_dropped_cnt) > len(logdata):
+                    return offset
+                offset += struct.calcsize(self.fmt_msg_type)
+
                 num_dropped = struct.unpack_from(self.fmt_dropped_cnt, logdata, offset)
                 offset += struct.calcsize(self.fmt_dropped_cnt)
 
                 print(f"--- {num_dropped} messages dropped ---")
 
             elif msg_type == MSG_TYPE_NORMAL:
+
+                if ((offset + self.get_full_msg_hdr_size() > len(logdata)) or
+                    (offset + self.get_normal_msg_size(logdata, offset) > len(logdata))):
+                    return offset
+
+                offset += struct.calcsize(self.fmt_msg_type)
+
                 ret = self.parse_one_normal_msg(logdata, offset)
                 if ret is None:
-                    return False
+                    raise ValueError("Error parsing normal log message")
 
                 offset = ret
 
             else:
                 logger.error("------ Unknown message type: %s", msg_type)
-                return False
+                raise ValueError(f"Unknown message type: {msg_type}")
 
-        return True
+        return offset
 
 colorama.init()
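
For reference, the shifts and masks in get_normal_msg_size() above follow the v1 descriptor layout, where pkg_len occupies 10 bits starting at bit 6 of log_desc and data_len occupies 12 bits starting at bit 16. A small self-check of that extraction, with made-up lengths:

import math

# Made-up lengths, only to exercise the same masks the parser uses.
pkg_len, data_len = 12, 100
log_desc = (pkg_len << 6) | (data_len << 16)

assert (log_desc >> 6) & int(math.pow(2, 10) - 1) == pkg_len    # bits 6..15
assert (log_desc >> 16) & int(math.pow(2, 12) - 1) == data_len  # bits 16..27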

scripts/logging/dictionary/dictionary_parser/log_parser_v3.py

Lines changed: 29 additions & 4 deletions
@@ -260,6 +260,16 @@ def print_hexdump(hex_data, prefix_len, color):
         print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
                                                   hex_vals, hex_padding, chr_vals))
 
+    def get_full_msg_hdr_size(self):
+        """Get the size of the full message header"""
+        return struct.calcsize(self.fmt_msg_type) + \
+               struct.calcsize(self.fmt_msg_hdr) + \
+               struct.calcsize(self.fmt_msg_timestamp)
+
+    def get_normal_msg_size(self, logdata, offset):
+        """Get the needed size of the normal log message at offset"""
+        _, pkg_len, data_len, source_id = struct.unpack_from(self.fmt_msg_hdr, logdata, offset + struct.calcsize(self.fmt_msg_type))
+        return self.get_full_msg_hdr_size() + pkg_len + data_len
 
     def parse_one_normal_msg(self, logdata, offset):
         """Parse one normal log message and print the encoded message"""
@@ -356,27 +366,42 @@ def parse_log_data(self, logdata, debug=False):
         offset = 0
 
         while offset < len(logdata):
+            if offset + struct.calcsize(self.fmt_msg_type) > len(logdata):
+                return offset
+
             # Get message type
             msg_type = struct.unpack_from(self.fmt_msg_type, logdata, offset)[0]
-            offset += struct.calcsize(self.fmt_msg_type)
 
             if msg_type == MSG_TYPE_DROPPED:
+
+                if offset + struct.calcsize(self.fmt_dropped_cnt) > len(logdata):
+                    return offset
+
+                offset += struct.calcsize(self.fmt_msg_type)
+
                 num_dropped = struct.unpack_from(self.fmt_dropped_cnt, logdata, offset)
                 offset += struct.calcsize(self.fmt_dropped_cnt)
 
                 print(f"--- {num_dropped} messages dropped ---")
 
             elif msg_type == MSG_TYPE_NORMAL:
+
+                if ((offset + self.get_full_msg_hdr_size() > len(logdata)) or
+                    (offset + self.get_normal_msg_size(logdata, offset) > len(logdata))):
+                    return offset
+
+                offset += struct.calcsize(self.fmt_msg_type)
+
                 ret = self.parse_one_normal_msg(logdata, offset)
                 if ret is None:
-                    return False
+                    raise ValueError("Error parsing normal log message")
 
                 offset = ret
 
             else:
                 logger.error("------ Unknown message type: %s", msg_type)
-                return False
+                raise ValueError(f"Unknown message type: {msg_type}")
 
-        return True
+        return offset
 
 colorama.init()
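
Unlike v1, the v3 header carries pkg_len and data_len as plain struct fields, so get_normal_msg_size() unpacks them directly instead of masking a descriptor word. A hedged illustration with a purely hypothetical format string (the real self.fmt_msg_hdr is defined in the parser class):

import struct

# Hypothetical 4-field header: (domain, pkg_len, data_len, source_id).
fmt_msg_hdr = "<BHHQ"
hdr = struct.pack(fmt_msg_hdr, 0, 12, 100, 7)

_, pkg_len, data_len, source_id = struct.unpack_from(fmt_msg_hdr, hdr, 0)
assert (pkg_len, data_len) == (12, 100)   # no bit manipulation needed in v3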

scripts/logging/dictionary/log_parser.py

Lines changed: 7 additions & 1 deletion
@@ -109,12 +109,18 @@ def main():
     else:
         logger.setLevel(logging.INFO)
 
+    database = parserlib.get_database(args.dbfile)
+    log_parser = parserlib.get_log_parser(database, logger)
+
     logdata = read_log_file(args)
     if logdata is None:
         logger.error("ERROR: cannot read log from file: %s, exiting...", args.logfile)
         sys.exit(1)
 
-    parserlib.parser(logdata, args.dbfile, logger)
+    parsed_data_offset = parserlib.parser(logdata, database, log_parser, logger)
+    if parsed_data_offset != len(logdata):
+        logger.error("ERROR: Not all data was parsed, %d bytes left unparsed", len(logdata) - parsed_data_offset)
+        sys.exit(1)
 
 if __name__ == "__main__":
     main()

scripts/logging/dictionary/log_parser_uart.py

Lines changed: 7 additions & 2 deletions
@@ -50,14 +50,19 @@ def main():
     else:
         logger.setLevel(logging.INFO)
 
+    database = parserlib.get_database(args.dbfile)
+    log_parser = parserlib.get_log_parser(database, logger)
+
     # Parse the log every second from serial port
+    data = b''
     with serial.Serial(args.serialPort, args.baudrate) as ser:
         ser.timeout = 2
         while True:
             size = ser.inWaiting()
             if size:
-                data = ser.read(size)
-                parserlib.parser(data, args.dbfile, logger)
+                data += ser.read(size)
+                parsed_data_offset = parserlib.parser(data, database, log_parser, logger)
+                data = data[parsed_data_offset:]
             time.sleep(1)
 
 if __name__ == "__main__":
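
The value returned by parserlib.parser() is the offset of the first unconsumed byte, so slicing the buffer at that offset keeps exactly the partial packet for the next iteration. A tiny illustration with made-up bytes:

# Made-up buffer: 10 fully parsed bytes followed by a 2-byte partial packet.
data = b"\x01" * 10 + b"\x02\x03"
parsed_data_offset = 10              # what the parser would report here
data = data[parsed_data_offset:]
assert data == b"\x02\x03"           # re-parsed together with the next chunk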

scripts/logging/dictionary/parserlib.py

Lines changed: 27 additions & 14 deletions
@@ -19,22 +19,23 @@
 from dictionary_parser.log_database import LogDatabase
 
 
-def parser(logdata, dbfile, logger):
-    """function of serial parser"""
+def get_database(dbfile):
+    """Create a database out of the JSON file."""
     # Read from database file
     database = LogDatabase.read_json_database(dbfile)
 
-    if not isinstance(logger, logging.Logger):
-        raise ValueError("Invalid logger instance. Please configure the logger!")
-
     if database is None:
         logger.error("ERROR: Cannot open database file: exiting...")
-        sys.exit(1)
+        raise ValueError(f"Cannot open database file: {dbfile}")
 
-    if logdata is None:
-        logger.error("ERROR: cannot read log from file: exiting...")
-        sys.exit(1)
+    return database
+
+
+def get_log_parser(database, logger):
+    """Get the log parser for the given database.
 
+    In addition to creating the parser, the function prints general information about the parser.
+    """
     log_parser = dictionary_parser.get_parser(database)
     if log_parser is not None:
         logger.debug("# Build ID: %s", database.get_build_id())
@@ -43,10 +44,22 @@ def parser(logdata, dbfile, logger):
             logger.debug("# Endianness: Little")
         else:
             logger.debug("# Endianness: Big")
-
-        ret = log_parser.parse_log_data(logdata)
-        if not ret:
-            logger.error("ERROR: there were error(s) parsing log data")
-            sys.exit(1)
     else:
         logger.error("ERROR: Cannot find a suitable parser matching database version!")
+        raise ValueError(f"Cannot create parser.")
+
+    return log_parser
+
+
+def parser(logdata, database, log_parser, logger):
+    """function of serial parser"""
+
+    if not isinstance(logger, logging.Logger):
+        raise ValueError("Invalid logger instance. Please configure the logger!")
+
+    if logdata is None:
+        logger.error("ERROR: cannot read log from file: exiting...")
+        raise ValueError("Cannot read log data.")
+
+    ret = log_parser.parse_log_data(logdata)
+    return ret