logging: dictionary: format scripts with ruff

Simple formatting via ruff on dictionary logging scripts.
No manual editing was done on the scripts.

Signed-off-by: Daniel Leung <daniel.leung@intel.com>
This commit is contained in:
Daniel Leung
2025-11-12 11:34:06 -08:00
committed by Benjamin Cabé
parent e37b3ca50e
commit fb055ea77e
11 changed files with 199 additions and 238 deletions

View File

@@ -1250,15 +1250,6 @@ exclude = [
"./scripts/list_boards.py",
"./scripts/list_hardware.py",
"./scripts/list_shields.py",
"./scripts/logging/dictionary/database_gen.py",
"./scripts/logging/dictionary/dictionary_parser/data_types.py",
"./scripts/logging/dictionary/dictionary_parser/log_database.py",
"./scripts/logging/dictionary/dictionary_parser/log_parser.py",
"./scripts/logging/dictionary/dictionary_parser/log_parser_v1.py",
"./scripts/logging/dictionary/dictionary_parser/log_parser_v3.py",
"./scripts/logging/dictionary/dictionary_parser/utils.py",
"./scripts/logging/dictionary/log_parser.py",
"./scripts/logging/dictionary/log_parser_uart.py",
"./scripts/make_bugs_pickle.py",
"./scripts/net/enumerate_http_status.py",
"./scripts/profiling/stackcollapse.py",
@@ -1474,6 +1465,5 @@ exclude = [
"./tests/net/socket/tls_configurations/pytest/test_app_vs_openssl.py",
"./tests/net/socket/udp/generate-c-string.py",
"./tests/subsys/debug/gdbstub/pytest/test_gdbstub.py",
"./tests/subsys/logging/dictionary/pytest/test_logging_dictionary.py",
"./tests/ztest/ztest_param/pytest/test_parameters.py",
]

View File

@@ -43,9 +43,7 @@ STATIC_STRING_SECTIONS = [
]
# Sections that contain static strings but are not part of the binary (allocable).
REMOVED_STRING_SECTIONS = [
'log_strings'
]
REMOVED_STRING_SECTIONS = ['log_strings']
# Regular expression to match DWARF location
@@ -76,23 +74,18 @@ def parse_args():
argparser.add_argument("elffile", help="Zephyr ELF binary")
argparser.add_argument("--build", help="Build ID")
argparser.add_argument("--build-header",
help="Header file containing BUILD_VERSION define")
argparser.add_argument("--debug", action="store_true",
help="Print extra debugging information")
argparser.add_argument("-v", "--verbose", action="store_true",
help="Print more information")
argparser.add_argument("--build-header", help="Header file containing BUILD_VERSION define")
argparser.add_argument("--debug", action="store_true", help="Print extra debugging information")
argparser.add_argument("-v", "--verbose", action="store_true", help="Print more information")
outfile_grp = argparser.add_mutually_exclusive_group(required=True)
outfile_grp.add_argument("--json",
help="Output Dictionary Logging Database file in JSON")
outfile_grp.add_argument("--syst",
help="Output MIPI Sys-T Collateral XML file")
outfile_grp.add_argument("--json", help="Output Dictionary Logging Database file in JSON")
outfile_grp.add_argument("--syst", help="Output MIPI Sys-T Collateral XML file")
return argparser.parse_args()
def extract_elf_code_data_sections(elf, wildcards = None):
def extract_elf_code_data_sections(elf, wildcards=None):
"""Find all sections in ELF file"""
sections = {}
@@ -101,17 +94,17 @@ def extract_elf_code_data_sections(elf, wildcards = None):
# since they actually have code/data.
#
# On contrary, BSS is allocated but NOBITS.
if (((wildcards is not None) and (sect.name in wildcards)) or
((sect['sh_flags'] & SH_FLAGS.SHF_ALLOC) == SH_FLAGS.SHF_ALLOC
and sect['sh_type'] == 'SHT_PROGBITS')
if ((wildcards is not None) and (sect.name in wildcards)) or (
(sect['sh_flags'] & SH_FLAGS.SHF_ALLOC) == SH_FLAGS.SHF_ALLOC
and sect['sh_type'] == 'SHT_PROGBITS'
):
sections[sect.name] = {
'name' : sect.name,
'size' : sect['sh_size'],
'start' : sect['sh_addr'],
'end' : sect['sh_addr'] + sect['sh_size'] - 1,
'data' : sect.data(),
}
'name': sect.name,
'size': sect['sh_size'],
'start': sect['sh_addr'],
'end': sect['sh_addr'] + sect['sh_size'] - 1,
'data': sect.data(),
}
return sections
@@ -121,11 +114,11 @@ def find_elf_sections(elf, sh_name):
for section in elf.iter_sections():
if section.name == sh_name:
ret = {
'name' : section.name,
'size' : section['sh_size'],
'start' : section['sh_addr'],
'end' : section['sh_addr'] + section['sh_size'] - 1,
'data' : section.data(),
'name': section.name,
'size': section['sh_size'],
'start': section['sh_addr'],
'end': section['sh_addr'] + section['sh_size'] - 1,
'data': section.data(),
}
return ret
@@ -137,17 +130,20 @@ def get_kconfig_symbols(elf):
"""Get kconfig symbols from the ELF file"""
for section in elf.iter_sections():
if isinstance(section, SymbolTableSection) and section['sh_type'] != 'SHT_DYNSYM':
return {sym.name: sym.entry.st_value
for sym in section.iter_symbols()
if sym.name.startswith("CONFIG_")}
return {
sym.name: sym.entry.st_value
for sym in section.iter_symbols()
if sym.name.startswith("CONFIG_")
}
raise LookupError("Could not find symbol table")
def find_log_const_symbols(elf):
"""Extract all "log_const_*" symbols from ELF file"""
symbol_tables = [s for s in elf.iter_sections()
if isinstance(s, elftools.elf.sections.SymbolTableSection)]
symbol_tables = [
s for s in elf.iter_sections() if isinstance(s, elftools.elf.sections.SymbolTableSection)
]
ret_list = []
@@ -259,8 +255,7 @@ def process_kconfigs(elf, database):
#
# Use 32-bit timestamp? or 64-bit?
if "CONFIG_LOG_TIMESTAMP_64BIT" in kconfigs:
database.add_kconfig("CONFIG_LOG_TIMESTAMP_64BIT",
kconfigs['CONFIG_LOG_TIMESTAMP_64BIT'])
database.add_kconfig("CONFIG_LOG_TIMESTAMP_64BIT", kconfigs['CONFIG_LOG_TIMESTAMP_64BIT'])
def extract_logging_subsys_information(elf, database, string_mappings):
@@ -289,9 +284,9 @@ def is_die_attr_ref(attr):
"""
Returns True if the DIE attribute is a reference.
"""
return bool(attr.form in ('DW_FORM_ref1', 'DW_FORM_ref2',
'DW_FORM_ref4', 'DW_FORM_ref8',
'DW_FORM_ref'))
return bool(
attr.form in ('DW_FORM_ref1', 'DW_FORM_ref2', 'DW_FORM_ref4', 'DW_FORM_ref8', 'DW_FORM_ref')
)
def find_die_var_base_type(compile_unit, die, is_const):
@@ -351,7 +346,8 @@ def extract_string_variables(elf):
for die in compile_unit.iter_DIEs():
# Only care about variables with location information
# and of type "char"
if die.tag == 'DW_TAG_variable' and ('DW_AT_type' in die.attributes
if die.tag == 'DW_TAG_variable' and (
'DW_AT_type' in die.attributes
and 'DW_AT_location' in die.attributes
and is_die_var_const_char(compile_unit, die)
):
@@ -362,23 +358,25 @@ def extract_string_variables(elf):
loc = loc_parser.parse_from_attribute(loc_attr, die.cu['version'], die)
if isinstance(loc, LocationExpr):
try:
addr = describe_DWARF_expr(loc.loc_expr,
dwarf_info.structs)
addr = describe_DWARF_expr(loc.loc_expr, dwarf_info.structs)
matcher = DT_LOCATION_REGEX.match(addr)
if matcher:
addr = int(matcher.group(1), 16)
if addr > 0:
strings.append({
'name': die.attributes['DW_AT_name'].value,
'addr': addr,
'die': die
})
strings.append(
{
'name': die.attributes['DW_AT_name'].value,
'addr': addr,
'die': die,
}
)
except KeyError:
pass
return strings
def try_decode_string(str_maybe):
"""Check if it is a printable string"""
for encoding in STR_ENCODINGS:
@@ -389,6 +387,7 @@ def try_decode_string(str_maybe):
return None
def is_printable(b):
# Check if string is printable according to Python
# since the parser (written in Python) will need to
@@ -398,6 +397,7 @@ def is_printable(b):
# string.printable so they need to be checked separately.
return (b in string.printable) or (b in ACCEPTABLE_ESCAPE_CHARS)
def extract_strings_in_one_section(section, str_mappings):
"""Extract NULL-terminated strings in one ELF section"""
data = section['data']
@@ -412,7 +412,7 @@ def extract_strings_in_one_section(section, str_mappings):
# End of possible string
if start is not None:
# Found potential string
str_maybe = data[start : idx]
str_maybe = data[start:idx]
decoded_str = try_decode_string(str_maybe)
if decoded_str is not None:
@@ -425,8 +425,9 @@ def extract_strings_in_one_section(section, str_mappings):
# (e.g. extended ASC-II characters) or control
# characters (e.g. '\r' or '\n'), so simply print
# the byte string instead.
logger.debug('Found string via extraction at ' + PTR_FMT + ': %s',
addr, str_maybe)
logger.debug(
'Found string via extraction at ' + PTR_FMT + ': %s', addr, str_maybe
)
# GCC-based toolchain will reuse the NULL character
# for empty strings. There is no way to know which
@@ -435,8 +436,7 @@ def extract_strings_in_one_section(section, str_mappings):
null_addr = section['start'] + idx
str_mappings[null_addr] = ''
logger.debug('Found null string via extraction at ' + PTR_FMT,
null_addr)
logger.debug('Found null string via extraction at ' + PTR_FMT, null_addr)
start = None
else:
# Non-printable byte, remove start location
@@ -461,8 +461,9 @@ def extract_static_strings(elf, database, section_extraction=False):
one_str = extract_one_string_in_section(sect, str_var['addr'])
if one_str is not None:
string_mappings[str_var['addr']] = one_str
logger.debug('Found string variable at ' + PTR_FMT + ': %s',
str_var['addr'], one_str)
logger.debug(
'Found string variable at ' + PTR_FMT + ': %s', str_var['addr'], one_str
)
break
if section_extraction:
@@ -478,8 +479,7 @@ def extract_static_strings(elf, database, section_extraction=False):
for sect_name in string_sections:
if sect_name in elf_sections:
rawstr_map = extract_strings_in_one_section(elf_sections[sect_name],
rawstr_map)
rawstr_map = extract_strings_in_one_section(elf_sections[sect_name], rawstr_map)
for one_str in rawstr_map:
if one_str not in string_mappings:
@@ -562,13 +562,11 @@ def main():
# Write database file
if args.json and not LogDatabase.write_json_database(args.json, database):
logger.error("ERROR: Cannot open database file for write: %s, exiting...",
args.json)
logger.error("ERROR: Cannot open database file for write: %s, exiting...", args.json)
sys.exit(1)
if args.syst and not LogDatabase.write_syst_database(args.syst, database):
logger.error("ERROR: Cannot open database file for write: %s, exiting...",
args.syst)
logger.error("ERROR: Cannot open database file for write: %s, exiting...", args.syst)
sys.exit(1)
elffile.close()

View File

@@ -14,6 +14,7 @@ import struct
class DataTypes:
"""Class regarding data types, their alignments and sizes"""
INT = 0
UINT = 1
LONG = 2
@@ -47,14 +48,13 @@ class DataTypes:
self.add_data_type(self.DOUBLE, "d")
self.add_data_type(self.LONG_DOUBLE, "d")
@staticmethod
def get_stack_min_align(arch, is_tgt_64bit):
'''
Correspond to the VA_STACK_ALIGN and VA_STACK_MIN_ALIGN
in cbprintf_internal.h. Note that there might be some
variations that are obtained via actually running through
the log parser.
variations that are obtained via actually running through
the log parser.
Return a tuple where the first element is stack alignment
value. The second element is true if alignment needs to
@@ -102,7 +102,6 @@ class DataTypes:
return (stack_min_align, need_further_align)
@staticmethod
def get_data_type_align(data_type, is_tgt_64bit):
'''
@@ -121,7 +120,6 @@ class DataTypes:
return align
def add_data_type(self, data_type, fmt):
"""Add one data type"""
if self.database.is_tgt_little_endian():
@@ -158,31 +156,26 @@ class DataTypes:
# 'stack_align' should correspond to VA_STACK_ALIGN
# in cbprintf_internal.h
stack_align, need_more_align = DataTypes.get_stack_min_align(
self.database.get_arch(),
self.database.is_tgt_64bit())
self.database.get_arch(), self.database.is_tgt_64bit()
)
if need_more_align:
stack_align = DataTypes.get_data_type_align(data_type,
self.database.is_tgt_64bit())
stack_align = DataTypes.get_data_type_align(data_type, self.database.is_tgt_64bit())
self.data_types[data_type]['stack_align'] = stack_align
def get_sizeof(self, data_type):
"""Get sizeof() of a data type"""
return self.data_types[data_type]['sizeof']
def get_alignment(self, data_type):
"""Get the alignment of a data type"""
return self.data_types[data_type]['align']
def get_stack_alignment(self, data_type):
"""Get the stack alignment of a data type"""
return self.data_types[data_type]['stack_align']
def get_formatter(self, data_type):
"""Get the formatter for a data type"""
return self.data_types[data_type]['fmt']

View File

@@ -16,37 +16,37 @@ from .mipi_syst import gen_syst_xml_file
from .utils import extract_one_string_in_section, find_string_in_mappings
ARCHS = {
"arc" : {
"arc": {
"kconfig": "CONFIG_ARC",
},
"arm" : {
"arm": {
"kconfig": "CONFIG_ARM",
},
"arm64" : {
"arm64": {
"kconfig": "CONFIG_ARM64",
},
"mips" : {
"mips": {
"kconfig": "CONFIG_MIPS",
},
"sparc" : {
"sparc": {
"kconfig": "CONFIG_SPARC",
},
"x86" : {
"x86": {
"kconfig": "CONFIG_X86",
},
"posix" : {
"posix": {
"kconfig": "CONFIG_ARCH_POSIX",
},
"riscv32e" : {
"riscv32e": {
"kconfig": "CONFIG_RISCV_ISA_RV32E",
},
"riscv" : {
"riscv": {
"kconfig": "CONFIG_RISCV",
},
"rx" : {
"rx": {
"kconfig": "CONFIG_RX",
},
"xtensa" : {
"xtensa": {
"kconfig": "CONFIG_XTENSA",
},
}
@@ -54,6 +54,7 @@ ARCHS = {
class LogDatabase:
"""Class of log database"""
# Update this if database format of dictionary based logging
# has changed
ZEPHYR_DICT_LOG_VER = 3
@@ -74,32 +75,26 @@ class LogDatabase:
self.database = new_db
def get_version(self):
"""Get Database Version"""
return self.database['version']
def get_build_id(self):
"""Get Build ID"""
return self.database['build_id']
def set_build_id(self, build_id):
"""Set Build ID in Database"""
self.database['build_id'] = build_id
def get_arch(self):
"""Get the Target Architecture"""
return self.database['arch']
def set_arch(self, arch):
"""Set the Target Architecture"""
self.database['arch'] = arch
def get_tgt_bits(self):
"""Get Target Bitness: 32 or 64"""
if 'bits' in self.database['target']:
@@ -107,12 +102,10 @@ class LogDatabase:
return None
def set_tgt_bits(self, bits):
"""Set Target Bitness: 32 or 64"""
self.database['target']['bits'] = bits
def is_tgt_64bit(self):
"""Return True if target is 64-bit, False if 32-bit.
None if error."""
@@ -127,7 +120,6 @@ class LogDatabase:
return None
def get_tgt_endianness(self):
"""
Get Target Endianness.
@@ -139,7 +131,6 @@ class LogDatabase:
return None
def set_tgt_endianness(self, endianness):
"""
Set Target Endianness
@@ -148,7 +139,6 @@ class LogDatabase:
"""
self.database['target']['little_endianness'] = endianness
def is_tgt_little_endian(self):
"""Return True if target is little endian"""
if 'little_endianness' not in self.database['target']:
@@ -156,22 +146,18 @@ class LogDatabase:
return self.database['target']['little_endianness'] == self.LITTLE_ENDIAN
def get_string_mappings(self):
"""Get string mappings to database"""
return self.database['string_mappings']
def set_string_mappings(self, database):
"""Add string mappings to database"""
self.database['string_mappings'] = database
def has_string_mappings(self):
"""Return True if there are string mappings in database"""
return 'string_mappings' in self.database
def has_string_sections(self):
"""Return True if there are any static string sections"""
if 'sections' not in self.database:
@@ -179,7 +165,6 @@ class LogDatabase:
return len(self.database['sections']) != 0
def __find_string_in_mappings(self, string_ptr):
"""
Find string pointed by string_ptr in the string mapping
@@ -187,7 +172,6 @@ class LogDatabase:
"""
return find_string_in_mappings(self.database['string_mappings'], string_ptr)
def __find_string_in_sections(self, string_ptr):
"""
Find string pointed by string_ptr in the binary data
@@ -201,7 +185,6 @@ class LogDatabase:
return None
def find_string(self, string_ptr):
"""Find string pointed by string_ptr in the database.
Return None if not found."""
@@ -215,17 +198,15 @@ class LogDatabase:
return one_str
def add_log_instance(self, source_id, name, level, address):
"""Add one log instance into database"""
self.database['log_subsys']['log_instances'][source_id] = {
'source_id' : source_id,
'name' : name,
'level' : level,
'addr' : address,
'source_id': source_id,
'name': name,
'level': level,
'addr': address,
}
def get_log_source_string(self, domain_id, source_id):
"""Get the source string based on source ID"""
# JSON stores key as string, so we need to convert
@@ -236,17 +217,14 @@ class LogDatabase:
return f"unknown<{domain_id}:{source_id}>"
def add_kconfig(self, name, val):
"""Add a kconfig name-value pair into database"""
self.database['kconfigs'][name] = val
def get_kconfigs(self):
"""Return kconfig name-value pairs"""
return self.database['kconfigs']
@staticmethod
def read_json_database(db_file_name):
"""Read database from file and return a LogDatabase object"""
@@ -277,7 +255,6 @@ class LogDatabase:
return database
@staticmethod
def write_json_database(db_file_name, database):
"""Write the database into file"""

View File

@@ -20,9 +20,10 @@ LOG_LEVELS = [
('err', Fore.RED),
('wrn', Fore.YELLOW),
('inf', Fore.GREEN),
('dbg', Fore.BLUE)
('dbg', Fore.BLUE),
]
def get_log_level_str_color(lvl):
"""Convert numeric log level to string"""
if lvl < 0 or lvl >= len(LOG_LEVELS):
@@ -56,12 +57,12 @@ def formalize_fmt_string(fmt_str):
class LogParser(abc.ABC):
"""Abstract class of log parser"""
def __init__(self, database):
self.database = database
self.data_types = DataTypes(self.database)
@abc.abstractmethod
def parse_log_data(self, logdata, debug=False):
"""Parse log data"""

View File

@@ -62,6 +62,7 @@ logger = logging.getLogger("parser")
class LogParserV1(LogParser):
"""Log Parser V1"""
def __init__(self, database):
super().__init__(database=database)
@@ -83,7 +84,6 @@ class LogParserV1(LogParser):
else:
self.fmt_msg_timestamp = endian + FMT_MSG_TIMESTAMP_32
def __get_string(self, arg, arg_offset, string_tbl):
one_str = self.database.find_string(arg)
if one_str is not None:
@@ -100,7 +100,6 @@ class LogParserV1(LogParser):
return ret
def process_one_fmt_str(self, fmt_str, arg_list, string_tbl):
"""Parse the format string to extract arguments from
the binary arglist and return a tuple usable with
@@ -129,8 +128,7 @@ class LogParserV1(LogParser):
elif fmt == '*':
pass
elif fmt.isdecimal() or str.lower(fmt) == 'l' \
or fmt in (' ', '#', '-', '+', '.', 'h'):
elif fmt.isdecimal() or str.lower(fmt) == 'l' or fmt in (' ', '#', '-', '+', '.', 'h'):
# formatting modifiers, just ignore
continue
@@ -201,7 +199,6 @@ class LogParserV1(LogParser):
return tuple(args)
@staticmethod
def extract_string_table(str_tbl):
"""Extract string table in a packaged log message"""
@@ -226,7 +223,6 @@ class LogParserV1(LogParser):
return tbl
@staticmethod
def print_hexdump(hex_data, prefix_len, color):
"""Print hex dump"""
@@ -244,22 +240,25 @@ class LogParserV1(LogParser):
chr_vals += " "
elif chr_done == HEX_BYTES_IN_LINE:
print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
hex_vals, chr_vals))
print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len), hex_vals, chr_vals))
hex_vals = ""
chr_vals = ""
chr_done = 0
if len(chr_vals) > 0:
hex_padding = " " * (HEX_BYTES_IN_LINE - chr_done)
print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
hex_vals, hex_padding, chr_vals))
print(
f"{color}%s%s%s|%s{Fore.RESET}"
% ((" " * prefix_len), hex_vals, hex_padding, chr_vals)
)
def get_full_msg_hdr_size(self):
"""Get the size of the full message header"""
return struct.calcsize(self.fmt_msg_type) + \
struct.calcsize(self.fmt_msg_hdr) + \
struct.calcsize(self.fmt_msg_timestamp)
return (
struct.calcsize(self.fmt_msg_type)
+ struct.calcsize(self.fmt_msg_hdr)
+ struct.calcsize(self.fmt_msg_timestamp)
)
def get_normal_msg_size(self, logdata, offset):
"""Get the needed size of the normal log message at offset"""
@@ -295,21 +294,21 @@ class LogParserV1(LogParser):
offset_end_of_args += offset
# Extra data after packaged log
extra_data = logdata[(offset + pkg_len):next_msg_offset]
extra_data = logdata[(offset + pkg_len) : next_msg_offset]
# Number of appended strings in package
num_packed_strings = struct.unpack_from("B", logdata, offset+1)[0]
num_packed_strings = struct.unpack_from("B", logdata, offset + 1)[0]
# Number of read-only string indexes
num_ro_str_indexes = struct.unpack_from("B", logdata, offset+2)[0]
num_ro_str_indexes = struct.unpack_from("B", logdata, offset + 2)[0]
offset_end_of_args += num_ro_str_indexes
# Number of read-write string indexes
num_rw_str_indexes = struct.unpack_from("B", logdata, offset+3)[0]
num_rw_str_indexes = struct.unpack_from("B", logdata, offset + 3)[0]
offset_end_of_args += num_rw_str_indexes
# Extract the string table in the packaged log message
string_tbl = self.extract_string_table(logdata[offset_end_of_args:(offset + pkg_len)])
string_tbl = self.extract_string_table(logdata[offset_end_of_args : (offset + pkg_len)])
if len(string_tbl) != num_packed_strings:
logger.error("------ Error extracting string table")
@@ -324,11 +323,12 @@ class LogParserV1(LogParser):
# the offset begins at 0 for va_list. However, the format string
# itself is before the va_list, so need to go back the width of
# a pointer.
fmt_str_ptr = struct.unpack_from(self.data_types.get_formatter(DataTypes.PTR),
logdata, offset)[0]
fmt_str = self.__get_string(fmt_str_ptr,
-self.data_types.get_sizeof(DataTypes.PTR),
string_tbl)
fmt_str_ptr = struct.unpack_from(
self.data_types.get_formatter(DataTypes.PTR), logdata, offset
)[0]
fmt_str = self.__get_string(
fmt_str_ptr, -self.data_types.get_sizeof(DataTypes.PTR), string_tbl
)
offset += self.data_types.get_sizeof(DataTypes.PTR)
if not fmt_str:
@@ -361,7 +361,6 @@ class LogParserV1(LogParser):
msg_type = struct.unpack_from(self.fmt_msg_type, logdata, offset)[0]
if msg_type == MSG_TYPE_DROPPED:
if offset + struct.calcsize(self.fmt_dropped_cnt) > len(logdata):
return False, offset
offset += struct.calcsize(self.fmt_msg_type)
@@ -372,9 +371,9 @@ class LogParserV1(LogParser):
print(f"--- {num_dropped} messages dropped ---")
elif msg_type == MSG_TYPE_NORMAL:
if ((offset + self.get_full_msg_hdr_size() > len(logdata)) or
(offset + self.get_normal_msg_size(logdata, offset) > len(logdata))):
if (offset + self.get_full_msg_hdr_size() > len(logdata)) or (
offset + self.get_normal_msg_size(logdata, offset) > len(logdata)
):
return False, offset
offset += struct.calcsize(self.fmt_msg_type)

View File

@@ -65,6 +65,7 @@ logger = logging.getLogger("parser")
class LogParserV3(LogParser):
"""Log Parser V3"""
def __init__(self, database):
super().__init__(database=database)
@@ -88,7 +89,6 @@ class LogParserV3(LogParser):
else:
self.fmt_msg_timestamp = endian + FMT_MSG_TIMESTAMP_32
def __get_string(self, arg, arg_offset, string_tbl):
one_str = self.database.find_string(arg)
if one_str is not None:
@@ -105,7 +105,6 @@ class LogParserV3(LogParser):
return ret
def process_one_fmt_str(self, fmt_str, arg_list, string_tbl):
"""Parse the format string to extract arguments from
the binary arglist and return a tuple usable with
@@ -134,8 +133,7 @@ class LogParserV3(LogParser):
elif fmt == '*':
pass
elif fmt.isdecimal() or str.lower(fmt) == 'l' \
or fmt in (' ', '#', '-', '+', '.', 'h'):
elif fmt.isdecimal() or str.lower(fmt) == 'l' or fmt in (' ', '#', '-', '+', '.', 'h'):
# formatting modifiers, just ignore
continue
@@ -206,7 +204,6 @@ class LogParserV3(LogParser):
return tuple(args)
@staticmethod
def extract_string_table(str_tbl):
"""Extract string table in a packaged log message"""
@@ -231,7 +228,6 @@ class LogParserV3(LogParser):
return tbl
@staticmethod
def print_hexdump(hex_data, prefix_len, color):
"""Print hex dump"""
@@ -249,22 +245,25 @@ class LogParserV3(LogParser):
chr_vals += " "
elif chr_done == HEX_BYTES_IN_LINE:
print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
hex_vals, chr_vals))
print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len), hex_vals, chr_vals))
hex_vals = ""
chr_vals = ""
chr_done = 0
if len(chr_vals) > 0:
hex_padding = " " * (HEX_BYTES_IN_LINE - chr_done)
print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
hex_vals, hex_padding, chr_vals))
print(
f"{color}%s%s%s|%s{Fore.RESET}"
% ((" " * prefix_len), hex_vals, hex_padding, chr_vals)
)
def get_full_msg_hdr_size(self):
"""Get the size of the full message header"""
return struct.calcsize(self.fmt_msg_type) + \
struct.calcsize(self.fmt_msg_hdr) + \
struct.calcsize(self.fmt_msg_timestamp)
return (
struct.calcsize(self.fmt_msg_type)
+ struct.calcsize(self.fmt_msg_hdr)
+ struct.calcsize(self.fmt_msg_timestamp)
)
def get_normal_msg_size(self, logdata, offset):
"""Get the needed size of the normal log message at offset"""
@@ -278,8 +277,9 @@ class LogParserV3(LogParser):
def parse_one_normal_msg(self, logdata, offset):
"""Parse one normal log message and print the encoded message"""
# Parse log message header
domain_lvl, pkg_len, data_len, source_id = struct.unpack_from(self.fmt_msg_hdr,
logdata, offset)
domain_lvl, pkg_len, data_len, source_id = struct.unpack_from(
self.fmt_msg_hdr, logdata, offset
)
offset += struct.calcsize(self.fmt_msg_hdr)
timestamp = struct.unpack_from(self.fmt_msg_timestamp, logdata, offset)[0]
@@ -305,21 +305,21 @@ class LogParserV3(LogParser):
offset_end_of_args += offset
# Extra data after packaged log
extra_data = logdata[(offset + pkg_len):next_msg_offset]
extra_data = logdata[(offset + pkg_len) : next_msg_offset]
# Number of appended strings in package
num_packed_strings = struct.unpack_from("B", logdata, offset+1)[0]
num_packed_strings = struct.unpack_from("B", logdata, offset + 1)[0]
# Number of read-only string indexes
num_ro_str_indexes = struct.unpack_from("B", logdata, offset+2)[0]
num_ro_str_indexes = struct.unpack_from("B", logdata, offset + 2)[0]
offset_end_of_args += num_ro_str_indexes
# Number of read-write string indexes
num_rw_str_indexes = struct.unpack_from("B", logdata, offset+3)[0]
num_rw_str_indexes = struct.unpack_from("B", logdata, offset + 3)[0]
offset_end_of_args += num_rw_str_indexes
# Extract the string table in the packaged log message
string_tbl = self.extract_string_table(logdata[offset_end_of_args:(offset + pkg_len)])
string_tbl = self.extract_string_table(logdata[offset_end_of_args : (offset + pkg_len)])
if len(string_tbl) != num_packed_strings:
logger.error("------ Error extracting string table")
@@ -334,11 +334,12 @@ class LogParserV3(LogParser):
# the offset begins at 0 for va_list. However, the format string
# itself is before the va_list, so need to go back the width of
# a pointer.
fmt_str_ptr = struct.unpack_from(self.data_types.get_formatter(DataTypes.PTR),
logdata, offset)[0]
fmt_str = self.__get_string(fmt_str_ptr,
-self.data_types.get_sizeof(DataTypes.PTR),
string_tbl)
fmt_str_ptr = struct.unpack_from(
self.data_types.get_formatter(DataTypes.PTR), logdata, offset
)[0]
fmt_str = self.__get_string(
fmt_str_ptr, -self.data_types.get_sizeof(DataTypes.PTR), string_tbl
)
offset += self.data_types.get_sizeof(DataTypes.PTR)
if not fmt_str:
@@ -372,7 +373,6 @@ class LogParserV3(LogParser):
msg_type = struct.unpack_from(self.fmt_msg_type, logdata, offset)[0]
if msg_type == MSG_TYPE_DROPPED:
if offset + struct.calcsize(self.fmt_dropped_cnt) > len(logdata):
return False, offset
@@ -384,9 +384,9 @@ class LogParserV3(LogParser):
print(f"--- {num_dropped} messages dropped ---")
elif msg_type == MSG_TYPE_NORMAL:
if ((offset + self.get_full_msg_hdr_size() > len(logdata)) or
(offset + self.get_normal_msg_size(logdata, offset) > len(logdata))):
if (offset + self.get_full_msg_hdr_size() > len(logdata)) or (
offset + self.get_normal_msg_size(logdata, offset) > len(logdata)
):
return False, offset
offset += struct.calcsize(self.fmt_msg_type)

View File

@@ -62,6 +62,6 @@ def find_string_in_mappings(string_mappings, str_ptr):
for ptr, string in string_mappings.items():
if ptr <= str_ptr < (ptr + len(string)):
whole_str = string_mappings[ptr]
return whole_str[str_ptr - ptr:]
return whole_str[str_ptr - ptr :]
return None

View File

@@ -32,12 +32,13 @@ def parse_args():
argparser.add_argument("dbfile", help="Dictionary Logging Database file")
argparser.add_argument("logfile", help="Log Data file")
argparser.add_argument("--hex", action="store_true",
help="Log Data file is in hexadecimal strings")
argparser.add_argument("--rawhex", action="store_true",
help="Log file only contains hexadecimal log data")
argparser.add_argument("--debug", action="store_true",
help="Print extra debugging information")
argparser.add_argument(
"--hex", action="store_true", help="Log Data file is in hexadecimal strings"
)
argparser.add_argument(
"--rawhex", action="store_true", help="Log file only contains hexadecimal log data"
)
argparser.add_argument("--debug", action="store_true", help="Print extra debugging information")
return argparser.parse_args()
@@ -135,5 +136,6 @@ def main():
)
sys.exit(1)
if __name__ == "__main__":
main()

View File

@@ -25,8 +25,7 @@ def parse_args():
argparser.add_argument("dbfile", help="Dictionary Logging Database file")
argparser.add_argument("serialPort", help="Port where the logs are generated")
argparser.add_argument("baudrate", help="Serial Port baud rate")
argparser.add_argument("--debug", action="store_true",
help="Print extra debugging information")
argparser.add_argument("--debug", action="store_true", help="Print extra debugging information")
return argparser.parse_args()
@@ -34,8 +33,7 @@ def parse_args():
def main():
"""function of serial parser"""
print("This script is deprecated. Use 'live_log_parser.py' instead.",
file=sys.stderr)
print("This script is deprecated. Use 'live_log_parser.py' instead.", file=sys.stderr)
# Convert the arguments to the format expected by live_log_parser, and invoke it directly.
args = parse_args()
@@ -47,11 +45,7 @@ def main():
if args.debug:
sys.argv.append('--debug')
sys.argv += [
'serial',
args.serialPort,
args.baudrate
]
sys.argv += ['serial', args.serialPort, args.baudrate]
live_log_parser.main()

View File

@@ -20,6 +20,7 @@ ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
logger = logging.getLogger(__name__)
def process_logs(dut: DeviceAdapter, build_dir):
'''
This grabs the encoded log from console and parse the log
@@ -41,7 +42,7 @@ def process_logs(dut: DeviceAdapter, build_dir):
# as the log parser requires file as input.
# Timeout is intentionally long. Twister will
# timeout earlier with per-test timeout.
handler_output = dut.readlines_until(regex = '.*##ZLOGV1##[0-9]+', timeout = 600.0)
handler_output = dut.readlines_until(regex='.*##ZLOGV1##[0-9]+', timeout=600.0)
# Join all the output lines together
handler_output = ''.join(ho.strip() for ho in handler_output)
@@ -73,45 +74,49 @@ def expected_regex_common():
the decoded log lines.
'''
return [
# *** Booting Zephyr OS build <version> ***
re.compile(r'.*[*][*][*] Booting Zephyr OS build [0-9a-z.-]+'),
# Hello World! <board name>
re.compile(r'[\s]+Hello World! [\w-]+'),
# [ 10] <err> hello_world: error string
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <err> hello_world: error string'),
# [ 10] <dbg> hello_world: main: debug string
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: debug string'),
# [ 10] <inf> hello_world: info string
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <inf> hello_world: info string'),
# [ 10] <dbg> hello_world: main: int8_t 1, uint8_t 2
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int8_t 1, uint8_t 2'),
# [ 10] <dbg> hello_world: main: int16_t 16, uint16_t 17
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int16_t 16, uint16_t 17'),
# [ 10] <dbg> hello_world: main: int32_t 32, uint32_t 33
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int32_t 32, uint32_t 33'),
# [ 10] <dbg> hello_world: main: int64_t 64, uint64_t 65
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int64_t 64, uint64_t 65'),
# [ 10] <dbg> hello_world: main: char !
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: char !'),
# [ 10] <dbg> hello_world: main: s str static str c str
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: s str static str c str'),
# [ 10] <dbg> hello_world: main: d str dynamic str
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: d str dynamic str'),
# [ 10] <dbg> hello_world: main: mixed str dynamic str --- dynamic str \
# --- another dynamic str --- another dynamic str
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: mixed str dynamic str '
'--- dynamic str --- another dynamic str --- another dynamic str'),
# [ 10] <dbg> hello_world: main: mixed c/s ! static str dynamic str static str !
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: mixed c/s ! static str '
'dynamic str static str !'),
# [ 10] <dbg> hello_world: main: pointer 0x1085f9
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: pointer 0x[0-9a-f]+'),
# [ 10] <dbg> hello_world: main: For HeXdUmP!
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: For HeXdUmP!'),
# 48 45 58 44 55 4d 50 21 20 48 45 58 44 55 4d 50 |HEXDUMP! HEXDUMP
re.compile(r'[\s]+[ ]+[0-9a-f ]{48,52}[|]HEXDUMP! HEXDUMP'),
# 40 20 48 45 58 44 55 4d 50 23 |@ HEXDUM P#
re.compile(r'[\s]+[ ]+[0-9a-f ]{48,52}[|]@ HEXDUM P#'),
# *** Booting Zephyr OS build <version> ***
re.compile(r'.*[*][*][*] Booting Zephyr OS build [0-9a-z.-]+'),
# Hello World! <board name>
re.compile(r'[\s]+Hello World! [\w-]+'),
# [ 10] <err> hello_world: error string
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <err> hello_world: error string'),
# [ 10] <dbg> hello_world: main: debug string
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: debug string'),
# [ 10] <inf> hello_world: info string
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <inf> hello_world: info string'),
# [ 10] <dbg> hello_world: main: int8_t 1, uint8_t 2
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int8_t 1, uint8_t 2'),
# [ 10] <dbg> hello_world: main: int16_t 16, uint16_t 17
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int16_t 16, uint16_t 17'),
# [ 10] <dbg> hello_world: main: int32_t 32, uint32_t 33
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int32_t 32, uint32_t 33'),
# [ 10] <dbg> hello_world: main: int64_t 64, uint64_t 65
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: int64_t 64, uint64_t 65'),
# [ 10] <dbg> hello_world: main: char !
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: char !'),
# [ 10] <dbg> hello_world: main: s str static str c str
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: s str static str c str'),
# [ 10] <dbg> hello_world: main: d str dynamic str
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: d str dynamic str'),
# [ 10] <dbg> hello_world: main: mixed str dynamic str --- dynamic str \
# --- another dynamic str --- another dynamic str
re.compile(
r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: mixed str dynamic str '
'--- dynamic str --- another dynamic str --- another dynamic str'
),
# [ 10] <dbg> hello_world: main: mixed c/s ! static str dynamic str static str !
re.compile(
r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: mixed c/s ! static str '
'dynamic str static str !'
),
# [ 10] <dbg> hello_world: main: pointer 0x1085f9
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: pointer 0x[0-9a-f]+'),
# [ 10] <dbg> hello_world: main: For HeXdUmP!
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: For HeXdUmP!'),
# 48 45 58 44 55 4d 50 21 20 48 45 58 44 55 4d 50 |HEXDUMP! HEXDUMP
re.compile(r'[\s]+[ ]+[0-9a-f ]{48,52}[|]HEXDUMP! HEXDUMP'),
# 40 20 48 45 58 44 55 4d 50 23 |@ HEXDUM P#
re.compile(r'[\s]+[ ]+[0-9a-f ]{48,52}[|]@ HEXDUM P#'),
]
@@ -121,9 +126,11 @@ def expected_regex_fpu():
the decoded log lines for FPU builds.
'''
return [
# [ 10] <dbg> hello_world: main: float 66.669998, double 68.690000
re.compile(r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: '
r'float 66[\.][0-9-\.]+, double 68[\.][0-9-\.]+'),
# [ 10] <dbg> hello_world: main: float 66.669998, double 68.690000
re.compile(
r'[\s]+[\[][0-9,:\. ]+[\]] <dbg> hello_world: main: '
r'float 66[\.][0-9-\.]+, double 68[\.][0-9-\.]+'
),
]