apply black to python files

parent 4b511d8d30
commit 19078aa5fe
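The hunks below are the mechanical result of running the black code formatter over the repository's Python files; no behavioural changes are intended. Assuming black is installed, an invocation along the lines of `black .` from the repository root (or `black --check --diff .` to preview the reformatting without modifying files) reproduces this kind of change; the exact command and options used for this commit are not recorded here.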
@@ -20,27 +20,40 @@ def main():
print("-- Python CMake build configurator utility --")

print("Parsing command line arguments..")
parser = argparse.ArgumentParser(description="Processing arguments for CMake build configuration.")
parser.add_argument(
"-o", "--osal", type=str, choices=["freertos", "linux", "rtems", "host"],
help="FSFW OSAL. Valid arguments: host, linux, rtems, freertos"
parser = argparse.ArgumentParser(
description="Processing arguments for CMake build configuration."
)
parser.add_argument(
"-b", "--buildtype", type=str, choices=["debug", "release", "size", "reldeb"],
help="CMake build type. Valid arguments: debug, release, size, reldeb (Release with Debug Information)",
default="debug"
"-o",
"--osal",
type=str,
choices=["freertos", "linux", "rtems", "host"],
help="FSFW OSAL. Valid arguments: host, linux, rtems, freertos",
)
parser.add_argument(
"-b",
"--buildtype",
type=str,
choices=["debug", "release", "size", "reldeb"],
help="CMake build type. Valid arguments: debug, release, size, reldeb (Release with Debug Information)",
default="debug",
)
parser.add_argument("-l", "--builddir", type=str, help="Specify build directory.")
parser.add_argument(
"-g", "--generator", type=str, help="CMake Generator", choices=["make", "ninja"]
)
parser.add_argument(
"-t", "--target-bsp", type=str, help="Target BSP, combination of architecture and machine"
"-t",
"--target-bsp",
type=str,
help="Target BSP, combination of architecture and machine",
)
parser.add_argument(
"-d", "--defines",
"-d",
"--defines",
help="Additional custom defines passed to CMake (supply without -D prefix!)",
nargs="*", type=str
nargs="*",
type=str,
)

args = parser.parse_args()
@@ -53,15 +66,15 @@ def main():

if args.generator is None:
generator = determine_build_generator()
generator_cmake_arg = f"-G \"{generator}\""
generator_cmake_arg = f'-G "{generator}"'
else:
if args.generator == "make":
if os.name == 'nt':
if os.name == "nt":
generator_cmake_arg = '-G "MinGW Makefiles"'
else:
generator_cmake_arg = '-G "Unix Makefiles"'
elif args.generator == 'ninja':
generator_cmake_arg = '-G Ninja'
elif args.generator == "ninja":
generator_cmake_arg = "-G Ninja"
else:
generator_cmake_arg = args.generator

@@ -73,14 +86,14 @@ def main():

cmake_build_type = determine_build_type(args.buildtype)

cmake_target_cfg_cmd = ''
cmake_target_cfg_cmd = ""

define_string = ""
if args.defines is not None:
define_list = args.defines[0].split()
for define in define_list:
define_string += f"-D{define} "

if args.builddir is None:
cmake_build_folder = determine_build_folder(cmake_build_type)
else:

@@ -88,8 +101,10 @@ def main():

build_path = source_location + os.path.sep + cmake_build_folder
if os.path.isdir(build_path):
remove_old_dir = input(f"{cmake_build_folder} folder already exists. "
f"Remove old directory? [y/n]: ")
remove_old_dir = input(
f"{cmake_build_folder} folder already exists. "
f"Remove old directory? [y/n]: "
)
if str(remove_old_dir).lower() in ["yes", "y", 1]:
remove_old_dir = True
else:

@@ -108,11 +123,13 @@ def main():
print(f"Navigating into build directory: {build_path}")
os.chdir(cmake_build_folder)

cmake_command = f"cmake {generator_cmake_arg} -DFSFW_OSAL=\"{cmake_fsfw_osal}\" " \
f"-DCMAKE_BUILD_TYPE=\"{cmake_build_type}\" {cmake_target_cfg_cmd} " \
f"{define_string} {source_location}"
cmake_command = (
f'cmake {generator_cmake_arg} -DFSFW_OSAL="{cmake_fsfw_osal}" '
f'-DCMAKE_BUILD_TYPE="{cmake_build_type}" {cmake_target_cfg_cmd} '
f"{define_string} {source_location}"
)
# Remove redundant spaces
cmake_command = ' '.join(cmake_command.split())
cmake_command = " ".join(cmake_command.split())
print("Running CMake command (without +): ")
print(f"+ {cmake_command}")
os.system(cmake_command)

@@ -121,8 +138,10 @@ def main():

def determine_build_generator() -> str:
print("No generator specified. ")
print("Please select from the following list of build types or type "
"in desired system directly [h for help]: ")
print(
"Please select from the following list of build types or type "
"in desired system directly [h for help]: "
)
while True:
user_input = input("Enter your selection: ")
if user_input == "h":

@@ -136,11 +155,15 @@ def determine_build_generator() -> str:

def determine_build_folder(cmake_build_type: str) -> str:
confirm = input(f"No build folder specified. Set to build type name {cmake_build_type}? [y/n]: ")
confirm = input(
f"No build folder specified. Set to build type name {cmake_build_type}? [y/n]: "
)
if confirm in ["yes", "y", 1]:
return cmake_build_type
else:
new_folder_name = input("Please enter folder name, will be created in source folder: ")
new_folder_name = input(
"Please enter folder name, will be created in source folder: "
)
return new_folder_name

@@ -150,19 +173,18 @@ def determine_source_location() -> str:
index += 1
os.chdir("..")
if index >= 5:
print("Error: Could not find source directory (determined by looking for fsfw folder!)")
print(
"Error: Could not find source directory (determined by looking for fsfw folder!)"
)
sys.exit(1)
return os.getcwd()

def determine_fsfw_osal() -> str:
select_dict = dict({
1: "host",
2: "linux",
3: "freertos",
4: "rtems"
})
print("No build type specified. Please select from the following list of build types: ")
select_dict = dict({1: "host", 2: "linux", 3: "freertos", 4: "rtems"})
print(
"No build type specified. Please select from the following list of build types: "
)
for key, item in select_dict.items():
print(f"{key}: {item}")
select = input("Enter your selection: ")

@@ -179,13 +201,12 @@ def determine_fsfw_osal() -> str:

def determine_build_type(build_type_arg) -> str:
if build_type_arg is None:
select_dict = dict({
1: "Debug",
2: "Release",
3: "Release with Debug Information",
4: "Size"
})
print("No build type specified. Please select from the following list of build types")
select_dict = dict(
{1: "Debug", 2: "Release", 3: "Release with Debug Information", 4: "Size"}
)
print(
"No build type specified. Please select from the following list of build types"
)
for key, item in select_dict.items():
print(f"{key}: {item}")
select = input("Enter your selection: ")

@@ -229,11 +250,10 @@ def determine_tgt_bsp(osal: str) -> str:
print("Target BSP set to arm/stm32h743zi-nucleo")
osal = "arm/stm32h743zi-nucleo"
elif osal == "linux":
print("No target BSP specified. Please select from the following list of build types.")
select_dict = dict({
1: "arm/raspberrypi",
2: "none/hosted"
})
print(
"No target BSP specified. Please select from the following list of build types."
)
select_dict = dict({1: "arm/raspberrypi", 2: "none/hosted"})
for key, item in select_dict.items():
print(f"{key}: {item}")
select = input("Enter your selection: ")
@@ -11,11 +11,20 @@ from fsfwgen.parserbase.parser import FileParser
from fsfwgen.utility.csv_writer import CsvWriter
from fsfwgen.utility.printer import Printer

PACKET_CONTENT_DEFINITION_DESTINATION = ["../../mission/pus/servicepackets/",
"../../fsfw/pus/servicepackets/"]
PACKET_CONTENT_DEFINITION_DESTINATION = [
"../../mission/pus/servicepackets/",
"../../fsfw/pus/servicepackets/",
]
PACKET_CONTENT_CSV_NAME = "mib_packet_data_content.csv"
PACKET_CONTENT_HEADER_COLUMN = ["Service", "Subservice", "Packet Name", "Datatype", "Name",
"Size [Bytes]", "Comment"]
PACKET_CONTENT_HEADER_COLUMN = [
"Service",
"Subservice",
"Packet Name",
"Datatype",
"Name",
"Size [Bytes]",
"Comment",
]

SQL_DELETE_PACKET_DATA_CONTENT_CMD = """
DROP TABLE IF EXISTS PacketContent;

@@ -43,12 +52,17 @@ VALUES(?,?,?,?,?,?,?)
def main():
print("PacketContentParser: Parsing for header files.")
header_file_parser = FileListParser(PACKET_CONTENT_DEFINITION_DESTINATION)
header_file_list = header_file_parser.parse_header_files(False, "Parsing packet data files: ")
header_file_list = header_file_parser.parse_header_files(
False, "Parsing packet data files: "
)
packet_content_parser = PacketContentParser(header_file_list)
subservice_table = packet_content_parser.parse_files(True)
Printer.print_content(subservice_table, "PacketContentParser: Printing packet data table:")
subservice_writer = CsvWriter(PACKET_CONTENT_CSV_NAME,
subservice_table, PACKET_CONTENT_HEADER_COLUMN)
Printer.print_content(
subservice_table, "PacketContentParser: Printing packet data table:"
)
subservice_writer = CsvWriter(
PACKET_CONTENT_CSV_NAME, subservice_table, PACKET_CONTENT_HEADER_COLUMN
)
subservice_writer.write_to_csv()
subservice_writer.move_csv("..")

@@ -67,7 +81,7 @@ class PacketContentParser(FileParser):
self.commentColumn = 6
self.lastEntryColumn = 7
self.columnListLength = 8
self.dictEntryList = list(range(self.columnListLength-1))
self.dictEntryList = list(range(self.columnListLength - 1))

self.datatypeMatch = False
self.ignoreFlag = False

@@ -78,7 +92,9 @@ class PacketContentParser(FileParser):
self_print_parsing_info = args[0]

# Read service from file name
self.dictEntryList[self.serviceColumn] = re.search('[0-9]{1,3}', file_name).group(0)
self.dictEntryList[self.serviceColumn] = re.search(
"[0-9]{1,3}", file_name
).group(0)
self.dictEntryList[self.subserviceColumn] = " "
file = open(file_name, "r")
if self_print_parsing_info:

@@ -96,15 +112,19 @@ class PacketContentParser(FileParser):
self.update_packet_content_sizes()

def scan_for_class_and_struct_match_and_handle_it(self, line):
class_or_struct_match = re.search('[\s]*class[\s]*([\w]*)[\s]*.*[\s]*{[\s]*([^\n]*)', line)
class_or_struct_match = re.search(
"[\s]*class[\s]*([\w]*)[\s]*.*[\s]*{[\s]*([^\n]*)", line
)
if not class_or_struct_match:
class_or_struct_match = re.search(
'[\s]*struct[\s]*([\w]*)[\s]*.*[\s]*{[\s]*([^\n]*)', line)
"[\s]*struct[\s]*([\w]*)[\s]*.*[\s]*{[\s]*([^\n]*)", line
)
if class_or_struct_match:
self.dictEntryList[self.classNameColumn] = class_or_struct_match.group(1)
if class_or_struct_match.group(2):
self.dictEntryList[self.subserviceColumn] = \
self.check_for_subservice_string(class_or_struct_match.group(2))
self.dictEntryList[
self.subserviceColumn
] = self.check_for_subservice_string(class_or_struct_match.group(2))

def scan_for_variable_match_and_handle_it(self, line):
# Look for datatype definitions

@@ -117,26 +137,37 @@ class PacketContentParser(FileParser):
# First step: Search for possible parameter definitions
# Generic serialize element or datatypes
var_match = re.search(
r'[\w]*(?:<)?[\s]*(uint32_t|uint8_t|uint16_t|ReturnValue_t|Mode_t|Submode_t|'
r'object_id_t|float|double|bool|ActionId_t|EventId_t|sid_t|ParameterId_t)'
r'(?:>)?[\s]*([\w]*)[\s]*(?:[= 0-9]*)?[;](?:[\/!< ]*([^\n]*))?', line)
r"[\w]*(?:<)?[\s]*(uint32_t|uint8_t|uint16_t|ReturnValue_t|Mode_t|Submode_t|"
r"object_id_t|float|double|bool|ActionId_t|EventId_t|sid_t|ParameterId_t)"
r"(?:>)?[\s]*([\w]*)[\s]*(?:[= 0-9]*)?[;](?:[\/!< ]*([^\n]*))?",
line,
)
if var_match:
# Debug printout
# print(var_match.group(0))
self.handle_generic_variable_match(var_match)
# Serial Fixed Array List with Size Header
else:
var_match = re.search(r'[ \w]*<SerialFixedArrayListAdapter<([\w_, ()]*)>>'
r'[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?', line)
var_match = re.search(
r"[ \w]*<SerialFixedArrayListAdapter<([\w_, ()]*)>>"
r"[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?",
line,
)
if var_match:
self.handle_serial_fixed_array_match(var_match)
# Serial Buffer, No length field
if not var_match:
var_match = re.search(r'[ \w]*<SerialBufferAdapter<([\w_,]*)>>'
r'[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?', line)
var_match = re.search(
r"[ \w]*<SerialBufferAdapter<([\w_,]*)>>"
r"[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?",
line,
)
if not var_match:
var_match = re.search(r'[\w ]*(?:<)?(uint32_t|uint8_t|uint16_t)[\s]*\*'
r'(?:>)?[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?', line)
var_match = re.search(
r"[\w ]*(?:<)?(uint32_t|uint8_t|uint16_t)[\s]*\*"
r"(?:>)?[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?",
line,
)
if var_match:
self.handle_serial_buffer_match(var_match)
# exclude size definition in serialize adapter or any definitions which are not parameter initializations

@@ -147,7 +178,7 @@ class PacketContentParser(FileParser):

def update_packet_content_table(self):
self.index = self.index + 1
dict_entry_tuple = tuple(self.dictEntryList[:self.columnListLength])
dict_entry_tuple = tuple(self.dictEntryList[: self.columnListLength])
if not self.ignoreFlag:
self.mib_table.update({self.index: dict_entry_tuple})
else:

@@ -162,16 +193,20 @@ class PacketContentParser(FileParser):
pass
else:
fixed_array_properties = re.search(
'([\w_]*)[\s]*,[\s]*([\w_()]*)[\s]*,[\s]*([\w_()]*)[\s]*', var_match.group(1))
"([\w_]*)[\s]*,[\s]*([\w_()]*)[\s]*,[\s]*([\w_()]*)[\s]*",
var_match.group(1),
)
if fixed_array_properties:
type_of_next_buffer_size = fixed_array_properties.group(3)
self.index = self.index + 1
self.dictEntryList[self.datatypeColumn] = type_of_next_buffer_size
self.dictEntryList[self.nameColumn] = "Size of following buffer"
dict_entry_tuple = tuple(self.dictEntryList[:self.columnListLength])
dict_entry_tuple = tuple(self.dictEntryList[: self.columnListLength])
self.mib_table.update({self.index: dict_entry_tuple})
self.handle_var_match(var_match)
self.dictEntryList[self.datatypeColumn] = fixed_array_properties.group(1) + " *"
self.dictEntryList[self.datatypeColumn] = (
fixed_array_properties.group(1) + " *"
)
self.handle_exporter_string(var_match.group(3))

def handle_serial_buffer_match(self, var_match):
@@ -202,7 +237,9 @@ class PacketContentParser(FileParser):
self.mib_table.update({key: content})

def attempt_uint_match(self, content):
self.datatypeMatch = re.search('uint([\d]{1,2})_t', content[self.datatypeColumn])
self.datatypeMatch = re.search(
"uint([\d]{1,2})_t", content[self.datatypeColumn]
)
if self.datatypeMatch:
content = list(content)
content[self.sizeColumn] = round(int(self.datatypeMatch.group(1)) / 8)

@@ -211,8 +248,9 @@ class PacketContentParser(FileParser):

def attempt_four_byte_match(self, content):
self.datatypeMatch = re.search(
r'object_id_t|ActionId_t|Mode_t|float|sid_t|ParameterId_t',
content[self.datatypeColumn])
r"object_id_t|ActionId_t|Mode_t|float|sid_t|ParameterId_t",
content[self.datatypeColumn],
)
if self.datatypeMatch:
content = list(content)
content[self.sizeColumn] = 4

@@ -220,7 +258,7 @@ class PacketContentParser(FileParser):
return content

def attempt_eight_byte_match(self, content):
self.datatypeMatch = re.search('double', content[self.datatypeColumn])
self.datatypeMatch = re.search("double", content[self.datatypeColumn])
if self.datatypeMatch:
content = list(content)
content[self.sizeColumn] = 8

@@ -228,7 +266,9 @@ class PacketContentParser(FileParser):
return content

def attempt_two_byte_match(self, content):
self.datatypeMatch = re.search('ReturnValue_t|EventId_t', content[self.datatypeColumn])
self.datatypeMatch = re.search(
"ReturnValue_t|EventId_t", content[self.datatypeColumn]
)
if self.datatypeMatch:
content = list(content)
content[self.sizeColumn] = 2

@@ -236,7 +276,7 @@ class PacketContentParser(FileParser):
return content

def attempt_one_byte_match(self, content):
self.datatypeMatch = re.search('Submode_t|bool', content[self.datatypeColumn])
self.datatypeMatch = re.search("Submode_t|bool", content[self.datatypeColumn])
if self.datatypeMatch:
content = list(content)
content[self.sizeColumn] = 1

@@ -244,7 +284,7 @@ class PacketContentParser(FileParser):
return content

def handle_uint_buffer_type(self, content):
if re.search('\*', content[self.datatypeColumn]):
if re.search("\*", content[self.datatypeColumn]):
content = list(content)
content[self.sizeColumn] = "deduced"
content = tuple(content)

@@ -252,14 +292,22 @@ class PacketContentParser(FileParser):

# Used to scan exporter string for ignore flag or store any comments
def handle_exporter_string(self, match):
exporter_string = re.search('[ /!<]*\[EXPORT[\w]*\][\s]*:[\s]*([^\n]*)', match)
exporter_string = re.search("[ /!<]*\[EXPORT[\w]*\][\s]*:[\s]*([^\n]*)", match)
if exporter_string:
type_string = re.search("\[TYPE|BUFFERTYPE\][\s]*([\w]*)[^\n|\[]*", exporter_string.group(0),
re.IGNORECASE)
type_string = re.search(
"\[TYPE|BUFFERTYPE\][\s]*([\w]*)[^\n|\[]*",
exporter_string.group(0),
re.IGNORECASE,
)
if type_string:
self.dictEntryList[self.datatypeColumn] = str(type_string.group(1)) + " *"
comment_string = re.search("\[COMMENT\][\s]*([\w]*)[^\n|\[]*", exporter_string.group(0),
re.IGNORECASE)
self.dictEntryList[self.datatypeColumn] = (
str(type_string.group(1)) + " *"
)
comment_string = re.search(
"\[COMMENT\][\s]*([\w]*)[^\n|\[]*",
exporter_string.group(0),
re.IGNORECASE,
)
if comment_string:
self.dictEntryList[self.commentColumn] = comment_string.group(1)
self.check_for_ignore_string(exporter_string.group(0))

@@ -269,13 +317,15 @@ class PacketContentParser(FileParser):
# Used to transform comma separated subservice numbers into specific subservice numbers
def check_for_subservice_string(self, full_description):
subservice_info = re.search(
r'^.*//[\s]*[!<]*[\s]*\[EXPORT[\w]*\][\s]*:[\s]*\[SUBSERVICE\][\s]*([^\n]*)',
full_description, re.IGNORECASE)
description = ' '
r"^.*//[\s]*[!<]*[\s]*\[EXPORT[\w]*\][\s]*:[\s]*\[SUBSERVICE\][\s]*([^\n]*)",
full_description,
re.IGNORECASE,
)
description = " "
if subservice_info:
description = self.handle_subservice_string(subservice_info)
if full_description == '':
description = ' '
if full_description == "":
description = " "
return description

def check_for_ignore_string(self, string):

@@ -286,8 +336,8 @@ class PacketContentParser(FileParser):

@staticmethod
def handle_subservice_string(subservice_info):
description = ' '
subservice_list = [int(x) for x in subservice_info.group(1).split(',')]
description = " "
subservice_list = [int(x) for x in subservice_info.group(1).split(",")]
subservice_number = len(subservice_list)
for i in range(subservice_number):
description = description + str(subservice_list[i])

@@ -300,6 +350,3 @@ class PacketContentParser(FileParser):

if __name__ == "__main__":
main()
@@ -21,8 +21,16 @@ DH_COMMAND_PACKET_DEFINITION_DESTINATION = "../../mission/devices/devicepackets/
DH_DEFINITION_DESTINATION = "../../mission/devices/"
DH_COMMANDS_CSV_NAME = "mib_device_commands.csv"
DH_COMMAND_HEADER_COLUMNS = [
"Device Handler", "Command Name", "Action ID", "Command Field Name", "Command Field Position",
"Command Field Type", "Command Field Option Name", "Command Field Option Value", "Comment"]
"Device Handler",
"Command Name",
"Action ID",
"Command Field Name",
"Command Field Position",
"Command Field Type",
"Command Field Option Name",
"Command Field Option Value",
"Comment",
]

SQL_DELETE_CMDTABLE_CMD = """
DROP TABLE IF EXISTS DeviceHandlerCommand;

@@ -55,6 +63,7 @@ class DeviceCommandColumns(Enum):
"""
Specifies order of MIB columns
"""

DH_NAME = 0
NAME = 1
ACTION_ID = 2

@@ -75,19 +84,27 @@ def main():
:return:
"""
info_header_file_parser = FileListParser(DH_DEFINITION_DESTINATION)
info_header_file_list = info_header_file_parser.\
parse_header_files(False, "Parsing device handler informations:")
info_header_file_list = info_header_file_parser.parse_header_files(
False, "Parsing device handler informations:"
)
dh_information_parser = DeviceHandlerInformationParser(info_header_file_list)
dh_information_table = dh_information_parser.parse_files()
Printer.print_content(dh_information_table, "Priting device handler command information table: ")
Printer.print_content(
dh_information_table, "Priting device handler command information table: "
)

header_file_parser = FileListParser(DH_COMMAND_PACKET_DEFINITION_DESTINATION)
header_file_list = \
header_file_parser.parse_header_files(False, "Parsing device handler command files:")
packet_subservice_parser = DeviceHandlerCommandParser(header_file_list, dh_information_table)
header_file_list = header_file_parser.parse_header_files(
False, "Parsing device handler command files:"
)
packet_subservice_parser = DeviceHandlerCommandParser(
header_file_list, dh_information_table
)
dh_command_table = packet_subservice_parser.parse_files()
Printer.print_content(dh_command_table, "Printing device handler command table:")
dh_command_writer = CsvWriter(DH_COMMANDS_CSV_NAME, dh_command_table, DH_COMMAND_HEADER_COLUMNS)
dh_command_writer = CsvWriter(
DH_COMMANDS_CSV_NAME, dh_command_table, DH_COMMAND_HEADER_COLUMNS
)
dh_command_writer.write_to_csv()
dh_command_writer.copy_csv()
dh_command_writer.move_csv("..")

@@ -120,7 +137,7 @@ class DeviceHandlerInformationParser(FileParser):
self_print_parsing_info = args[0]

# Read device name from file name
handler_match = re.search(r'([\w]*).h', file_name)
handler_match = re.search(r"([\w]*).h", file_name)
if not handler_match:
print("Device Command Parser: Configuration error, no handler name match !")
handler_name = handler_match.group(1)

@@ -145,8 +162,12 @@ class DeviceHandlerInformationParser(FileParser):
:return:
"""
# Case insensitive matching of device command enums
enum_match = re.search(r'[\s]*enum[\s]*([\w]*)[\s]*{[\s][/!<>]*[\s]*'
r'\[EXPORT[\w]*\][\s]*:[\s]*\[ENUM\]([^\n]*)', line, re.IGNORECASE)
enum_match = re.search(
r"[\s]*enum[\s]*([\w]*)[\s]*{[\s][/!<>]*[\s]*"
r"\[EXPORT[\w]*\][\s]*:[\s]*\[ENUM\]([^\n]*)",
line,
re.IGNORECASE,
)
if enum_match:
self.command_enum_name = enum_match.group(1)
self.command_scanning_pending = True

@@ -158,9 +179,11 @@ class DeviceHandlerInformationParser(FileParser):
self.__handle_command_enum_scanning(line)

def __handle_command_definition_scanning(self, line):
command_match = \
re.search(r'[\s]*static[\s]*const[\s]*DeviceCommandId_t[\s]*([\w]*)[\s]*=[\s]*'
r'([\w]*)[\s]*;[\s]*[/!<>]*[\s]*\[EXPORT\][\s]*:[\s]*\[COMMAND\]', line)
command_match = re.search(
r"[\s]*static[\s]*const[\s]*DeviceCommandId_t[\s]*([\w]*)[\s]*=[\s]*"
r"([\w]*)[\s]*;[\s]*[/!<>]*[\s]*\[EXPORT\][\s]*:[\s]*\[COMMAND\]",
line,
)
if command_match:
command_name = command_match.group(1)
command_id = command_match.group(2)

@@ -171,8 +194,11 @@ class DeviceHandlerInformationParser(FileParser):
if not self.command_scanning_pending:
# scanning enum finished
# stores current command into command dictionary with command name as unique key
command_tuple = self.command_value_name_list, self.command_value_list, \
self.command_comment_list
command_tuple = (
self.command_value_name_list,
self.command_value_list,
self.command_comment_list,
)
self.command_enum_dict.update({self.command_enum_name: command_tuple})
self.command_enum_name = ""
self.command_value_name_list = []

@@ -180,13 +206,14 @@ class DeviceHandlerInformationParser(FileParser):
self.command_comment_list = []

def __scan_command_entries(self, line):
command_match = \
re.search(r'[\s]*([\w]*)[\s]*=[\s]*([0-9]{1,3})[^/][\s]*[/!<>]*[\s]*([^\n]*)', line)
command_match = re.search(
r"[\s]*([\w]*)[\s]*=[\s]*([0-9]{1,3})[^/][\s]*[/!<>]*[\s]*([^\n]*)", line
)
if command_match:
self.command_value_name_list.append(command_match.group(1))
self.command_value_list.append(command_match.group(2))
self.command_comment_list.append(command_match.group(3))
elif re.search(r'}[\s]*;', line):
elif re.search(r"}[\s]*;", line):
self.command_scanning_pending = False

def _post_parsing_operation(self):

@@ -197,6 +224,7 @@ class PendingScanType(Enum):
"""
Specifies which scan type is performed in the device command parser.
"""

NO_SCANNING = 0
STRUCT_SCAN = 1
CLASS_SCAN = 2

@@ -209,6 +237,7 @@ class DeviceHandlerCommandParser(FileParser):
packet definitions. A device handler info table must be passed which can be acquired
by running the DH information parser.
"""

def __init__(self, file_list, dh_information_table):
super().__init__(file_list)
# this table includes the current new table entry,

@@ -258,9 +287,12 @@ class DeviceHandlerCommandParser(FileParser):
self.__scan_command(line)

def __scan_for_structs(self, line):
struct_match = re.search(r'[\s]*struct[\s]*([\w]*)[\s]*{[\s]*[/!<>]*[\s]*'
r'\[EXPORT\][ :]*\[COMMAND\]'
r'[\s]*([\w]*)[ :]*([\w]*)', line)
struct_match = re.search(
r"[\s]*struct[\s]*([\w]*)[\s]*{[\s]*[/!<>]*[\s]*"
r"\[EXPORT\][ :]*\[COMMAND\]"
r"[\s]*([\w]*)[ :]*([\w]*)",
line,
)
if struct_match:
# Scan a found command struct
self.__start_class_or_struct_scanning(struct_match)

@@ -269,8 +301,11 @@ class DeviceHandlerCommandParser(FileParser):

def __scan_for_class(self, line):
# search for class command definition
class_match = re.search(r'[\s]*class[\s]*([\w]*)[\s]*[^{]*{[ /!<>]*\[EXPORT\][ :]*'
r'\[COMMAND\][\s]*([\w]*)[ :]*([\w]*)', line)
class_match = re.search(
r"[\s]*class[\s]*([\w]*)[\s]*[^{]*{[ /!<>]*\[EXPORT\][ :]*"
r"\[COMMAND\][\s]*([\w]*)[ :]*([\w]*)",
line,
)
if class_match:
self.__start_class_or_struct_scanning(class_match)
self.scanning_pending = PendingScanType.CLASS_SCAN.value

@@ -288,21 +323,27 @@ class DeviceHandlerCommandParser(FileParser):
if handler_name in self.dh_information_table:
(command_id_dict, self.enum_dict) = self.dh_information_table[handler_name]
if command_name in command_id_dict:
self.dict_entry_list[Clmns.ACTION_ID.value] = command_id_dict[command_name]
self.dict_entry_list[Clmns.ACTION_ID.value] = command_id_dict[
command_name
]

def __scan_command(self, line):
datatype_match = False
if self.scanning_pending is PendingScanType.STRUCT_SCAN.value:
datatype_match = \
re.search(r'[\s]*(uint[0-9]{1,2}_t|float|double|bool|int|char)[\s]*([\w]*);'
r'(?:[\s]*[/!<>]*[\s]*\[EXPORT\][: ]*(.*))?', line)
datatype_match = re.search(
r"[\s]*(uint[0-9]{1,2}_t|float|double|bool|int|char)[\s]*([\w]*);"
r"(?:[\s]*[/!<>]*[\s]*\[EXPORT\][: ]*(.*))?",
line,
)
elif self.scanning_pending is PendingScanType.CLASS_SCAN.value:
datatype_match = re.search(
r'[\s]*SerializeElement[\s]*<(uint[0-9]{1,2}_t|float|double|bool|int|char)[ >]*'
r'([\w]*);(?:[ /!<>]*\[EXPORT\][: ]*(.*))?', line)
r"[\s]*SerializeElement[\s]*<(uint[0-9]{1,2}_t|float|double|bool|int|char)[ >]*"
r"([\w]*);(?:[ /!<>]*\[EXPORT\][: ]*(.*))?",
line,
)
if datatype_match:
self.__handle_datatype_match(datatype_match)
elif re.search(r'}[\s]*;', line):
elif re.search(r"}[\s]*;", line):
self.scanning_pending = PendingScanType.NO_SCANNING.value
self.command_index = 0

@@ -318,11 +359,15 @@ class DeviceHandlerCommandParser(FileParser):

def __analyse_exporter_sequence(self, exporter_sequence):
# This matches the exporter sequence pairs e.g. [ENUM] BLA [COMMENT] BLABLA [...] ...
export_string_matches = re.search(r'(?:\[([\w]*)\][\s]*([^\[]*))?', exporter_sequence)
export_string_matches = re.search(
r"(?:\[([\w]*)\][\s]*([^\[]*))?", exporter_sequence
)
if export_string_matches:
if len(export_string_matches.groups()) % 2 != 0:
print("Device Command Parser: Error when analysing exporter sequence,"
" check exporter string format")
print(
"Device Command Parser: Error when analysing exporter sequence,"
" check exporter string format"
)
else:
count = 0
while count < len(export_string_matches.groups()):

@@ -348,8 +393,7 @@ class DeviceHandlerCommandParser(FileParser):
enum_tuple = self.enum_dict[self.current_enum_name]
for count in range(0, size_of_enum):
self.__update_table_with_command_options(count, enum_tuple)
self.command_index = \
self.command_index + 1
self.command_index = self.command_index + 1
else:
self.__update_table_with_no_command_options()
self.index = self.index + 1

@@ -357,12 +401,16 @@ class DeviceHandlerCommandParser(FileParser):

def __update_table_with_command_options(self, count, enum_tuple):
enum_value_name_list, enum_value_list, enum_comment_list = enum_tuple
self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_NAME.value] = \
enum_value_name_list[count]
self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_VALUE.value] = enum_value_list[count]
self.dict_entry_list[Clmns.COMMAND_FIELD_COMMENT.value] = enum_comment_list[count]
self.dict_entry_list[Clmns.COMMAND_INDEX.value] = \
self.command_index
self.dict_entry_list[
Clmns.COMMAND_FIELD_OPTION_NAME.value
] = enum_value_name_list[count]
self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_VALUE.value] = enum_value_list[
count
]
self.dict_entry_list[Clmns.COMMAND_FIELD_COMMENT.value] = enum_comment_list[
count
]
self.dict_entry_list[Clmns.COMMAND_INDEX.value] = self.command_index
dh_command_tuple = tuple(self.dict_entry_list)
self.index += 1
self.mib_table.update({self.index: dh_command_tuple})

@@ -371,8 +419,7 @@ class DeviceHandlerCommandParser(FileParser):
self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_NAME.value] = ""
self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_VALUE.value] = ""
self.dict_entry_list[Clmns.COMMAND_FIELD_COMMENT.value] = self.command_comment
self.dict_entry_list[Clmns.COMMAND_INDEX.value] = \
self.command_index
self.dict_entry_list[Clmns.COMMAND_INDEX.value] = self.command_index
dh_command_tuple = tuple(self.dict_entry_list)
self.mib_table.update({self.index: dh_command_tuple})
self.command_index += 1
@@ -49,7 +49,7 @@ SUBSYSTEM_DEFINITION_DESTINATIONS = [
HEADER_DEFINITION_DESTINATIONS = [
f"{OBSW_ROOT_DIR}/bsp_hosted",
f"{OBSW_ROOT_DIR}/fsfw/",
f"{FSFW_CONFIG_ROOT}"
f"{FSFW_CONFIG_ROOT}",
]

@@ -83,7 +83,9 @@ def parse_events(
header_file_name=CPP_H_FILENAME,
)
if COPY_CPP_FILE:
LOGGER.info(f"EventParser: Copying CPP translation file to {CPP_COPY_DESTINATION}")
LOGGER.info(
f"EventParser: Copying CPP translation file to {CPP_COPY_DESTINATION}"
)
copy_file(CPP_FILENAME, CPP_COPY_DESTINATION)
copy_file(CPP_H_FILENAME, CPP_COPY_DESTINATION)
@@ -1 +1 @@
Subproject commit 6d423f7106e49f93743fb69e9436e1e652f9e001
Subproject commit b1e5a2d40a5f41b9020f2beb0b976035f91c6343
@@ -47,7 +47,7 @@ from packetcontent.packet_content_parser import (
PACKET_CONTENT_HEADER_COLUMN,
SQL_CREATE_PACKET_DATA_CONTENT_CMD,
SQL_INSERT_PACKET_DATA_CMD,
SQL_DELETE_PACKET_DATA_CONTENT_CMD
SQL_DELETE_PACKET_DATA_CONTENT_CMD,
)
from subservice.subservice_parser import (
SubserviceParser,

@@ -67,7 +67,7 @@ from devicecommands.device_command_parser import (
DH_COMMAND_HEADER_COLUMNS,
SQL_CREATE_CMDTABLE_CMD,
SQL_INSERT_INTO_CMDTABLE_CMD,
SQL_DELETE_CMDTABLE_CMD
SQL_DELETE_CMDTABLE_CMD,
)
from returnvalues.returnvalues_parser import (
InterfaceParser,

@@ -75,15 +75,16 @@ from returnvalues.returnvalues_parser import (
INTERFACE_DEFINITION_FILES,
RETURNVALUE_DESTINATIONS,
sql_retval_exporter,
CSV_RETVAL_FILENAME
CSV_RETVAL_FILENAME,
)
from objects.objects import (
ObjectDefinitionParser,
OBJECTS_DEFINITIONS,
export_object_file,
CSV_OBJECT_FILENAME,
sql_object_exporter
sql_object_exporter,
)

DO_EXPORT_MIB = True
PRINT_TABLES_TO_CONSOLE = False
EXPORT_TO_CSV = True

@@ -146,7 +147,7 @@ def handle_subservices_generation():

def generate_subservice_table():
""" Generate the subservice table. """
"""Generate the subservice table."""
subservice_header_parser = FileListParser(
destination_corrected(SUBSERVICE_DEFINITION_DESTINATION)
)

@@ -161,7 +162,11 @@ def generate_subservice_table():
def handle_packet_content_generation():
print("MIB Exporter: Parsing packing content")
packet_content_table = generate_packet_content_table()
print("MIB Exporter: Found " + str(len(packet_content_table)) + " packet content entries.")
print(
"MIB Exporter: Found "
+ str(len(packet_content_table))
+ " packet content entries."
)
if PRINT_TABLES_TO_CONSOLE:
print("MIB Exporter: Print packet content table: ")
Printer.print_content(packet_content_table)

@@ -179,12 +184,12 @@ def handle_packet_content_generation():
SQL_CREATE_PACKET_DATA_CONTENT_CMD,
SQL_INSERT_PACKET_DATA_CMD,
packet_content_table,
SQL_DELETE_PACKET_DATA_CONTENT_CMD
SQL_DELETE_PACKET_DATA_CONTENT_CMD,
)

def generate_packet_content_table():
""" Generate packet content table """
"""Generate packet content table"""
packet_data_header_parser = FileListParser(
destination_corrected(PACKET_CONTENT_DEFINITION_DESTINATION)
)

@@ -199,7 +204,11 @@ def generate_packet_content_table():
def handle_device_handler_command_generation():
print("MIB Exporter: Parsing device handler commands.")
dh_command_table = generate_device_command_table()
print("MIB Exporter: Found " + str(len(dh_command_table)) + " device handler command entries")
print(
"MIB Exporter: Found "
+ str(len(dh_command_table))
+ " device handler command entries"
)
if PRINT_TABLES_TO_CONSOLE:
print("MIB Exporter: Printing device handler command table: ")
Printer.print_content(dh_command_table)

@@ -207,19 +216,23 @@ def handle_device_handler_command_generation():
device_command_writer = CsvWriter(
DH_COMMANDS_CSV_NAME, dh_command_table, DH_COMMAND_HEADER_COLUMNS
)
print("MIB Exporter: Exporting device handler commands to " + DH_COMMANDS_CSV_NAME)
print(
"MIB Exporter: Exporting device handler commands to " + DH_COMMANDS_CSV_NAME
)
device_command_writer.write_to_csv()
if EXPORT_TO_SQL:
print("MIB Exporter: Exporting device handler commands to SQL")
sql_writer = SqlWriter()
sql_writer.sql_writing_helper(
SQL_CREATE_CMDTABLE_CMD, SQL_INSERT_INTO_CMDTABLE_CMD, dh_command_table,
SQL_DELETE_CMDTABLE_CMD
SQL_CREATE_CMDTABLE_CMD,
SQL_INSERT_INTO_CMDTABLE_CMD,
dh_command_table,
SQL_DELETE_CMDTABLE_CMD,
)

def generate_device_command_table(print_info_table: bool = False):
""" Generate device command table """
"""Generate device command table"""
info_header_file_parser = FileListParser(
destination_corrected(DH_DEFINITION_DESTINATION)
)

@@ -228,11 +241,15 @@ def generate_device_command_table(print_info_table: bool = False):
)
dh_information_parser = DeviceHandlerInformationParser(info_header_file_list)
dh_information_table = dh_information_parser.parse_files()
print("MIB Exporter: Found " + str(len(dh_information_table)) +
" device handler information entries.")
print(
"MIB Exporter: Found "
+ str(len(dh_information_table))
+ " device handler information entries."
)
if print_info_table:
Printer.print_content(
dh_information_table, "MIB Exporter: Priting device handler command information table: "
dh_information_table,
"MIB Exporter: Priting device handler command information table: ",
)

header_file_parser = FileListParser(

@@ -270,7 +287,9 @@ def generate_returnvalue_table():
interfaces = interface_parser.parse_files()
print("MIB Exporter: Found interfaces : " + str(len(interfaces)))
header_parser = FileListParser(destination_corrected(RETURNVALUE_DESTINATIONS))
header_list = header_parser.parse_header_files(True, "MIB Exporter: Parsing header file list: ")
header_list = header_parser.parse_header_files(
True, "MIB Exporter: Parsing header file list: "
)
returnvalue_parser = ReturnValueParser(interfaces, header_list, False)
returnvalue_table = returnvalue_parser.parse_files(False)
if PRINT_TABLES_TO_CONSOLE:

@@ -323,7 +342,7 @@ def handle_external_file_running():

def update_globals():
""" Updates the global variables """
"""Updates the global variables"""
g.PP = pprint.PrettyPrinter(indent=0, width=250)
g.doExportMIB = DO_EXPORT_MIB
g.executeSQLcommands = False
@@ -15,7 +15,13 @@ from fsfwgen.objects.objects import (
from fsfwgen.utility.printer import PrettyPrinter
from fsfwgen.utility.file_management import copy_file

from definitions import BSP_HOSTED, DATABASE_NAME, OBSW_ROOT_DIR, ROOT_DIR, EXAMPLE_COMMON_DIR
from definitions import (
BSP_HOSTED,
DATABASE_NAME,
OBSW_ROOT_DIR,
ROOT_DIR,
EXAMPLE_COMMON_DIR,
)

LOGGER = get_console_logger()
DATE_TODAY = datetime.datetime.now()
@@ -9,7 +9,13 @@ from fsfwgen.returnvalues.returnvalues_parser import InterfaceParser, ReturnValu
from fsfwgen.utility.sql_writer import SqlWriter
from fsfwgen.utility.printer import PrettyPrinter

from definitions import BSP_HOSTED, DATABASE_NAME, ROOT_DIR, OBSW_ROOT_DIR, EXAMPLE_COMMON_DIR
from definitions import (
BSP_HOSTED,
DATABASE_NAME,
ROOT_DIR,
OBSW_ROOT_DIR,
EXAMPLE_COMMON_DIR,
)

LOGGER = get_console_logger()
EXPORT_TO_FILE = True
@@ -28,7 +28,13 @@ from fsfwgen.utility.printer import Printer

SUBSERVICE_DEFINITION_DESTINATION = ["../../mission/", "../../fsfw/pus/"]
SUBSERVICE_CSV_NAME = "mib_subservices.csv"
SUBSERVICE_COLUMN_HEADER = ["Service", "Subservice Name", "Subservice Number", "Type", "Comment"]
SUBSERVICE_COLUMN_HEADER = [
"Service",
"Subservice Name",
"Subservice Number",
"Type",
"Comment",
]

SQL_DELETE_SUBSVC_CMD = """
DROP TABLE IF EXISTS Subservice;

@@ -55,6 +61,7 @@ class SubserviceColumns(Enum):
"""
Specifies order of MIB columns
"""

SERVICE = 0
NAME = 1
NUMBER = 2

@@ -71,12 +78,16 @@ def main():
:return:
"""
header_parser = FileListParser(SUBSERVICE_DEFINITION_DESTINATION)
header_file_list = header_parser.parse_header_files(False, "Parsing subservice header files: ")
header_file_list = header_parser.parse_header_files(
False, "Parsing subservice header files: "
)
packet_subservice_parser = SubserviceParser(header_file_list)
subservice_table = packet_subservice_parser.parse_files()
Printer.print_content(subservice_table, "Printing subservice table:")
print("Found " + str(len(subservice_table)) + " subservice entries.")
subservice_writer = CsvWriter(SUBSERVICE_CSV_NAME, subservice_table, SUBSERVICE_COLUMN_HEADER)
subservice_writer = CsvWriter(
SUBSERVICE_CSV_NAME, subservice_table, SUBSERVICE_COLUMN_HEADER
)
subservice_writer.write_to_csv()
subservice_writer.move_csv("..")

@@ -90,6 +101,7 @@ class SubserviceParser(FileParser):
"""
This parser class can parse the subservice definitions.
"""

def __init__(self, file_list: list):
super().__init__(file_list)
# Column System allows reshuffling of table columns in constructor

@@ -111,7 +123,7 @@ class SubserviceParser(FileParser):
self_print_parsing_info = args[0]

# Read service from file name
service_match = re.search('Service[^0-9]*([0-9]{1,3})', file_name)
service_match = re.search("Service[^0-9]*([0-9]{1,3})", file_name)
if service_match:
self.dict_entry_list[Clmns.SERVICE.value] = service_match.group(1)
self.dict_entry_list[Clmns.NAME.value] = " "

@@ -129,7 +141,7 @@ class SubserviceParser(FileParser):
:return:
"""
# Case insensitive matching
enum_match = re.search(r'[\s]*enum[\s]*Subservice([^\n]*)', line, re.IGNORECASE)
enum_match = re.search(r"[\s]*enum[\s]*Subservice([^\n]*)", line, re.IGNORECASE)
if enum_match:
self.subservice_enum_found = True
if self.subservice_enum_found:

@@ -146,7 +158,7 @@ class SubserviceParser(FileParser):
subservice_match = self.__scan_subservices(line)
if subservice_match:
self.index = self.index + 1
dict_entry_tuple = tuple(self.dict_entry_list[:self.clmns_len])
dict_entry_tuple = tuple(self.dict_entry_list[: self.clmns_len])
self.mib_table.update({self.index: dict_entry_tuple})
self.__clear_tuple()

@@ -158,8 +170,9 @@ class SubserviceParser(FileParser):
self.possible_match_on_next_lines = False

def __scan_for_export_command(self, line: str) -> bool:
command_string = re.search(r"([^\[]*)\[export\][: ]*\[([\w]*)\][\s]*([^\n]*)",
line, re.IGNORECASE)
command_string = re.search(
r"([^\[]*)\[export\][: ]*\[([\w]*)\][\s]*([^\n]*)", line, re.IGNORECASE
)
if command_string:
# Check whether there is a separated export command
# (export command is not on same line as subservice definition)

@@ -179,9 +192,9 @@ class SubserviceParser(FileParser):
Strip whitespaces and comment symbols and add to comment buffer.
"""
possible_multiline_comment = line.lstrip()
possible_multiline_comment = possible_multiline_comment.lstrip('/')
possible_multiline_comment = possible_multiline_comment.lstrip('<')
possible_multiline_comment = possible_multiline_comment.lstrip('!')
possible_multiline_comment = possible_multiline_comment.lstrip("/")
possible_multiline_comment = possible_multiline_comment.lstrip("<")
possible_multiline_comment = possible_multiline_comment.lstrip("!")
possible_multiline_comment = possible_multiline_comment.rstrip()
if len(possible_multiline_comment) > 0:
self.dict_entry_list[Clmns.COMMENT.value] += possible_multiline_comment

@@ -192,8 +205,9 @@ class SubserviceParser(FileParser):
:param line:
:return:
"""
subservice_match = \
re.search(r"[\s]*([\w]*)[\s]*=[\s]*([0-9]{1,3})(?:,)?(?:[ /!<>]*([^\n]*))?", line)
subservice_match = re.search(
r"[\s]*([\w]*)[\s]*=[\s]*([0-9]{1,3})(?:,)?(?:[ /!<>]*([^\n]*))?", line
)
if subservice_match:
self.dict_entry_list[Clmns.NAME.value] = subservice_match.group(1)
self.dict_entry_list[Clmns.NUMBER.value] = subservice_match.group(2)

@@ -204,7 +218,7 @@ class SubserviceParser(FileParser):
return True
# Check whether exporting was commanded on last lines
return bool(self.possible_match_on_next_lines)
if re.search(r'}[\s]*;', line):
if re.search(r"}[\s]*;", line):
self.subservice_enum_found = False
return subservice_match

@@ -217,19 +231,19 @@ class SubserviceParser(FileParser):
return export_command_found

def __scan_for_type(self, string) -> bool:
type_match = re.search(r'\[reply\]|\[tm\]', string, re.IGNORECASE)
type_match = re.search(r"\[reply\]|\[tm\]", string, re.IGNORECASE)
if type_match:
self.dict_entry_list[Clmns.TYPE.value] = 'TM'
self.dict_entry_list[Clmns.TYPE.value] = "TM"
return True
type_match = re.search(r'\[command\]|\[tc\]', string, re.IGNORECASE)
type_match = re.search(r"\[command\]|\[tc\]", string, re.IGNORECASE)
if type_match:
self.dict_entry_list[Clmns.TYPE.value] = 'TC'
self.dict_entry_list[Clmns.TYPE.value] = "TC"
return True
self.dict_entry_list[Clmns.TYPE.value] = 'Unspecified'
self.dict_entry_list[Clmns.TYPE.value] = "Unspecified"
return False

def __scan_for_comment(self, comment_string):
comment_match = re.search(r':[\s]*\[[\w]*\][\s]*([^\n]*)', comment_string)
comment_match = re.search(r":[\s]*\[[\w]*\][\s]*([^\n]*)", comment_string)
if comment_match:
self.dict_entry_list[Clmns.COMMENT.value] = comment_match.group(1)
@@ -15,4 +15,4 @@ printToConsole = True
exportToCSV = True
doCopyFile = False
copyDestination = "."
fileSeparator = ';'
fileSeparator = ";"