added generator files

This commit is contained in:
Robin Müller 2021-06-21 10:36:53 +02:00
parent cda061255a
commit 9229c4d98b
No known key found for this signature in database
GPG Key ID: FC76078F520434A5
21 changed files with 2004 additions and 1 deletions

18
generators/definitions.py Normal file
View File

@ -0,0 +1,18 @@
import enum
# Name of the SQLite database file produced by the exporter
DATABASE_NAME = "fsfw_mod.db"


class BspSelect(enum.Enum):
    """Selectable board support packages (BSPs) for the generators."""

    BSP_HOSTED = enum.auto()
    BSP_LINUX = enum.auto()
    BSP_STM32_FREERTOS = enum.auto()
    BSP_STM32_RTEMS = enum.auto()


# Maps each BSP selection value to its folder name in the source tree.
# The folder name is simply the lower-cased enum member name.
BspFolderDict = {member.value: member.name.lower() for member in BspSelect}

View File

View File

@ -0,0 +1,385 @@
#!/usr/bin/python3.7
"""
@file device_command_parser.py
@brief Parses the device commands which are used for the PUS Service 8 as the primary means
of satellite commanding.
@details Used by the MIB Exporter, inherits generic File Parser.
Also has information parser which parses the possible device handler command values
from the actual device handlers.
@author R. Mueller
"""
import re
from enum import Enum
from fsfwgen.parserbase.file_list_parser import FileListParser
from fsfwgen.parserbase.parser import FileParser
from fsfwgen.utility.csv_writer import CsvWriter
from fsfwgen.utility.printer import Printer
# Source locations scanned for device handler command definitions
DH_COMMAND_PACKET_DEFINITION_DESTINATION = "../../mission/devices/devicepackets/"
DH_DEFINITION_DESTINATION = "../../mission/devices/"
# CSV export target and its header row
DH_COMMANDS_CSV_NAME = "mib_device_commands.csv"
DH_COMMAND_HEADER_COLUMNS = [
    "Device Handler", "Command Name", "Action ID", "Command Field Name", "Command Field Position",
    "Command Field Type", "Command Field Option Name", "Command Field Option Value", "Comment"]
SQL_DELETE_CMDTABLE_CMD = """
DROP TABLE IF EXISTS DeviceHandlerCommand;
"""
# Bug fix: the comment column was declared with the invalid type name "COMMENT",
# which SQLite silently accepts but resolves to NUMERIC affinity (numeric-looking
# strings like "01" would be stored as numbers). TEXT is the intended type.
SQL_CREATE_CMDTABLE_CMD = """
CREATE TABLE IF NOT EXISTS DeviceHandlerCommand(
id INTEGER PRIMARY KEY,
deviceHandler TEXT,
commandName TEXT,
actionID INTEGER,
cmdFieldName TEXT,
cmdFieldPos INTEGER,
cmdFieldType TEXT,
cmdFieldOptName TEXT,
cmdFieldOptVal INTEGER,
comment TEXT
)
"""
SQL_INSERT_INTO_CMDTABLE_CMD = """
INSERT INTO DeviceHandlerCommand(deviceHandler,commandName,actionID,cmdFieldName,cmdFieldPos,
cmdFieldType,cmdFieldOptName,cmdFieldOptVal,comment)
VALUES(?,?,?,?,?,?,?,?,?)
"""
class DeviceCommandColumns(Enum):
    """
    Specifies order of MIB columns
    """
    DH_NAME = 0                      # device handler name (from the command match)
    NAME = 1                         # command struct/class name
    ACTION_ID = 2                    # PUS service 8 action ID of the command
    COMMAND_FIELD_NAME = 3           # name of the individual command field
    COMMAND_INDEX = 4                # position of the field inside the command
    TYPE = 5                         # C++ datatype of the field
    COMMAND_FIELD_OPTION_NAME = 6    # enum option name, if the field is an enum
    COMMAND_FIELD_OPTION_VALUE = 7   # enum option value, if the field is an enum
    COMMAND_FIELD_COMMENT = 8        # free-text comment for the field


# Short alias used throughout this module
Clmns = DeviceCommandColumns
def main():
    """
    The main routine is run if the device command parser is run separately.
    It first parses the device handler headers for command/enum information,
    then parses the command packet definitions and exports the combined table
    to a CSV file.
    :return:
    """
    info_header_file_parser = FileListParser(DH_DEFINITION_DESTINATION)
    # Typo fix: "informations" -> "information"
    info_header_file_list = info_header_file_parser.\
        parse_header_files(False, "Parsing device handler information:")
    dh_information_parser = DeviceHandlerInformationParser(info_header_file_list)
    dh_information_table = dh_information_parser.parse_files()
    # Typo fix: "Priting" -> "Printing"
    Printer.print_content(dh_information_table, "Printing device handler command information table: ")
    header_file_parser = FileListParser(DH_COMMAND_PACKET_DEFINITION_DESTINATION)
    header_file_list = \
        header_file_parser.parse_header_files(False, "Parsing device handler command files:")
    packet_subservice_parser = DeviceHandlerCommandParser(header_file_list, dh_information_table)
    dh_command_table = packet_subservice_parser.parse_files()
    Printer.print_content(dh_command_table, "Printing device handler command table:")
    dh_command_writer = CsvWriter(DH_COMMANDS_CSV_NAME, dh_command_table, DH_COMMAND_HEADER_COLUMNS)
    dh_command_writer.write_to_csv()
    dh_command_writer.copy_csv()
    dh_command_writer.move_csv("..")
# pylint: disable=too-few-public-methods
class DeviceHandlerInformationParser(FileParser):
    """
    This helper class parses device handler information based on the device handler
    header files. These can be used to map commands to the device handler packets later.
    """
    def __init__(self, fileList):
        super().__init__(fileList)
        # Maps command name -> command ID for the file currently being parsed
        self.command_dict = dict()
        # Maps enum name -> (value name list, value list, comment list)
        self.command_enum_dict = dict()
        self.command_enum_name = ""
        self.command_value_name_list = []
        self.command_value_list = []
        self.command_comment_list = []
        # this table includes the current new table entry, which will be updated
        # for target parameter
        self.command_scanning_pending = False

    # This is called for every file. Fill out info table in this routine
    def _handle_file_parsing(self, file_name, *args):
        self_print_parsing_info = False
        if len(args) == 1 and isinstance(args[0], bool):
            self_print_parsing_info = args[0]
        # Read device name from file name
        handler_match = re.search(r'([\w]*).h', file_name)
        if not handler_match:
            print("Device Command Parser: Configuration error, no handler name match !")
            # Bug fix: previously execution fell through and crashed on
            # handler_match.group(1) when no match was found.
            return
        handler_name = handler_match.group(1)
        if self_print_parsing_info:
            print("Parsing " + file_name + " ...")
        # Bug fix: use a context manager so the file handle is always closed.
        # Scans each line for possible device handler command enums.
        with open(file_name, "r") as file:
            for line in file.readlines():
                self.__handle_line_reading(line)
        handler_tuple = (self.command_dict, self.command_enum_dict)
        handler_dict = dict()
        handler_dict.update({handler_name: handler_tuple})
        self.mib_table.update(handler_dict)
        # Reset per-file state for the next file
        self.command_dict = dict()
        self.command_enum_dict = dict()

    def __handle_line_reading(self, line):
        """
        Searches for enum command definitions or device command definitions.
        :param line:
        :return:
        """
        # Case insensitive matching of device command enums
        enum_match = re.search(r'[\s]*enum[\s]*([\w]*)[\s]*{[\s][/!<>]*[\s]*'
                               r'\[EXPORT[\w]*\][\s]*:[\s]*\[ENUM\]([^\n]*)', line, re.IGNORECASE)
        if enum_match:
            self.command_enum_name = enum_match.group(1)
            self.command_scanning_pending = True
        else:
            self.__handle_command_definition_scanning(line)
        # while command scanning is pending, each line in enum needs to be parsed
        if self.command_scanning_pending:
            self.__handle_command_enum_scanning(line)

    def __handle_command_definition_scanning(self, line):
        # Matches e.g. "static const DeviceCommandId_t FOO = 0x01; //!< [EXPORT] : [COMMAND]"
        command_match = \
            re.search(r'[\s]*static[\s]*const[\s]*DeviceCommandId_t[\s]*([\w]*)[\s]*=[\s]*'
                      r'([\w]*)[\s]*;[\s]*[/!<>]*[\s]*\[EXPORT\][\s]*:[\s]*\[COMMAND\]', line)
        if command_match:
            command_name = command_match.group(1)
            command_id = command_match.group(2)
            self.command_dict.update({command_name: command_id})

    def __handle_command_enum_scanning(self, line):
        self.__scan_command_entries(line)
        if not self.command_scanning_pending:
            # scanning enum finished
            # stores current command into command dictionary with command name as unique key
            command_tuple = self.command_value_name_list, self.command_value_list, \
                self.command_comment_list
            self.command_enum_dict.update({self.command_enum_name: command_tuple})
            self.command_enum_name = ""
            self.command_value_name_list = []
            self.command_value_list = []
            self.command_comment_list = []

    def __scan_command_entries(self, line):
        # Matches "NAME = 123 //!< comment" style enum entries (values up to 3 digits)
        command_match = \
            re.search(r'[\s]*([\w]*)[\s]*=[\s]*([0-9]{1,3})[^/][\s]*[/!<>]*[\s]*([^\n]*)', line)
        if command_match:
            self.command_value_name_list.append(command_match.group(1))
            self.command_value_list.append(command_match.group(2))
            self.command_comment_list.append(command_match.group(3))
        elif re.search(r'}[\s]*;', line):
            # Closing brace of the enum ends the scan
            self.command_scanning_pending = False

    def _post_parsing_operation(self):
        pass
class PendingScanType(Enum):
    """Discriminates which kind of scan the device command parser is currently doing."""

    NO_SCANNING = 0
    STRUCT_SCAN = 1
    CLASS_SCAN = 2
# pylint: disable=too-many-instance-attributes
class DeviceHandlerCommandParser(FileParser):
    """
    This is the actual device handler command parser. It will parse the device handler
    packet definitions. A device handler info table must be passed which can be acquired
    by running the DH information parser.
    """
    def __init__(self, file_list, dh_information_table):
        super().__init__(file_list)
        # this table includes the current new table entry,
        # which will be updated for target parameter
        self.dict_entry_list = list(range(Clmns.__len__()))
        # This table contains information about respective device handler command options
        self.dh_information_table = dh_information_table
        self.enum_dict = dict()
        self.current_enum_name = ""
        self.comment = ""
        self.command_comment = ""
        self.command_index = 0
        self.scanning_pending = PendingScanType.NO_SCANNING.value

    # This is called for every file, fill out mib_table
    def _handle_file_parsing(self, file_name, *args):
        self_print_parsing_info = False
        if len(args) == 1 and isinstance(args[0], bool):
            self_print_parsing_info = args[0]
        if self_print_parsing_info:
            print("Parsing " + file_name + " ...")
        # Bug fix: use a context manager so the file handle is always closed.
        # Scans each line for possible device handler command definitions.
        with open(file_name, "r") as file:
            for line in file.readlines():
                self.__handle_line_reading(line)

    def __handle_line_reading(self, line: str):
        """
        Search for struct command definition
        :param line:
        :return:
        """
        self.__scan_for_commands(line)

    def __scan_for_commands(self, line):
        # Search for struct command definition
        struct_found = self.__scan_for_structs(line)
        if not struct_found:
            self.__scan_for_class(line)
        # Bug fix: compare values with != instead of the identity check "is not",
        # which only worked by accident for small interned integers.
        if self.scanning_pending != PendingScanType.NO_SCANNING.value:
            self.__scan_command(line)

    def __scan_for_structs(self, line):
        struct_match = re.search(r'[\s]*struct[\s]*([\w]*)[\s]*{[\s]*[/!<>]*[\s]*'
                                 r'\[EXPORT\][ :]*\[COMMAND\]'
                                 r'[\s]*([\w]*)[ :]*([\w]*)', line)
        if struct_match:
            # Scan a found command struct
            self.__start_class_or_struct_scanning(struct_match)
            self.scanning_pending = PendingScanType.STRUCT_SCAN.value
        return struct_match

    def __scan_for_class(self, line):
        # search for class command definition
        class_match = re.search(r'[\s]*class[\s]*([\w]*)[\s]*[^{]*{[ /!<>]*\[EXPORT\][ :]*'
                                r'\[COMMAND\][\s]*([\w]*)[ :]*([\w]*)', line)
        if class_match:
            self.__start_class_or_struct_scanning(class_match)
            self.scanning_pending = PendingScanType.CLASS_SCAN.value

    def __start_class_or_struct_scanning(self, command_match):
        """
        Stores and assigns values that are the same for each command field option
        :param command_match:
        :return:
        """
        handler_name = command_match.group(2)
        self.dict_entry_list[Clmns.DH_NAME.value] = handler_name
        self.dict_entry_list[Clmns.NAME.value] = command_match.group(1)
        command_name = command_match.group(3)
        if handler_name in self.dh_information_table:
            (command_id_dict, self.enum_dict) = self.dh_information_table[handler_name]
            if command_name in command_id_dict:
                self.dict_entry_list[Clmns.ACTION_ID.value] = command_id_dict[command_name]

    def __scan_command(self, line):
        # Bug fix: initialize with None (not False) and compare scan type with ==
        datatype_match = None
        if self.scanning_pending == PendingScanType.STRUCT_SCAN.value:
            datatype_match = \
                re.search(r'[\s]*(uint[0-9]{1,2}_t|float|double|bool|int|char)[\s]*([\w]*);'
                          r'(?:[\s]*[/!<>]*[\s]*\[EXPORT\][: ]*(.*))?', line)
        elif self.scanning_pending == PendingScanType.CLASS_SCAN.value:
            datatype_match = re.search(
                r'[\s]*SerializeElement[\s]*<(uint[0-9]{1,2}_t|float|double|bool|int|char)[ >]*'
                r'([\w]*);(?:[ /!<>]*\[EXPORT\][: ]*(.*))?', line)
        if datatype_match:
            self.__handle_datatype_match(datatype_match)
        elif re.search(r'}[\s]*;', line):
            # Closing brace ends the struct/class scan
            self.scanning_pending = PendingScanType.NO_SCANNING.value
            self.command_index = 0

    def __handle_datatype_match(self, datatype_match):
        self.dict_entry_list[Clmns.TYPE.value] = datatype_match.group(1)
        self.dict_entry_list[Clmns.COMMAND_FIELD_NAME.value] = datatype_match.group(2)
        size_of_enum = 0
        if datatype_match.group(3) is not None:
            self.__analyse_exporter_sequence(datatype_match.group(3))
        if self.current_enum_name != "":
            size_of_enum = self.__get_enum_size()
        self.__update_device_command_dict(size_of_enum)

    def __analyse_exporter_sequence(self, exporter_sequence):
        # This matches the exporter sequence pairs e.g. [ENUM] BLA [COMMENT] BLABLA [...] ...
        export_string_matches = re.search(r'(?:\[([\w]*)\][\s]*([^\[]*))?', exporter_sequence)
        if export_string_matches:
            if len(export_string_matches.groups()) % 2 != 0:
                print("Device Command Parser: Error when analysing exporter sequence,"
                      " check exporter string format")
            else:
                count = 0
                while count < len(export_string_matches.groups()):
                    sequence_type = export_string_matches.group(count + 1)
                    sequence_entry = export_string_matches.group(count + 2)
                    count = count + 2
                    self.__handle_sequence_pair(sequence_type, sequence_entry)

    def __handle_sequence_pair(self, sequence_type, sequence_entry):
        # Bug fix: the optional regex groups can be None when the pattern did not
        # participate in the match; calling casefold() on None crashed before.
        if sequence_type is None:
            return
        if sequence_type.casefold() == "enum":
            self.current_enum_name = sequence_entry
        elif sequence_type.casefold() == "comment":
            self.command_comment = sequence_entry

    def __get_enum_size(self) -> int:
        if self.current_enum_name in self.enum_dict:
            # Index 1 of the enum tuple holds the value list
            return len(self.enum_dict[self.current_enum_name][1])
        return 0

    def __update_device_command_dict(self, size_of_enum: int = 0):
        if size_of_enum > 0:
            # One table row per enum option
            enum_tuple = self.enum_dict[self.current_enum_name]
            for count in range(0, size_of_enum):
                self.__update_table_with_command_options(count, enum_tuple)
            self.command_index += 1
        else:
            self.__update_table_with_no_command_options()
            self.index += 1
        self.current_enum_name = ""

    def __update_table_with_command_options(self, count, enum_tuple):
        enum_value_name_list, enum_value_list, enum_comment_list = enum_tuple
        self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_NAME.value] = \
            enum_value_name_list[count]
        self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_VALUE.value] = enum_value_list[count]
        self.dict_entry_list[Clmns.COMMAND_FIELD_COMMENT.value] = enum_comment_list[count]
        self.dict_entry_list[Clmns.COMMAND_INDEX.value] = self.command_index
        dh_command_tuple = tuple(self.dict_entry_list)
        self.index += 1
        self.mib_table.update({self.index: dh_command_tuple})

    def __update_table_with_no_command_options(self):
        self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_NAME.value] = ""
        self.dict_entry_list[Clmns.COMMAND_FIELD_OPTION_VALUE.value] = ""
        self.dict_entry_list[Clmns.COMMAND_FIELD_COMMENT.value] = self.command_comment
        self.dict_entry_list[Clmns.COMMAND_INDEX.value] = self.command_index
        dh_command_tuple = tuple(self.dict_entry_list)
        self.mib_table.update({self.index: dh_command_tuple})
        self.command_index += 1

    def _post_parsing_operation(self):
        pass
# Allow running the device command parser stand-alone
if __name__ == "__main__":
    main()

View File

View File

@ -0,0 +1,94 @@
#! /usr/bin/python3
"""
@file event_parser.py
@brief Part of the Mission Information Base Exporter for the SOURCE project by KSat.
@details
Event exporter.
To use MySQLdb, run pip install mysqlclient or install in IDE.
On Windows, Build Tools installation might be necessary
@data 21.11.2019
"""
import datetime
from fsfwgen.events.event_parser import handle_csv_export, handle_cpp_export, SubsystemDefinitionParser, EventParser
from fsfwgen.parserbase.file_list_parser import FileListParser
from fsfwgen.utility.printer import PrettyPrinter
from fsfwgen.utility.file_management import copy_file, move_file
from definitions import BspSelect, BspFolderDict
# TODO: Ask from user or store in json file?
BSP_SELECT = BspSelect.BSP_LINUX.value
BSP_FOLDER = BspFolderDict[BSP_SELECT]
DATE_TODAY = datetime.datetime.now()
DATE_STRING_FULL = DATE_TODAY.strftime("%Y-%m-%d %H:%M:%S")
GENERATE_CPP = True
GENERATE_CPP_H = True
GENERATE_CSV = True
COPY_CPP_FILE = True
COPY_CPP_H_FILE = True
MOVE_CSV_FILE = True
PARSE_HOST_BSP = True
CSV_FILENAME = f"{BSP_FOLDER}_events.csv"
CSV_MOVE_DESTINATION = "../"
CPP_FILENAME = "translateEvents.cpp"
CPP_H_FILENAME = "translateEvents.h"
CPP_COPY_DESTINATION = f"../../{BSP_FOLDER}/fsfwconfig/events/"
FILE_SEPARATOR = ";"
SUBSYSTEM_DEFINITION_DESTINATIONS = [
f"../../{BSP_FOLDER}/fsfwconfig/events/subsystemIdRanges.h",
"../../fsfw/events/fwSubsystemIdRanges.h",
"../../common/config/commonSubsystemIds.h"
]
HEADER_DEFINITION_DESTINATIONS = ["../../mission/", "../../fsfw/", f"../../{BSP_FOLDER}", "../../test/"]
def main():
    """Entry point: parse all events and export them as CSV and/or C++ translation files."""
    print("EventParser: Parsing events: ")
    event_list = parse_events()
    if GENERATE_CSV:
        handle_csv_export(
            file_name=CSV_FILENAME, event_list=event_list, file_separator=FILE_SEPARATOR
        )
        if MOVE_CSV_FILE:
            move_file(file_name=CSV_FILENAME, destination=CSV_MOVE_DESTINATION)
    if GENERATE_CPP:
        handle_cpp_export(
            event_list=event_list,
            date_string=DATE_STRING_FULL,
            file_name=CPP_FILENAME,
            generate_header=GENERATE_CPP_H,
            header_file_name=CPP_H_FILENAME,
        )
        if COPY_CPP_FILE:
            print(f"EventParser: Copying file to {CPP_COPY_DESTINATION}")
            copy_file(CPP_FILENAME, CPP_COPY_DESTINATION)
            copy_file(CPP_H_FILENAME, CPP_COPY_DESTINATION)
    print("")
def parse_events():
    """Parse subsystem definitions and event headers; return the sorted event entries."""
    subsystem_parser = SubsystemDefinitionParser(SUBSYSTEM_DEFINITION_DESTINATIONS)
    subsystem_table = subsystem_parser.parse_files()
    print(f"Found {len(subsystem_table)} subsystem definitions.")
    PrettyPrinter.pprint(subsystem_table)
    header_list_parser = FileListParser(HEADER_DEFINITION_DESTINATIONS)
    event_headers = header_list_parser.parse_header_files(
        True, "Parsing event header file list:\n", True
    )
    parser = EventParser(event_headers, subsystem_table)
    parser.set_moving_window_mode(moving_window_size=7)
    event_table = parser.parse_files()
    sorted_entries = sorted(event_table.items())
    print(f"Found {len(sorted_entries)} entries:")
    PrettyPrinter.pprint(sorted_entries)
    return sorted_entries
if __name__ == "__main__":
main()

View File

@ -0,0 +1,249 @@
/**
* @brief Auto-generated event translation file. Contains 78 translations.
* @details
* Generated on: 2021-05-28 18:21:48
*/
#include "translateEvents.h"
// Translation strings for all events, ordered by ascending event ID
// (see the switch in translateEvents() below for the ID -> string mapping).
const char *STORE_SEND_WRITE_FAILED_STRING = "STORE_SEND_WRITE_FAILED";
const char *STORE_WRITE_FAILED_STRING = "STORE_WRITE_FAILED";
const char *STORE_SEND_READ_FAILED_STRING = "STORE_SEND_READ_FAILED";
const char *STORE_READ_FAILED_STRING = "STORE_READ_FAILED";
const char *UNEXPECTED_MSG_STRING = "UNEXPECTED_MSG";
const char *STORING_FAILED_STRING = "STORING_FAILED";
const char *TM_DUMP_FAILED_STRING = "TM_DUMP_FAILED";
const char *STORE_INIT_FAILED_STRING = "STORE_INIT_FAILED";
const char *STORE_INIT_EMPTY_STRING = "STORE_INIT_EMPTY";
const char *STORE_CONTENT_CORRUPTED_STRING = "STORE_CONTENT_CORRUPTED";
const char *STORE_INITIALIZE_STRING = "STORE_INITIALIZE";
const char *INIT_DONE_STRING = "INIT_DONE";
const char *DUMP_FINISHED_STRING = "DUMP_FINISHED";
const char *DELETION_FINISHED_STRING = "DELETION_FINISHED";
const char *DELETION_FAILED_STRING = "DELETION_FAILED";
const char *AUTO_CATALOGS_SENDING_FAILED_STRING = "AUTO_CATALOGS_SENDING_FAILED";
const char *GET_DATA_FAILED_STRING = "GET_DATA_FAILED";
const char *STORE_DATA_FAILED_STRING = "STORE_DATA_FAILED";
const char *DEVICE_BUILDING_COMMAND_FAILED_STRING = "DEVICE_BUILDING_COMMAND_FAILED";
const char *DEVICE_SENDING_COMMAND_FAILED_STRING = "DEVICE_SENDING_COMMAND_FAILED";
const char *DEVICE_REQUESTING_REPLY_FAILED_STRING = "DEVICE_REQUESTING_REPLY_FAILED";
const char *DEVICE_READING_REPLY_FAILED_STRING = "DEVICE_READING_REPLY_FAILED";
const char *DEVICE_INTERPRETING_REPLY_FAILED_STRING = "DEVICE_INTERPRETING_REPLY_FAILED";
const char *DEVICE_MISSED_REPLY_STRING = "DEVICE_MISSED_REPLY";
const char *DEVICE_UNKNOWN_REPLY_STRING = "DEVICE_UNKNOWN_REPLY";
const char *DEVICE_UNREQUESTED_REPLY_STRING = "DEVICE_UNREQUESTED_REPLY";
const char *INVALID_DEVICE_COMMAND_STRING = "INVALID_DEVICE_COMMAND";
const char *MONITORING_LIMIT_EXCEEDED_STRING = "MONITORING_LIMIT_EXCEEDED";
const char *MONITORING_AMBIGUOUS_STRING = "MONITORING_AMBIGUOUS";
const char *FUSE_CURRENT_HIGH_STRING = "FUSE_CURRENT_HIGH";
const char *FUSE_WENT_OFF_STRING = "FUSE_WENT_OFF";
const char *POWER_ABOVE_HIGH_LIMIT_STRING = "POWER_ABOVE_HIGH_LIMIT";
const char *POWER_BELOW_LOW_LIMIT_STRING = "POWER_BELOW_LOW_LIMIT";
const char *SWITCH_WENT_OFF_STRING = "SWITCH_WENT_OFF";
const char *HEATER_ON_STRING = "HEATER_ON";
const char *HEATER_OFF_STRING = "HEATER_OFF";
const char *HEATER_TIMEOUT_STRING = "HEATER_TIMEOUT";
const char *HEATER_STAYED_ON_STRING = "HEATER_STAYED_ON";
const char *HEATER_STAYED_OFF_STRING = "HEATER_STAYED_OFF";
const char *TEMP_SENSOR_HIGH_STRING = "TEMP_SENSOR_HIGH";
const char *TEMP_SENSOR_LOW_STRING = "TEMP_SENSOR_LOW";
const char *TEMP_SENSOR_GRADIENT_STRING = "TEMP_SENSOR_GRADIENT";
const char *COMPONENT_TEMP_LOW_STRING = "COMPONENT_TEMP_LOW";
const char *COMPONENT_TEMP_HIGH_STRING = "COMPONENT_TEMP_HIGH";
const char *COMPONENT_TEMP_OOL_LOW_STRING = "COMPONENT_TEMP_OOL_LOW";
const char *COMPONENT_TEMP_OOL_HIGH_STRING = "COMPONENT_TEMP_OOL_HIGH";
const char *TEMP_NOT_IN_OP_RANGE_STRING = "TEMP_NOT_IN_OP_RANGE";
const char *FDIR_CHANGED_STATE_STRING = "FDIR_CHANGED_STATE";
const char *FDIR_STARTS_RECOVERY_STRING = "FDIR_STARTS_RECOVERY";
const char *FDIR_TURNS_OFF_DEVICE_STRING = "FDIR_TURNS_OFF_DEVICE";
const char *MONITOR_CHANGED_STATE_STRING = "MONITOR_CHANGED_STATE";
const char *VALUE_BELOW_LOW_LIMIT_STRING = "VALUE_BELOW_LOW_LIMIT";
const char *VALUE_ABOVE_HIGH_LIMIT_STRING = "VALUE_ABOVE_HIGH_LIMIT";
const char *VALUE_OUT_OF_RANGE_STRING = "VALUE_OUT_OF_RANGE";
const char *SWITCHING_TM_FAILED_STRING = "SWITCHING_TM_FAILED";
const char *CHANGING_MODE_STRING = "CHANGING_MODE";
const char *MODE_INFO_STRING = "MODE_INFO";
const char *FALLBACK_FAILED_STRING = "FALLBACK_FAILED";
const char *MODE_TRANSITION_FAILED_STRING = "MODE_TRANSITION_FAILED";
const char *CANT_KEEP_MODE_STRING = "CANT_KEEP_MODE";
const char *OBJECT_IN_INVALID_MODE_STRING = "OBJECT_IN_INVALID_MODE";
const char *FORCING_MODE_STRING = "FORCING_MODE";
const char *MODE_CMD_REJECTED_STRING = "MODE_CMD_REJECTED";
const char *HEALTH_INFO_STRING = "HEALTH_INFO";
const char *CHILD_CHANGED_HEALTH_STRING = "CHILD_CHANGED_HEALTH";
const char *CHILD_PROBLEMS_STRING = "CHILD_PROBLEMS";
const char *OVERWRITING_HEALTH_STRING = "OVERWRITING_HEALTH";
const char *TRYING_RECOVERY_STRING = "TRYING_RECOVERY";
const char *RECOVERY_STEP_STRING = "RECOVERY_STEP";
const char *RECOVERY_DONE_STRING = "RECOVERY_DONE";
const char *RF_AVAILABLE_STRING = "RF_AVAILABLE";
const char *RF_LOST_STRING = "RF_LOST";
const char *BIT_LOCK_STRING = "BIT_LOCK";
const char *BIT_LOCK_LOST_STRING = "BIT_LOCK_LOST";
const char *FRAME_PROCESSING_FAILED_STRING = "FRAME_PROCESSING_FAILED";
const char *CLOCK_SET_STRING = "CLOCK_SET";
const char *CLOCK_SET_FAILURE_STRING = "CLOCK_SET_FAILURE";
const char *TEST_STRING = "TEST";
/**
 * Translate an event into a human readable string.
 *
 * Only the lower 16 bits of the event (the unique event ID) are used for the
 * lookup; the subsystem ID in the upper bits is masked out.
 *
 * @param event Raw event value.
 * @return Pointer to a static translation string, or "UNKNOWN_EVENT" if the ID
 *         is not known. Never returns a null pointer.
 *         (Bug fix: the unreachable trailing "return 0;" — a null const char*
 *         — after the exhaustive switch was removed.)
 */
const char * translateEvents(Event event) {
    switch( (event & 0xffff) ) {
    case(2200):
        return STORE_SEND_WRITE_FAILED_STRING;
    case(2201):
        return STORE_WRITE_FAILED_STRING;
    case(2202):
        return STORE_SEND_READ_FAILED_STRING;
    case(2203):
        return STORE_READ_FAILED_STRING;
    case(2204):
        return UNEXPECTED_MSG_STRING;
    case(2205):
        return STORING_FAILED_STRING;
    case(2206):
        return TM_DUMP_FAILED_STRING;
    case(2207):
        return STORE_INIT_FAILED_STRING;
    case(2208):
        return STORE_INIT_EMPTY_STRING;
    case(2209):
        return STORE_CONTENT_CORRUPTED_STRING;
    case(2210):
        return STORE_INITIALIZE_STRING;
    case(2211):
        return INIT_DONE_STRING;
    case(2212):
        return DUMP_FINISHED_STRING;
    case(2213):
        return DELETION_FINISHED_STRING;
    case(2214):
        return DELETION_FAILED_STRING;
    case(2215):
        return AUTO_CATALOGS_SENDING_FAILED_STRING;
    case(2600):
        return GET_DATA_FAILED_STRING;
    case(2601):
        return STORE_DATA_FAILED_STRING;
    case(2800):
        return DEVICE_BUILDING_COMMAND_FAILED_STRING;
    case(2801):
        return DEVICE_SENDING_COMMAND_FAILED_STRING;
    case(2802):
        return DEVICE_REQUESTING_REPLY_FAILED_STRING;
    case(2803):
        return DEVICE_READING_REPLY_FAILED_STRING;
    case(2804):
        return DEVICE_INTERPRETING_REPLY_FAILED_STRING;
    case(2805):
        return DEVICE_MISSED_REPLY_STRING;
    case(2806):
        return DEVICE_UNKNOWN_REPLY_STRING;
    case(2807):
        return DEVICE_UNREQUESTED_REPLY_STRING;
    case(2808):
        return INVALID_DEVICE_COMMAND_STRING;
    case(2809):
        return MONITORING_LIMIT_EXCEEDED_STRING;
    case(2810):
        return MONITORING_AMBIGUOUS_STRING;
    case(4201):
        return FUSE_CURRENT_HIGH_STRING;
    case(4202):
        return FUSE_WENT_OFF_STRING;
    case(4204):
        return POWER_ABOVE_HIGH_LIMIT_STRING;
    case(4205):
        return POWER_BELOW_LOW_LIMIT_STRING;
    case(4300):
        return SWITCH_WENT_OFF_STRING;
    case(5000):
        return HEATER_ON_STRING;
    case(5001):
        return HEATER_OFF_STRING;
    case(5002):
        return HEATER_TIMEOUT_STRING;
    case(5003):
        return HEATER_STAYED_ON_STRING;
    case(5004):
        return HEATER_STAYED_OFF_STRING;
    case(5200):
        return TEMP_SENSOR_HIGH_STRING;
    case(5201):
        return TEMP_SENSOR_LOW_STRING;
    case(5202):
        return TEMP_SENSOR_GRADIENT_STRING;
    case(5901):
        return COMPONENT_TEMP_LOW_STRING;
    case(5902):
        return COMPONENT_TEMP_HIGH_STRING;
    case(5903):
        return COMPONENT_TEMP_OOL_LOW_STRING;
    case(5904):
        return COMPONENT_TEMP_OOL_HIGH_STRING;
    case(5905):
        return TEMP_NOT_IN_OP_RANGE_STRING;
    case(7101):
        return FDIR_CHANGED_STATE_STRING;
    case(7102):
        return FDIR_STARTS_RECOVERY_STRING;
    case(7103):
        return FDIR_TURNS_OFF_DEVICE_STRING;
    case(7201):
        return MONITOR_CHANGED_STATE_STRING;
    case(7202):
        return VALUE_BELOW_LOW_LIMIT_STRING;
    case(7203):
        return VALUE_ABOVE_HIGH_LIMIT_STRING;
    case(7204):
        return VALUE_OUT_OF_RANGE_STRING;
    case(7301):
        return SWITCHING_TM_FAILED_STRING;
    case(7400):
        return CHANGING_MODE_STRING;
    case(7401):
        return MODE_INFO_STRING;
    case(7402):
        return FALLBACK_FAILED_STRING;
    case(7403):
        return MODE_TRANSITION_FAILED_STRING;
    case(7404):
        return CANT_KEEP_MODE_STRING;
    case(7405):
        return OBJECT_IN_INVALID_MODE_STRING;
    case(7406):
        return FORCING_MODE_STRING;
    case(7407):
        return MODE_CMD_REJECTED_STRING;
    case(7506):
        return HEALTH_INFO_STRING;
    case(7507):
        return CHILD_CHANGED_HEALTH_STRING;
    case(7508):
        return CHILD_PROBLEMS_STRING;
    case(7509):
        return OVERWRITING_HEALTH_STRING;
    case(7510):
        return TRYING_RECOVERY_STRING;
    case(7511):
        return RECOVERY_STEP_STRING;
    case(7512):
        return RECOVERY_DONE_STRING;
    case(7900):
        return RF_AVAILABLE_STRING;
    case(7901):
        return RF_LOST_STRING;
    case(7902):
        return BIT_LOCK_STRING;
    case(7903):
        return BIT_LOCK_LOST_STRING;
    case(7905):
        return FRAME_PROCESSING_FAILED_STRING;
    case(8900):
        return CLOCK_SET_STRING;
    case(8901):
        return CLOCK_SET_FAILURE_STRING;
    case(9700):
        return TEST_STRING;
    default:
        return "UNKNOWN_EVENT";
    }
}

View File

@ -0,0 +1,8 @@
#ifndef FSFWCONFIG_EVENTS_TRANSLATEEVENTS_H_
#define FSFWCONFIG_EVENTS_TRANSLATEEVENTS_H_

#include <fsfw/events/Event.h>

// Returns a static human readable string for the given event,
// or "UNKNOWN_EVENT" if the event ID is not known.
const char * translateEvents(Event event);

#endif /* FSFWCONFIG_EVENTS_TRANSLATEEVENTS_H_ */

@ -1 +1 @@
Subproject commit fa507cc4f7b947cd9e4de30e9be6167ff1c15ec7
Subproject commit 78e890f947f55a9417d390fea8d9bd5684d11730

338
generators/mod_exporter.py Normal file
View File

@ -0,0 +1,338 @@
#! /usr/bin/python3.8
# -*- coding: utf-8 -*-
"""
@file mod_exporter.py
@brief Mission Information Base Exporter for the SOURCE project by KSat.
@details
Parses OBSW which is based on FSFW developed by the Institute of Space Systems (IRS) Stuttgart.
Python 3.8 required
This exporter generates the MIB from the SOURCE On-Board Software directly
by using file parser implementations
This exporter has the following capabilities :
1. Export MIB tables CSV files
2. Export MIB tables into a SQL database
This exporter currently has parser for following data:
1. Objects
2. Returnvalues
3. Packet content (Telemetry/Telecommands)
4. Events
5. Subservices
6. Device Commands
7. Global datapool
@developers
Basic Instructions to implement new parserbase:
This parser uses a generic parser class. A specific parser implementation
can be built by implementing the generic parser class.
The parser generally takes a list with all files to parse and a dictionary
with the structure of the MiB table.
This website can be used to experiment with regular expressions: https://regex101.com/
TODO:
1. Maybe make this file object oriented too.
"""
import os
import pprint
from utility.mib_csv_writer import CsvWriter
from utility.mib_printer import Printer, PrettyPrinter
from utility.mib_sql_writer import SqlWriter
from utility import mib_globals as g
from parserbase.mib_file_list_parser import FileListParser
from packetcontent.mib_packet_content_parser import (
PacketContentParser,
PACKET_CONTENT_DEFINITION_DESTINATION,
PACKET_CONTENT_CSV_NAME,
PACKET_CONTENT_HEADER_COLUMN,
SQL_CREATE_PACKET_DATA_CONTENT_CMD,
SQL_INSERT_PACKET_DATA_CMD,
SQL_DELETE_PACKET_DATA_CONTENT_CMD
)
from subservice.mib_subservice_parser import (
SubserviceParser,
SUBSERVICE_DEFINITION_DESTINATION,
SUBSERVICE_CSV_NAME,
SUBSERVICE_COLUMN_HEADER,
SQL_CREATE_SUBSVC_CMD,
SQL_DELETE_SUBSVC_CMD,
SQL_INSERT_INTO_SUBSVC_CMD,
)
from devicecommands.device_command_parser import (
DeviceHandlerInformationParser,
DeviceHandlerCommandParser,
DH_COMMAND_PACKET_DEFINITION_DESTINATION,
DH_DEFINITION_DESTINATION,
DH_COMMANDS_CSV_NAME,
DH_COMMAND_HEADER_COLUMNS,
SQL_CREATE_CMDTABLE_CMD,
SQL_INSERT_INTO_CMDTABLE_CMD,
SQL_DELETE_CMDTABLE_CMD
)
from returnvalues.returnvalues_parser import (
InterfaceParser,
ReturnValueParser,
INTERFACE_DEFINITION_FILES,
RETURNVALUE_DESTINATIONS,
sql_retval_exporter,
CSV_RETVAL_FILENAME
)
from objects.objects import (
ObjectDefinitionParser,
OBJECTS_DEFINITIONS,
export_object_file,
CSV_OBJECT_FILENAME,
sql_object_exporter
)
# Global configuration flags for the MIB export run
DO_EXPORT_MIB = True
# Dump each generated table to stdout in addition to exporting it
PRINT_TABLES_TO_CONSOLE = False
EXPORT_TO_CSV = True
EXPORT_TO_SQL = True
COPY_FILE = False
COPY_DESTINATION = "."
FILE_SEPARATOR = ";"
EXECUTE_SQL_COMMANDS = False
def main():
    """Entry point for the MIB exporter; delegates to :func:`parse_mib`."""
    parse_mib()
def parse_mib():
    """
    Core routine: parses all files, builds all tables and exports them.
    The structure of each table is generated in a separate, easily
    modifiable function.
    :return:
    """
    generation_steps = (
        handle_subservices_generation,
        # handle_packet_content_generation,
        # handle_device_handler_command_generation,
        handle_returnvalue_generation,
        handle_objects_generation,
        handle_events_generation,
    )
    for step in generation_steps:
        step()
        print()
def handle_subservices_generation():
    """Parse the subservice definitions and export them to CSV and/or SQL."""
    print("MIB Exporter: Parsing subservices")
    subservice_table = generate_subservice_table()
    print("MIB Exporter: Found " + str(len(subservice_table)) + " subservice entries.")
    if PRINT_TABLES_TO_CONSOLE:
        print("MIB Exporter: Printing subservice table: ")
        Printer.print_content(subservice_table)
    if EXPORT_TO_CSV:
        csv_writer = CsvWriter(SUBSERVICE_CSV_NAME, subservice_table, SUBSERVICE_COLUMN_HEADER)
        print("MIB Exporter: Exporting to file: " + SUBSERVICE_CSV_NAME)
        csv_writer.write_to_csv()
    if EXPORT_TO_SQL:
        print("MIB Exporter: Exporting subservices to SQL")
        sql_writer = SqlWriter()
        sql_writer.delete(SQL_DELETE_SUBSVC_CMD)
        sql_writer.sql_writing_helper(
            SQL_CREATE_SUBSVC_CMD, SQL_INSERT_INTO_SUBSVC_CMD, subservice_table
        )
def generate_subservice_table():
    """Build and return the subservice table from the subservice definition headers."""
    header_list_parser = FileListParser(
        destination_corrected(SUBSERVICE_DEFINITION_DESTINATION)
    )
    header_list = header_list_parser.parse_header_files(
        False, "MIB Exporter: Parsing subservice files: "
    )
    return SubserviceParser(header_list).parse_files()
def handle_packet_content_generation():
    """Parse the telemetry/telecommand packet content and export it to CSV and/or SQL."""
    print("MIB Exporter: Parsing packing content")
    packet_content_table = generate_packet_content_table()
    print("MIB Exporter: Found " + str(len(packet_content_table)) + " packet content entries.")
    if PRINT_TABLES_TO_CONSOLE:
        print("MIB Exporter: Print packet content table: ")
        Printer.print_content(packet_content_table)
    if EXPORT_TO_CSV:
        csv_writer = CsvWriter(
            PACKET_CONTENT_CSV_NAME, packet_content_table, PACKET_CONTENT_HEADER_COLUMN
        )
        print("MIB Exporter: Exporting to file " + PACKET_CONTENT_CSV_NAME)
        csv_writer.write_to_csv()
    if EXPORT_TO_SQL:
        print("MIB Exporter: Exporting packet content to SQL")
        SqlWriter().sql_writing_helper(
            SQL_CREATE_PACKET_DATA_CONTENT_CMD,
            SQL_INSERT_PACKET_DATA_CMD,
            packet_content_table,
            SQL_DELETE_PACKET_DATA_CONTENT_CMD,
        )
def generate_packet_content_table():
    """Build and return the packet content table from the packet data headers."""
    header_list_parser = FileListParser(
        destination_corrected(PACKET_CONTENT_DEFINITION_DESTINATION)
    )
    header_list = header_list_parser.parse_header_files(
        False, "MIB Exporter: Parsing packet data files: "
    )
    return PacketContentParser(header_list).parse_files()
def handle_device_handler_command_generation():
    """Parse the device handler commands and export them according to the global flags."""
    print("MIB Exporter: Parsing device handler commands.")
    command_table = generate_device_command_table()
    print("MIB Exporter: Found " + str(len(command_table)) + " device handler command entries")
    if PRINT_TABLES_TO_CONSOLE:
        print("MIB Exporter: Printing device handler command table: ")
        Printer.print_content(command_table)
    if EXPORT_TO_CSV:
        csv_writer = CsvWriter(
            DH_COMMANDS_CSV_NAME, command_table, DH_COMMAND_HEADER_COLUMNS
        )
        print("MIB Exporter: Exporting device handler commands to " + DH_COMMANDS_CSV_NAME)
        csv_writer.write_to_csv()
    if EXPORT_TO_SQL:
        print("MIB Exporter: Exporting device handler commands to SQL")
        # Helper performs delete -> create -> insert in a single call.
        SqlWriter().sql_writing_helper(
            SQL_CREATE_CMDTABLE_CMD, SQL_INSERT_INTO_CMDTABLE_CMD, command_table,
            SQL_DELETE_CMDTABLE_CMD
        )
def generate_device_command_table(print_info_table: bool = False):
    """Generate the device command table.

    First parses the device handler headers for command information, then the
    command packet definitions, and combines both via the command parser.

    :param print_info_table: If True, the intermediate information table is printed.
    :return: The device handler command MIB table.
    """
    info_header_file_parser = FileListParser(
        destination_corrected(DH_DEFINITION_DESTINATION)
    )
    # Typo fixed in progress printout: "informations" -> "information"
    info_header_file_list = info_header_file_parser.parse_header_files(
        False, "MIB Exporter: Parsing device handler information: "
    )
    dh_information_parser = DeviceHandlerInformationParser(info_header_file_list)
    dh_information_table = dh_information_parser.parse_files()
    print("MIB Exporter: Found " + str(len(dh_information_table)) +
          " device handler information entries.")
    if print_info_table:
        # Typo fixed in printout: "Priting" -> "Printing"
        Printer.print_content(
            dh_information_table, "MIB Exporter: Printing device handler command information table: "
        )
    header_file_parser = FileListParser(
        destination_corrected(DH_COMMAND_PACKET_DEFINITION_DESTINATION)
    )
    header_file_list = header_file_parser.parse_header_files(
        False, "MIB Exporter: Parsing device handler command files: "
    )
    packet_subservice_parser = DeviceHandlerCommandParser(
        header_file_list, dh_information_table
    )
    dh_command_table = packet_subservice_parser.parse_files()
    return dh_command_table
def handle_returnvalue_generation():
    """Parse the returnvalues and export them according to the global flags."""
    print("MIB Exporter: Parsing returnvalues")
    retval_table = generate_returnvalue_table()
    print("MIB Exporter: Found " + str(len(retval_table)) + " returnvalues.")
    if PRINT_TABLES_TO_CONSOLE:
        print("MIB Exporter: Printing returnvalue table: ")
        Printer.print_content(retval_table)
    if EXPORT_TO_CSV:
        print("MIB Exporter: Exporting returnvalues to " + CSV_RETVAL_FILENAME)
        ReturnValueParser.export_to_file(CSV_RETVAL_FILENAME, retval_table)
    if EXPORT_TO_SQL:
        print("MIB Exporter: Export returnvalues to SQL: ")
        sql_retval_exporter(retval_table)
def generate_returnvalue_table():
    """Parse interface class IDs and returnvalue definitions into a table."""
    # The interface (class ID) table is needed first to resolve full codes.
    interfaces = InterfaceParser(
        destination_corrected(INTERFACE_DEFINITION_FILES), False
    ).parse_files()
    print("MIB Exporter: Found interfaces : " + str(len(interfaces)))
    header_list = FileListParser(
        destination_corrected(RETURNVALUE_DESTINATIONS)
    ).parse_header_files(True, "MIB Exporter: Parsing header file list: ")
    retval_table = ReturnValueParser(interfaces, header_list, False).parse_files(False)
    if PRINT_TABLES_TO_CONSOLE:
        Printer.print_content(retval_table, "Returnvalue Table: ")
    return retval_table
def handle_objects_generation():
    """Parse the system object definitions and export them according to the flags."""
    print("MIB Exporter: Parsing Objects")
    parsed_objects = ObjectDefinitionParser(
        destination_corrected(OBJECTS_DEFINITIONS)
    ).parse_files()
    sorted_objects = sorted(parsed_objects.items())
    print("MIB Exporter: Found " + str(len(parsed_objects)) + " entries")
    if EXPORT_TO_CSV:
        print("MIB Exporter: Exporting to file: " + CSV_OBJECT_FILENAME)
        export_object_file(CSV_OBJECT_FILENAME, sorted_objects)
    if EXPORT_TO_SQL:
        print("MIB Exporter: Exporting objects into SQL table")
        sql_object_exporter(sorted_objects)
def handle_events_generation():
    """Placeholder for event table generation; currently handled by the external
    event_parser.py script (see handle_external_file_running)."""
    pass
def destination_corrected(destination_string):
    """
    If headers are parsed here instead of the respective subdirectories,
    the destination files are located in a different relative destination
    """
    # Strip the leading "../" (three characters) from each path.
    if isinstance(destination_string, list):
        return [entry[3:] for entry in destination_string]
    return destination_string[3:]
def handle_external_file_running():
    """
    Generates the MIB parser from external files
    TODO: Make this stuff OOP too. Retvals and objects were already refactored
    """
    # NOTE(review): relies on a module-level `os` import that is not visible in
    # this chunk — confirm it exists. Consider subprocess.run with cwd="events"
    # instead of chdir + os.system so the process CWD is not mutated.
    os.chdir("events")
    os.system("python event_parser.py")
    os.chdir("..")
    # Carriage return + newline kept as-is; downstream printing relies on it.
    print_string = "Exported to file: MIB_Events.csv\r\n"
    return print_string
def update_globals():
    """ Updates the global variables """
    # Mirrors the module-level configuration flags into the shared globals
    # module `g` so that legacy code paths can read them from one place.
    g.PP = pprint.PrettyPrinter(indent=0, width=250)
    g.doExportMIB = DO_EXPORT_MIB
    # SQL command execution is deliberately disabled here; SQL export is done
    # by the dedicated handler functions instead.
    g.executeSQLcommands = False
    g.printToConsole = PRINT_TABLES_TO_CONSOLE
    g.exportToCSV = EXPORT_TO_CSV
    g.copyFile = COPY_FILE
    g.copyDestination = COPY_DESTINATION
    g.fileSeparator = FILE_SEPARATOR
# Script entry point.
if __name__ == "__main__":
    main()

View File

View File

@ -0,0 +1,109 @@
#! /usr/bin/env python3
"""
@file objects.py
@brief Part of the Mission Information Base Exporter for the SOURCE project by KSat.
@details
Object exporter.
To use MySQLdb, run pip install mysqlclient or install in IDE.
On Windows, Build Tools installation might be necessary
@date 21.11.2019
"""
import datetime
from fsfwgen.objects.objects import ObjectDefinitionParser, sql_object_exporter, write_translation_file, \
export_object_file, write_translation_header_file
from fsfwgen.utility.printer import PrettyPrinter
from fsfwgen.utility.file_management import copy_file, move_file
from definitions import DATABASE_NAME, BspSelect, BspFolderDict
# Timestamp embedded into the generated C++ translation file.
DATE_TODAY = datetime.datetime.now()
DATE_STRING_FULL = DATE_TODAY.strftime("%Y-%m-%d %H:%M:%S")
# Export / copy configuration flags.
GENERATE_CSV = True
MOVE_CSV = True
GENERATE_CPP = True
COPY_CPP = True
GENERATE_HEADER = True
PARSE_HOST_BSP = False
EXPORT_TO_SQL = True
# Selected BSP determines output folder and generated file names.
BSP_SELECT = BspSelect.BSP_HOSTED.value
BSP_FOLDER = BspFolderDict[BSP_SELECT]
CPP_COPY_DESTINATION = f"../../{BSP_FOLDER}/fsfwconfig/objects/"
CSV_MOVE_DESTINATION = "../"
CPP_FILENAME = "translateObjects.cpp"
CPP_H_FILENAME = "translateObjects.h"
CSV_OBJECT_FILENAME = f"{BSP_FOLDER}_objects.csv"
FILE_SEPARATOR = ";"
# Header files scanned for object ID definitions.
OBJECTS_PATH = f"../../{BSP_FOLDER}/fsfwconfig/objects/systemObjectList.h"
FRAMEWORK_OBJECTS_PATH = "../../fsfw/objectmanager/frameworkObjects.h"
COMMON_OBJECTS_PATH = "../../common/config/commonSystemObjects.h"
OBJECTS_DEFINITIONS = [OBJECTS_PATH, FRAMEWORK_OBJECTS_PATH, COMMON_OBJECTS_PATH]
# SQL statements for the Objects table (drop, create, insert).
SQL_DELETE_OBJECTS_CMD = """
DROP TABLE IF EXISTS Objects
"""
SQL_CREATE_OBJECTS_CMD = """
CREATE TABLE IF NOT EXISTS Objects(
id INTEGER PRIMARY KEY,
objectid TEXT,
name TEXT
)
"""
SQL_INSERT_INTO_OBJECTS_CMD = """
INSERT INTO Objects(objectid, name)
VALUES(?,?)
"""
def main():
    """Parse the object definitions, export them and optionally fill the SQL table."""
    print("Parsing objects: ")
    sorted_objects = parse_objects()
    handle_file_export(sorted_objects)
    if EXPORT_TO_SQL:
        print("ObjectParser: Exporting to SQL")
        sql_object_exporter(
            object_table=sorted_objects,
            delete_cmd=SQL_DELETE_OBJECTS_CMD,
            insert_cmd=SQL_INSERT_INTO_OBJECTS_CMD,
            create_cmd=SQL_CREATE_OBJECTS_CMD,
            db_filename=f"../{DATABASE_NAME}",
        )
def parse_objects():
    """Parse all object definition files and return a sorted list of (id, name) items."""
    definitions = ObjectDefinitionParser(OBJECTS_DEFINITIONS).parse_files()
    # id_subsystem_definitions.update(framework_subsystem_definitions)
    sorted_items = sorted(definitions.items())
    PrettyPrinter.pprint(sorted_items)
    print("ObjectParser: Number of objects: ", len(sorted_items))
    return sorted_items
def handle_file_export(list_items):
    """Write the C++ translation files and/or the CSV export, depending on the flags."""
    if GENERATE_CPP:
        print("ObjectParser: Generating translation C++ file.")
        write_translation_file(
            filename=CPP_FILENAME,
            list_of_entries=list_items,
            date_string_full=DATE_STRING_FULL,
        )
        if COPY_CPP:
            print("ObjectParser: Copying object file to " + CPP_COPY_DESTINATION)
            copy_file(CPP_FILENAME, CPP_COPY_DESTINATION)
    if GENERATE_HEADER:
        write_translation_header_file(filename=CPP_H_FILENAME)
        copy_file(filename=CPP_H_FILENAME, destination=CPP_COPY_DESTINATION)
    if GENERATE_CSV:
        print("ObjectParser: Generating text export.")
        export_object_file(
            filename=CSV_OBJECT_FILENAME,
            object_list=list_items,
            file_separator=FILE_SEPARATOR,
        )
        if MOVE_CSV:
            move_file(file_name=CSV_OBJECT_FILENAME, destination=CSV_MOVE_DESTINATION)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@ -0,0 +1,127 @@
/**
* @brief Auto-generated object translation file.
* @details
* Contains 37 translations.
* Generated on: 2021-05-28 18:12:56
*/
#include "translateObjects.h"
// Auto-generated object-name string constants, one per known object ID.
const char *TEST_ASSEMBLY_STRING = "TEST_ASSEMBLY";
const char *TEST_CONTROLLER_STRING = "TEST_CONTROLLER";
const char *TEST_DEVICE_HANDLER_0_STRING = "TEST_DEVICE_HANDLER_0";
const char *TEST_DEVICE_HANDLER_1_STRING = "TEST_DEVICE_HANDLER_1";
const char *TEST_ECHO_COM_IF_STRING = "TEST_ECHO_COM_IF";
const char *FSFW_OBJECTS_START_STRING = "FSFW_OBJECTS_START";
const char *PUS_SERVICE_1_VERIFICATION_STRING = "PUS_SERVICE_1_VERIFICATION";
const char *PUS_SERVICE_2_DEVICE_ACCESS_STRING = "PUS_SERVICE_2_DEVICE_ACCESS";
const char *PUS_SERVICE_3_HOUSEKEEPING_STRING = "PUS_SERVICE_3_HOUSEKEEPING";
const char *PUS_SERVICE_5_EVENT_REPORTING_STRING = "PUS_SERVICE_5_EVENT_REPORTING";
const char *PUS_SERVICE_8_FUNCTION_MGMT_STRING = "PUS_SERVICE_8_FUNCTION_MGMT";
const char *PUS_SERVICE_9_TIME_MGMT_STRING = "PUS_SERVICE_9_TIME_MGMT";
const char *PUS_SERVICE_17_TEST_STRING = "PUS_SERVICE_17_TEST";
const char *PUS_SERVICE_20_PARAMETERS_STRING = "PUS_SERVICE_20_PARAMETERS";
const char *PUS_SERVICE_200_MODE_MGMT_STRING = "PUS_SERVICE_200_MODE_MGMT";
const char *PUS_SERVICE_201_HEALTH_STRING = "PUS_SERVICE_201_HEALTH";
const char *HEALTH_TABLE_STRING = "HEALTH_TABLE";
const char *MODE_STORE_STRING = "MODE_STORE";
const char *EVENT_MANAGER_STRING = "EVENT_MANAGER";
const char *INTERNAL_ERROR_REPORTER_STRING = "INTERNAL_ERROR_REPORTER";
const char *TC_STORE_STRING = "TC_STORE";
const char *TM_STORE_STRING = "TM_STORE";
const char *IPC_STORE_STRING = "IPC_STORE";
const char *TIME_STAMPER_STRING = "TIME_STAMPER";
const char *FSFW_OBJECTS_END_STRING = "FSFW_OBJECTS_END";
const char *UDP_BRIDGE_STRING = "UDP_BRIDGE";
const char *UDP_POLLING_TASK_STRING = "UDP_POLLING_TASK";
const char *CCSDS_DISTRIBUTOR_STRING = "CCSDS_DISTRIBUTOR";
const char *PUS_DISTRIBUTOR_STRING = "PUS_DISTRIBUTOR";
const char *TM_FUNNEL_STRING = "TM_FUNNEL";
const char *TEST_DUMMY_1_STRING = "TEST_DUMMY_1";
const char *TEST_DUMMY_2_STRING = "TEST_DUMMY_2";
const char *TEST_DUMMY_3_STRING = "TEST_DUMMY_3";
const char *TEST_DUMMY_4_STRING = "TEST_DUMMY_4";
const char *TEST_DUMMY_5_STRING = "TEST_DUMMY_5";
const char *TEST_TASK_STRING = "TEST_TASK";
const char *NO_OBJECT_STRING = "NO_OBJECT";
/**
 * @brief Translate an object ID into its human-readable name.
 * @param object The 32-bit object identifier.
 * @return Pointer to a static name string, or "UNKNOWN_OBJECT" for IDs
 *         that are not part of the generated table.
 */
const char* translateObject(object_id_t object) {
    // NOTE(review): the 0xFFFFFFFF mask is a no-op if object_id_t is 32 bit —
    // kept for parity with the generator template.
    switch( (object & 0xFFFFFFFF) ) {
    case 0x4100CAFE:
        return TEST_ASSEMBLY_STRING;
    case 0x4301CAFE:
        return TEST_CONTROLLER_STRING;
    case 0x4401AFFE:
        return TEST_DEVICE_HANDLER_0_STRING;
    case 0x4402AFFE:
        return TEST_DEVICE_HANDLER_1_STRING;
    case 0x4900AFFE:
        return TEST_ECHO_COM_IF_STRING;
    case 0x53000000:
        return FSFW_OBJECTS_START_STRING;
    case 0x53000001:
        return PUS_SERVICE_1_VERIFICATION_STRING;
    case 0x53000002:
        return PUS_SERVICE_2_DEVICE_ACCESS_STRING;
    case 0x53000003:
        return PUS_SERVICE_3_HOUSEKEEPING_STRING;
    case 0x53000005:
        return PUS_SERVICE_5_EVENT_REPORTING_STRING;
    case 0x53000008:
        return PUS_SERVICE_8_FUNCTION_MGMT_STRING;
    case 0x53000009:
        return PUS_SERVICE_9_TIME_MGMT_STRING;
    case 0x53000017:
        return PUS_SERVICE_17_TEST_STRING;
    case 0x53000020:
        return PUS_SERVICE_20_PARAMETERS_STRING;
    case 0x53000200:
        return PUS_SERVICE_200_MODE_MGMT_STRING;
    case 0x53000201:
        return PUS_SERVICE_201_HEALTH_STRING;
    case 0x53010000:
        return HEALTH_TABLE_STRING;
    case 0x53010100:
        return MODE_STORE_STRING;
    case 0x53030000:
        return EVENT_MANAGER_STRING;
    case 0x53040000:
        return INTERNAL_ERROR_REPORTER_STRING;
    case 0x534f0100:
        return TC_STORE_STRING;
    case 0x534f0200:
        return TM_STORE_STRING;
    case 0x534f0300:
        return IPC_STORE_STRING;
    case 0x53500010:
        return TIME_STAMPER_STRING;
    case 0x53ffffff:
        return FSFW_OBJECTS_END_STRING;
    case 0x62000300:
        return UDP_BRIDGE_STRING;
    case 0x62000400:
        return UDP_POLLING_TASK_STRING;
    case 0x63000000:
        return CCSDS_DISTRIBUTOR_STRING;
    case 0x63000001:
        return PUS_DISTRIBUTOR_STRING;
    case 0x63000002:
        return TM_FUNNEL_STRING;
    case 0x74000001:
        return TEST_DUMMY_1_STRING;
    case 0x74000002:
        return TEST_DUMMY_2_STRING;
    case 0x74000003:
        return TEST_DUMMY_3_STRING;
    case 0x74000004:
        return TEST_DUMMY_4_STRING;
    case 0x74000005:
        return TEST_DUMMY_5_STRING;
    case 0x7400CAFE:
        return TEST_TASK_STRING;
    case 0xFFFFFFFF:
        return NO_OBJECT_STRING;
    default:
        return "UNKNOWN_OBJECT";
    }
    // Removed unreachable "return 0;" — every path returns from the switch.
}

View File

@ -0,0 +1,8 @@
#ifndef FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_
#define FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_
#include <fsfw/objectmanager/SystemObjectIF.h>
// Returns a human-readable name for the given object ID ("UNKNOWN_OBJECT" if unknown).
const char* translateObject(object_id_t object);
#endif /* FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_ */

View File

View File

@ -0,0 +1,305 @@
#! /usr/bin/python3.8
"""
@file mib_packet_content_parser.py
@brief Parses the Service Packet Definition files for all variables
@details Used by the Mib Exporter, inherits generic File Parser
"""
import re
from parserbase.mib_file_list_parser import FileListParser
from parserbase.mib_parser import FileParser
from utility.mib_csv_writer import CsvWriter
from utility.mib_printer import Printer
# Directories scanned for service packet definition headers.
PACKET_CONTENT_DEFINITION_DESTINATION = ["../../mission/pus/servicepackets/",
                                         "../../fsfw/pus/servicepackets/"]
PACKET_CONTENT_CSV_NAME = "mib_packet_data_content.csv"
PACKET_CONTENT_HEADER_COLUMN = ["Service", "Subservice", "Packet Name", "Datatype", "Name",
                                "Size [Bytes]", "Comment"]
# SQL statements for the PacketContent table (drop, create, insert).
SQL_DELETE_PACKET_DATA_CONTENT_CMD = """
DROP TABLE IF EXISTS PacketContent;
"""
SQL_CREATE_PACKET_DATA_CONTENT_CMD = """
CREATE TABLE IF NOT EXISTS PacketContent (
id INTEGER PRIMARY KEY,
service INTEGER,
subsvc INTEGER,
packetName TEXT ,
dataType TEXT,
name TEXT,
size INTEGER,
comment TEXT
)
"""
SQL_INSERT_PACKET_DATA_CMD = """
INSERT INTO PacketContent(service,subsvc,packetName,dataType,name,size,comment)
VALUES(?,?,?,?,?,?,?)
"""
def main():
    """Stand-alone entry point: parse the packet data headers and write the CSV."""
    print("PacketContentParser: Parsing for header files.")
    file_list_parser = FileListParser(PACKET_CONTENT_DEFINITION_DESTINATION)
    header_files = file_list_parser.parse_header_files(False, "Parsing packet data files: ")
    content_table = PacketContentParser(header_files).parse_files(True)
    Printer.print_content(content_table, "PacketContentParser: Printing packet data table:")
    csv_writer = CsvWriter(PACKET_CONTENT_CSV_NAME,
                           content_table, PACKET_CONTENT_HEADER_COLUMN)
    csv_writer.write_to_csv()
    csv_writer.move_csv("..")
# noinspection PyTypeChecker
class PacketContentParser(FileParser):
    """
    Parses PUS packet definitions (classes/structs in service packet headers)
    into a MIB table mapping index -> (service, subservice, packet name,
    datatype, name, size, comment).
    """

    # Initialize all needed columns
    def __init__(self, file_list):
        super().__init__(file_list)
        self.serviceColumn = 0
        self.subserviceColumn = 1
        self.classNameColumn = 2
        self.datatypeColumn = 3
        self.nameColumn = 4
        self.sizeColumn = 5
        self.commentColumn = 6
        self.lastEntryColumn = 7
        self.columnListLength = 8
        # NOTE(review): this list has columnListLength - 1 == 7 entries, so
        # index lastEntryColumn (7) is out of range; the later slice
        # [:columnListLength] therefore only ever yields 7 columns. Kept as-is.
        self.dictEntryList = list(range(self.columnListLength - 1))
        self.datatypeMatch = False
        self.ignoreFlag = False

    def _handle_file_parsing(self, file_name: str, *args: any):
        """Parse a single packet definition header file into the MIB table."""
        self_print_parsing_info = False
        if len(args) == 1 and isinstance(args[0], bool):
            self_print_parsing_info = args[0]
        # Read service from file name. Guarded so a file name without digits
        # no longer raises AttributeError on a failed match.
        service_match = re.search(r'[0-9]{1,3}', file_name)
        self.dictEntryList[self.serviceColumn] = service_match.group(0) if service_match else " "
        self.dictEntryList[self.subserviceColumn] = " "
        if self_print_parsing_info:
            print("Parsing " + file_name + " ...")
        # Context manager ensures the file handle is closed (it was leaked before).
        with open(file_name, "r") as file:
            # Scans each line for possible variables
            for line in file.readlines():
                # Looks for class and struct definitions which mark a PUS packet
                self.scan_for_class_and_struct_match_and_handle_it(line)
                # Looks for variables
                self.scan_for_variable_match_and_handle_it(line)

    # Operation taken when file parsing is complete
    # All packet content sizes are set by analysing the datatype
    def _post_parsing_operation(self):
        self.update_packet_content_sizes()

    def scan_for_class_and_struct_match_and_handle_it(self, line):
        """Detect class/struct headers which start a new packet definition."""
        class_or_struct_match = re.search(
            r'[\s]*class[\s]*([\w]*)[\s]*.*[\s]*{[\s]*([^\n]*)', line)
        if not class_or_struct_match:
            class_or_struct_match = re.search(
                r'[\s]*struct[\s]*([\w]*)[\s]*.*[\s]*{[\s]*([^\n]*)', line)
        if class_or_struct_match:
            self.dictEntryList[self.classNameColumn] = class_or_struct_match.group(1)
            if class_or_struct_match.group(2):
                self.dictEntryList[self.subserviceColumn] = \
                    self.check_for_subservice_string(class_or_struct_match.group(2))

    def scan_for_variable_match_and_handle_it(self, line):
        # Look for datatype definitions
        var_match = self.packet_content_matcher(line)
        if var_match:
            # Attempts to find variable definition inside that packet
            self.update_packet_content_table()

    def packet_content_matcher(self, line):
        """Match one line against the supported member declaration forms.

        Returns the successful match object or a falsy value.
        """
        # First step: Search for possible parameter definitions
        # Generic serialize element or datatypes
        var_match = re.search(
            r'[\w]*(?:<)?[\s]*(uint32_t|uint8_t|uint16_t|ReturnValue_t|Mode_t|Submode_t|'
            r'object_id_t|float|double|bool|ActionId_t|EventId_t|sid_t|ParameterId_t)'
            r'(?:>)?[\s]*([\w]*)[\s]*(?:[= 0-9]*)?[;](?:[\/!< ]*([^\n]*))?', line)
        if var_match:
            self.handle_generic_variable_match(var_match)
        # Serial Fixed Array List with Size Header
        else:
            var_match = re.search(r'[ \w]*<SerialFixedArrayListAdapter<([\w_, ()]*)>>'
                                  r'[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?', line)
            if var_match:
                self.handle_serial_fixed_array_match(var_match)
            # Serial Buffer, No length field
            if not var_match:
                var_match = re.search(r'[ \w]*<SerialBufferAdapter<([\w_,]*)>>'
                                      r'[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?', line)
                if not var_match:
                    var_match = re.search(r'[\w ]*(?:<)?(uint32_t|uint8_t|uint16_t)[\s]*\*'
                                          r'(?:>)?[\s]*([\w]*)[\s]*[;](?:[/!< ]*([^\n]*))?', line)
                if var_match:
                    self.handle_serial_buffer_match(var_match)
        # exclude size definition in serialize adapter or any definitions which
        # are not parameter initializations or typedefs
        if var_match and re.search("typedef", var_match.group(0)):
            var_match = False
        return var_match

    def update_packet_content_table(self):
        """Append the current working entry to the MIB table unless flagged to ignore."""
        self.index = self.index + 1
        dict_entry_tuple = tuple(self.dictEntryList[:self.columnListLength])
        if not self.ignoreFlag:
            self.mib_table.update({self.index: dict_entry_tuple})
        else:
            self.ignoreFlag = False

    def handle_generic_variable_match(self, var_match):
        self.handle_var_match(var_match)
        self.handle_exporter_string(var_match.group(3))

    def handle_serial_fixed_array_match(self, var_match):
        """Emit a size-header entry followed by the buffer entry itself."""
        if self.check_for_ignore_string(var_match.group(0)):
            pass
        else:
            fixed_array_properties = re.search(
                r'([\w_]*)[\s]*,[\s]*([\w_()]*)[\s]*,[\s]*([\w_()]*)[\s]*', var_match.group(1))
            if fixed_array_properties:
                type_of_next_buffer_size = fixed_array_properties.group(3)
                self.index = self.index + 1
                self.dictEntryList[self.datatypeColumn] = type_of_next_buffer_size
                self.dictEntryList[self.nameColumn] = "Size of following buffer"
                dict_entry_tuple = tuple(self.dictEntryList[:self.columnListLength])
                self.mib_table.update({self.index: dict_entry_tuple})
                self.handle_var_match(var_match)
                self.dictEntryList[self.datatypeColumn] = fixed_array_properties.group(1) + " *"
                self.handle_exporter_string(var_match.group(3))

    def handle_serial_buffer_match(self, var_match):
        self.handle_var_match(var_match)
        self.dictEntryList[self.datatypeColumn] = var_match.group(1) + " *"
        self.dictEntryList[self.sizeColumn] = "deduced"
        self.handle_exporter_string(var_match.group(3))

    def handle_var_match(self, var_match):
        """Fill datatype and name columns from a successful member match."""
        self.dictEntryList[self.commentColumn] = ""
        self.dictEntryList[self.sizeColumn] = ""
        self.dictEntryList[self.datatypeColumn] = var_match.group(1)
        self.dictEntryList[self.nameColumn] = var_match.group(2)

    def update_packet_content_sizes(self):
        """Derive the byte size of every table entry from its datatype column."""
        self.dictEntryList[self.sizeColumn] = " "
        for key, content in self.mib_table.items():
            content = self.attempt_uint_match(content)
            if not self.datatypeMatch:
                content = self.attempt_eight_byte_match(content)
            if not self.datatypeMatch:
                content = self.attempt_four_byte_match(content)
            if not self.datatypeMatch:
                content = self.attempt_two_byte_match(content)
            if not self.datatypeMatch:
                content = self.attempt_one_byte_match(content)
            content = self.handle_uint_buffer_type(content)
            self.mib_table.update({key: content})

    def attempt_uint_match(self, content):
        # uintX_t types: size is the bit width divided by eight.
        self.datatypeMatch = re.search(r'uint([\d]{1,2})_t', content[self.datatypeColumn])
        if self.datatypeMatch:
            content = list(content)
            content[self.sizeColumn] = round(int(self.datatypeMatch.group(1)) / 8)
            content = tuple(content)
        return content

    def attempt_four_byte_match(self, content):
        self.datatypeMatch = re.search(
            r'object_id_t|ActionId_t|Mode_t|float|sid_t|ParameterId_t',
            content[self.datatypeColumn])
        if self.datatypeMatch:
            content = list(content)
            content[self.sizeColumn] = 4
            content = tuple(content)
        return content

    def attempt_eight_byte_match(self, content):
        self.datatypeMatch = re.search('double', content[self.datatypeColumn])
        if self.datatypeMatch:
            content = list(content)
            content[self.sizeColumn] = 8
            content = tuple(content)
        return content

    def attempt_two_byte_match(self, content):
        self.datatypeMatch = re.search('ReturnValue_t|EventId_t', content[self.datatypeColumn])
        if self.datatypeMatch:
            content = list(content)
            content[self.sizeColumn] = 2
            content = tuple(content)
        return content

    def attempt_one_byte_match(self, content):
        self.datatypeMatch = re.search('Submode_t|bool', content[self.datatypeColumn])
        if self.datatypeMatch:
            content = list(content)
            content[self.sizeColumn] = 1
            content = tuple(content)
        return content

    def handle_uint_buffer_type(self, content):
        # Pointer types are buffers whose size must be deduced at runtime.
        if re.search(r'\*', content[self.datatypeColumn]):
            content = list(content)
            content[self.sizeColumn] = "deduced"
            content = tuple(content)
        return content

    # Used to scan exporter string for ignore flag or store any comments
    def handle_exporter_string(self, match):
        exporter_string = re.search(r'[ /!<]*\[EXPORT[\w]*\][\s]*:[\s]*([^\n]*)', match)
        if exporter_string:
            # Alternation fixed: previously "\[TYPE|BUFFERTYPE\]" grouped as
            # "\[TYPE" OR "BUFFERTYPE\]", so a plain [TYPE] tag yielded
            # group(1) == None and the datatype became "None *".
            type_string = re.search(r"\[(?:TYPE|BUFFERTYPE)\][\s]*([\w]*)[^\n|\[]*",
                                    exporter_string.group(0), re.IGNORECASE)
            if type_string:
                self.dictEntryList[self.datatypeColumn] = str(type_string.group(1)) + " *"
            comment_string = re.search(r"\[COMMENT\][\s]*([\w]*)[^\n|\[]*",
                                       exporter_string.group(0), re.IGNORECASE)
            if comment_string:
                self.dictEntryList[self.commentColumn] = comment_string.group(1)
            self.check_for_ignore_string(exporter_string.group(0))
            if not comment_string:
                self.dictEntryList[self.commentColumn] = exporter_string.group(1)

    # Used to transform comma separated subservice numbers into specific subservice numbers
    def check_for_subservice_string(self, full_description):
        subservice_info = re.search(
            r'^.*//[\s]*[!<]*[\s]*\[EXPORT[\w]*\][\s]*:[\s]*\[SUBSERVICE\][\s]*([^\n]*)',
            full_description, re.IGNORECASE)
        description = ' '
        if subservice_info:
            description = self.handle_subservice_string(subservice_info)
        if full_description == '':
            description = ' '
        return description

    def check_for_ignore_string(self, string):
        """Return True and set the ignore flag if the export string says IGNORE."""
        ignore_string = re.search("IGNORE", string, re.IGNORECASE)
        if ignore_string:
            self.ignoreFlag = True
            return True
        # Explicit falsy return instead of the previous implicit None.
        return False

    @staticmethod
    def handle_subservice_string(subservice_info):
        """Build a human readable enumeration like '1, 2 and 3' from a CSV list."""
        description = ' '
        subservice_list = [int(x) for x in subservice_info.group(1).split(',')]
        subservice_number = len(subservice_list)
        for i in range(subservice_number):
            description = description + str(subservice_list[i])
            if i == subservice_number - 2:
                description = description + " and "
            elif i < subservice_number - 1:
                description = description + ", "
        return description
# Script entry point.
if __name__ == "__main__":
    main()

View File

View File

@ -0,0 +1,102 @@
#! /usr/bin/python3
# -*- coding: utf-8 -*-
"""
:file: returnvalues_parser.py
:brief: Part of the MOD export tools for the SOURCE project by KSat.
TODO: Integrate into Parser Structure instead of calling this file (no cpp file generated yet)
:details:
Returnvalue exporter.
To use MySQLdb, run pip install mysqlclient or install in IDE. On Windows, Build Tools installation might be necessary.
:date: 21.11.2019
"""
from fsfwgen.parserbase.file_list_parser import FileListParser
from fsfwgen.returnvalues.returnvalues_parser import InterfaceParser, ReturnValueParser
from fsfwgen.utility.sql_writer import SqlWriter
from fsfwgen.utility.file_management import move_file
from definitions import DATABASE_NAME, BspSelect, BspFolderDict
# Export configuration flags.
EXPORT_TO_FILE = True
MOVE_CSV_FILE = True
EXPORT_TO_SQL = True
PRINT_TABLES = True
# Selected BSP determines output file names and scanned folders.
BSP_SELECT = BspSelect.BSP_HOSTED.value
BSP_FOLDER = BspFolderDict[BSP_SELECT]
CSV_RETVAL_FILENAME = f"{BSP_FOLDER}_returnvalues.csv"
CSV_MOVE_DESTINATION = "../"
FILE_SEPARATOR = ';'
MAX_STRING_LENGTH = 32
# Headers containing the class ID (interface) definitions.
INTERFACE_DEFINITION_FILES = [
    "../../fsfw/returnvalues/FwClassIds.h",
    "../../common/config/commonClassIds.h",
    f"../../{BSP_FOLDER}/fsfwconfig/returnvalues/classIds.h"
]
# Folders scanned recursively for returnvalue definitions.
RETURNVALUE_DESTINATIONS = [
    "../../mission/", "../../fsfw/", f"../../{BSP_FOLDER}"
]
# SQL statements for the Returnvalues table (drop, create, insert).
SQL_DELETE_RETURNVALUES_CMD = """
DROP TABLE IF EXISTS Returnvalues
"""
SQL_CREATE_RETURNVALUES_CMD = """
CREATE TABLE IF NOT EXISTS Returnvalues (
id INTEGER PRIMARY KEY,
code TEXT,
name TEXT,
interface TEXT,
file TEXT,
description TEXT
)
"""
SQL_INSERT_RETURNVALUES_CMD = """
INSERT INTO Returnvalues(code,name,interface,file,description)
VALUES(?,?,?,?,?)
"""
def main():
    """Stand-alone entry point: parse returnvalues, then export per the flags."""
    returnvalue_table = parse_returnvalues()
    print("")
    if EXPORT_TO_FILE:
        ReturnValueParser.export_to_file(CSV_RETVAL_FILENAME, returnvalue_table, FILE_SEPARATOR)
        if MOVE_CSV_FILE:
            move_file(file_name=CSV_RETVAL_FILENAME, destination=CSV_MOVE_DESTINATION)
    if EXPORT_TO_SQL:
        print("ReturnvalueParser: Exporting to SQL")
        sql_retval_exporter(returnvalue_table, db_filename=f"../{DATABASE_NAME}")
def parse_returnvalues():
    """ Core function to parse for the return values """
    # Interfaces (class IDs) first — needed to resolve the full returnvalue codes.
    interfaces = InterfaceParser(
        file_list=INTERFACE_DEFINITION_FILES, print_table=PRINT_TABLES
    ).parse_files()
    header_list = FileListParser(RETURNVALUE_DESTINATIONS).parse_header_files(
        True, "Parsing header file list: "
    )
    print("")
    returnvalue_parser = ReturnValueParser(interfaces, header_list, PRINT_TABLES)
    returnvalue_parser.set_moving_window_mode(moving_window_size=7)
    returnvalue_table = returnvalue_parser.parse_files(True)
    print(f"ReturnvalueParser: Found {len(returnvalue_table)} returnvalues.")
    return returnvalue_table
def sql_retval_exporter(returnvalue_table, db_filename: str):
    """Write the returnvalue table into the SQLite database at db_filename."""
    writer = SqlWriter(db_filename=db_filename)
    writer.open(SQL_CREATE_RETURNVALUES_CMD)
    for code, info in returnvalue_table.items():
        # Tuple layout follows the column order of SQL_INSERT_RETURNVALUES_CMD.
        writer.write_entries(
            SQL_INSERT_RETURNVALUES_CMD,
            (code, info[2], info[4], info[3], info[1]),
        )
    writer.commit()
    writer.close()
# Script entry point.
if __name__ == "__main__":
    main()

View File

View File

@ -0,0 +1,242 @@
"""
@file mib_subservice_parser.py
@brief Parses the Subservice definitions for the Mission Information Base.
@details Used by the MIB Exporter, inherits generic File Parser
@author R. Mueller
@date 14.11.2019
Example Stringset to scan for:
enum Subservice: uint8_t {
//!< [EXPORT] : [COMMAND] Perform connection test
CONNECTION_TEST = 1,
//!< [EXPORT] : [REPLY] Connection test reply
CONNECTION_TEST_REPORT = 2,
EVENT_TRIGGER_TEST = 128, //!< [EXPORT] : [COMMAND] Trigger test reply and test event
MULTIPLE_EVENT_TRIGGER_TEST = 129, //!< [EXPORT] : [COMMAND] Trigger multiple events (5)
MULTIPLE_CONNECTION_TEST = 130 //!< [EXPORT] : [COMMAND] Trigger multiple connection tests
};
"""
import re
from enum import Enum
from fsfwgen.parserbase.file_list_parser import FileListParser
from fsfwgen.parserbase.parser import FileParser
from utility.mib_csv_writer import CsvWriter
from utility.mib_printer import Printer
# Folders scanned for subservice enum definitions.
SUBSERVICE_DEFINITION_DESTINATION = ["../../mission/", "../../fsfw/pus/"]
SUBSERVICE_CSV_NAME = "mib_subservices.csv"
SUBSERVICE_COLUMN_HEADER = ["Service", "Subservice Name", "Subservice Number", "Type", "Comment"]
# SQL statements for the Subservice table (drop, create, insert).
SQL_DELETE_SUBSVC_CMD = """
DROP TABLE IF EXISTS Subservice;
"""
SQL_CREATE_SUBSVC_CMD = """
CREATE TABLE IF NOT EXISTS Subservice(
id INTEGER PRIMARY KEY,
service INTEGER,
subsvcName TEXT,
subsvcNumber INTEGER,
type TEXT CHECK( type IN ('TC','TM')),
comment TEXT
)
"""
SQL_INSERT_INTO_SUBSVC_CMD = """
INSERT INTO Subservice(service,subsvcName,subsvcNumber,type,comment)
VALUES(?,?,?,?,?)
"""
class SubserviceColumns(Enum):
    """
    Specifies order of MIB columns
    """
    SERVICE = 0   # PUS service number
    NAME = 1      # Subservice enum member name
    NUMBER = 2    # Subservice number
    TYPE = 3      # 'TC' or 'TM'
    COMMENT = 4   # Export comment text


# Short alias used throughout the parser below.
Clmns = SubserviceColumns
def main():
    """
    If this file is run separately, this main will be run.
    :return:
    """
    file_collector = FileListParser(SUBSERVICE_DEFINITION_DESTINATION)
    header_files = file_collector.parse_header_files(False, "Parsing subservice header files: ")
    subservice_table = SubserviceParser(header_files).parse_files()
    Printer.print_content(subservice_table, "Printing subservice table:")
    print("Found " + str(len(subservice_table)) + " subservice entries.")
    csv_writer = CsvWriter(SUBSERVICE_CSV_NAME, subservice_table, SUBSERVICE_COLUMN_HEADER)
    csv_writer.write_to_csv()
    csv_writer.move_csv("..")
# TODO: Not really happy with the multi-line implementation, but this is not trivial..
# Right now, we are not using the last lines stored, we just store the string
# of the last line (if it is only a comment). It would probably be better to always
# scan 3 or 4 lines at once. However, this is not easy either..
# pylint: disable=too-few-public-methods
class SubserviceParser(FileParser):
"""
This parser class can parse the subservice definitions.
"""
    def __init__(self, file_list: list):
        """
        :param file_list: Header files to scan for subservice enum definitions.
        """
        super().__init__(file_list)
        # Column System allows reshuffling of table columns in constructor
        self.clmns_len = SubserviceColumns.__len__()
        # this table includes the current new table entry,
        # which will be updated for target parameter
        self.dict_entry_list = list(range(self.clmns_len))
        self.dict_entry_list[Clmns.COMMENT.value] = ""
        # True while the parser is inside an "enum Subservice" block.
        self.subservice_enum_found = False
        # This list will store the last three lines for longer comments.
        self.last_line_list = ["", "", ""]
        # If an export command was found, cache the possibility of a match.
        self.possible_match_on_next_lines = False
# This is called for every file
def _handle_file_parsing(self, file_name: str, *args: any, **kwargs):
self_print_parsing_info = False
if len(args) == 1 and isinstance(args[0], bool):
self_print_parsing_info = args[0]
# Read service from file name
service_match = re.search('Service[^0-9]*([0-9]{1,3})', file_name)
if service_match:
self.dict_entry_list[Clmns.SERVICE.value] = service_match.group(1)
self.dict_entry_list[Clmns.NAME.value] = " "
file = open(file_name, "r")
if self_print_parsing_info:
print("Parsing " + file_name + " ...")
# Scans each line for possible variables
for line in file.readlines():
self.__handle_line_reading(line)
def __handle_line_reading(self, line):
"""
Handles the reading of single lines.
:param line:
:return:
"""
# Case insensitive matching
enum_match = re.search(r'[\s]*enum[\s]*Subservice([^\n]*)', line, re.IGNORECASE)
if enum_match:
self.subservice_enum_found = True
if self.subservice_enum_found:
self.__handle_enum_scanning(line)
self.last_line_list[2] = self.last_line_list[1]
self.last_line_list[1] = self.last_line_list[0]
self.last_line_list[0] = line
def __handle_enum_scanning(self, line: str):
"""
Two-line reading. First check last line. For export command.
"""
self.__scan_for_export_command(self.last_line_list[0])
subservice_match = self.__scan_subservices(line)
if subservice_match:
self.index = self.index + 1
dict_entry_tuple = tuple(self.dict_entry_list[:self.clmns_len])
self.mib_table.update({self.index: dict_entry_tuple})
self.__clear_tuple()
def __clear_tuple(self):
self.dict_entry_list[Clmns.NAME.value] = ""
self.dict_entry_list[Clmns.TYPE.value] = ""
self.dict_entry_list[Clmns.NUMBER.value] = ""
self.dict_entry_list[Clmns.COMMENT.value] = ""
self.possible_match_on_next_lines = False
def __scan_for_export_command(self, line: str) -> bool:
command_string = re.search(r"([^\[]*)\[export\][: ]*\[([\w]*)\][\s]*([^\n]*)",
line, re.IGNORECASE)
if command_string:
# Check whether there is a separated export command
# (export command is not on same line as subservice definition)
# ugly solution but has worked so far.
string = command_string.group(1).lstrip()
if len(string) <= 8:
self.possible_match_on_next_lines = True
if self.__scan_for_type(line):
self.__scan_for_comment(line)
return True
self.__add_possible_comment_string(line)
return False
def __add_possible_comment_string(self, line):
"""
If no command was found, the line might be a continuation of a comment.
Strip whitespaces and comment symbols and add to comment buffer.
"""
possible_multiline_comment = line.lstrip()
possible_multiline_comment = possible_multiline_comment.lstrip('/')
possible_multiline_comment = possible_multiline_comment.lstrip('<')
possible_multiline_comment = possible_multiline_comment.lstrip('!')
possible_multiline_comment = possible_multiline_comment.rstrip()
if len(possible_multiline_comment) > 0:
self.dict_entry_list[Clmns.COMMENT.value] += possible_multiline_comment
    def __scan_subservices(self, line):
        """
        Scan for a subservice enum entry of the form ``NAME = <number>,``.

        On a match, name and number are cached in the entry buffer. An export
        command trailing on the same line overrides a previously scanned
        stand-alone command.

        :param line: Current source line.
        :return: Truthy if the entry should be committed to the MIB table,
            falsy otherwise (also falsy once the enum end ``};`` is seen).
        """
        subservice_match = \
            re.search(r"[\s]*([\w]*)[\s]*=[\s]*([0-9]{1,3})(?:,)?(?:[ /!<>]*([^\n]*))?", line)
        if subservice_match:
            self.dict_entry_list[Clmns.NAME.value] = subservice_match.group(1)
            self.dict_entry_list[Clmns.NUMBER.value] = subservice_match.group(2)
            # I am assuming that an export string is longer than 7 chars.
            # NOTE(review): group(3) matches "" (never None) because the
            # optional trailing group can match the empty string.
            if len(subservice_match.group(3)) > 7:
                # Export command on same line overrides old commands. Read for comment.
                if self.__process_comment_string(subservice_match.group(3)):
                    return True
            # Check whether exporting was commanded on last lines
            return bool(self.possible_match_on_next_lines)
        # A closing "};" ends the subservice enum.
        if re.search(r'}[\s]*;', line):
            self.subservice_enum_found = False
        # NOTE(review): returns the match object (None here) instead of a
        # bool; callers only rely on its truthiness.
        return subservice_match
def __process_comment_string(self, comment_string) -> bool:
# look for packet type specifier
export_command_found = self.__scan_for_type(comment_string)
# Look for everything after [EXPORT] : [TYPESPECIFIER] as comment
if export_command_found:
self.__scan_for_comment(comment_string)
return export_command_found
def __scan_for_type(self, string) -> bool:
type_match = re.search(r'\[reply\]|\[tm\]', string, re.IGNORECASE)
if type_match:
self.dict_entry_list[Clmns.TYPE.value] = 'TM'
return True
type_match = re.search(r'\[command\]|\[tc\]', string, re.IGNORECASE)
if type_match:
self.dict_entry_list[Clmns.TYPE.value] = 'TC'
return True
self.dict_entry_list[Clmns.TYPE.value] = 'Unspecified'
return False
def __scan_for_comment(self, comment_string):
comment_match = re.search(r':[\s]*\[[\w]*\][\s]*([^\n]*)', comment_string)
if comment_match:
self.dict_entry_list[Clmns.COMMENT.value] = comment_match.group(1)
    def _post_parsing_operation(self):
        # No post-processing needed for this parser; the hook is part of the
        # FileParser interface and must be implemented.
        pass
# Script entry point; main() is defined elsewhere in this module.
if __name__ == "__main__":
    main()

View File

View File

@ -0,0 +1,18 @@
"""
@file mib_globals.py
@date 16.11.2019
@brief Global settings for the MIB exporter.
"""
import pprint

# Master switch for the MIB export. NOTE(review): the flags below appear to
# gate individual exporter steps — confirm against the exporter code.
doExportMiB = True
# Presumably: execute generated SQL commands against the database.
executeSQLcommands = False
# Presumably: print parsed tables to the console.
printToConsole = True
# Presumably: export parsed tables to CSV files.
exportToCSV = True
# Presumably: copy the exported file to copyDestination.
doCopyFile = False
copyDestination = "."
# Separator character for the CSV export.
fileSeparator = ';'