improved the file connector algorithm

Robin Müller 2022-08-09 10:17:31 +02:00
parent 9c412ace74
commit fd9838bcba
No known key found for this signature in database
GPG Key ID: 71B58F8A3CDFA9AC
1 changed file with 79 additions and 36 deletions


@@ -1,7 +1,8 @@
 import re
 import sys
+from dataclasses import dataclass
 from pathlib import Path
-from typing import List, Tuple, Optional, Dict
+from typing import List, Tuple, Optional, Dict, Union
 
 from fsfwgen.parserbase.parser import FileParser, VerbosityLevels
 from fsfwgen.utility.printer import PrettyPrinter
@@ -20,14 +21,34 @@ DEFAULT_MOVING_WINDOWS_SIZE = 7
 INVALID_IF_ID = -1
 
 
+@dataclass
+class FileStartHelper:
+    start_name: str
+    start_name_or_value: str
+    count: int
+    cumulative_start_index: Optional[int] = None
+
+
+@dataclass
+class FileEndHelper:
+    end_name: str
+    cumulative_end_value: Optional[int] = None
+
+
+@dataclass
+class FileConnHelper:
+    file_name: str
+    sh: Optional[FileStartHelper]
+    eh: Optional[FileEndHelper]
+
+
 class InterfaceParser(FileParser):
     def __init__(self, file_list: list, print_table: bool = False):
         super().__init__(file_list)
         self.print_table = print_table
         self.file_table_list = []
         self.file_name_table = []
-        self.start_name_list = []
-        self.end_name_list = []
+        self.file_conn_helpers: Optional[List[FileConnHelper]] = None
         self._debug_mode = False
 
     def enable_debug_mode(self, enable: bool):
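
The three helper dataclasses replace the positional list entries used before, grouping a file's start information, end information and name in one object. A minimal sketch of how one file's information would be represented, assuming the dataclasses above; the header and ID names are made up for illustration:

# Illustrative only: file name and ID names below are hypothetical.
example_entry = FileConnHelper(
    file_name="exampleSubsystemIds.h",
    sh=FileStartHelper("EXAMPLE_ID_START", "COMMON_ID_END", 12, None),
    eh=FileEndHelper("EXAMPLE_ID_END", None),
)
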
@@ -60,23 +81,28 @@ class InterfaceParser(FileParser):
         start_matched = False
         end_matched = False
         start_name = ""
-        target_end_name = ""
+        first_entry_name_or_index = ""
+        file_conn_entry = FileConnHelper(file_name, None, None)
         for line in all_lines:
             if not start_matched:
                 match = re.search(r"[\s]*([\w]*) = [\s]*([\w]*)", line)
                 if match:
                     # current_file_table.update({count: [match.group(1), match.group(2)]})
                     start_name = match.group(1)
-                    target_end_name = match.group(2)
+                    first_entry_name_or_index = match.group(2)
                     start_matched = True
             else:
                 match = re.search(r"[\s]*([\w]*),?(?:[\s]*//)?([^\n]*)?", line)
                 if match:
                     count += 1
+                    # It is expected that the last entry is explicitly marked like this.
+                    # TODO: Could also simply remember the last entry and designate it as the end
+                    #  entry as soon as "}" is found. Requires moving window mode though.
                     if re.search(r"\[EXPORT][\s]*:[\s]*\[END]", match.group(2)):
                         last_entry_name = match.group(1)
                         end_matched = True
-                        self.end_name_list.append([last_entry_name, None])
+                        file_conn_entry.eh = FileEndHelper(last_entry_name, None)
+                        break
                     else:
                         short_name = match.group(2)
                         if short_name == "":
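
The two regular expressions in this hunk drive the per-line parsing: the first captures the start name and the first value of an enumeration-style ID list, the second captures each entry name plus its trailing comment, which is then checked for the [EXPORT] : [END] marker. A small sketch of the expected behaviour on hypothetical header lines:

import re

start_line = "    EXAMPLE_ID_START = COMMON_ID_END,"
entry_line = "    EXAMPLE_LAST_ID, //!< [EXPORT] : [END]"

m = re.search(r"[\s]*([\w]*) = [\s]*([\w]*)", start_line)
# m.group(1) -> "EXAMPLE_ID_START", m.group(2) -> "COMMON_ID_END"

m = re.search(r"[\s]*([\w]*),?(?:[\s]*//)?([^\n]*)?", entry_line)
# m.group(1) -> "EXAMPLE_LAST_ID"; the comment portion in m.group(2) still contains
# the end marker, so the follow-up search detects the last entry of the file.
assert re.search(r"\[EXPORT][\s]*:[\s]*\[END]", m.group(2)) is not None
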
@@ -94,50 +120,68 @@ class InterfaceParser(FileParser):
                 "Make sure to use [EXPORT] : [END]"
             )
             sys.exit(1)
-        self.start_name_list.append([start_name, target_end_name, None, count])
+        file_conn_entry.sh = FileStartHelper(
+            start_name, first_entry_name_or_index, count, None
+        )
+        if self.file_conn_helpers is None:
+            self.file_conn_helpers = []
+        self.file_conn_helpers.append(file_conn_entry)
         self.file_name_table.append(file_name)
         self.file_table_list.append(current_file_table)
 
     def _post_parsing_operation(self):
-        self.start_name_list, self.end_name_list = self.__assign_start_end_indexes(
-            self.start_name_list, self.end_name_list
-        )
+        self.__assign_start_end_indexes()
+        self._print_start_end_info()
         for idx, file_table in enumerate(self.file_table_list):
-            self.__build_mod_interface_table(self.start_name_list[idx][2], file_table)
+            self.__build_mod_interface_table(
+                self.file_conn_helpers[idx].sh.count, file_table
+            )
         if self.print_table:
             PrettyPrinter.pprint(self.mib_table)
 
-    @staticmethod
-    def __assign_start_end_indexes(
-        start_name_list_list, end_name_list_list
-    ) -> Tuple[List, List]:
-        # TODO: Clean up this mess. Use a wrapper/helper class instead of wild list/indexes
-        # Also, the complexity of this algorithm warrants a unittest..
-        start_list_list_completed = start_name_list_list
-        end_list_list_completed = end_name_list_list
+    def _print_start_end_info(self):
+        for conn_helper in self.file_conn_helpers:
+            print(
+                f"Detected {conn_helper.sh.count} entries in {conn_helper.file_name}, "
+                f"end index {conn_helper.eh.cumulative_end_value}"
+            )
+
+    def __assign_start_end_indexes(self):
+        conn_helpers_old = self.file_conn_helpers.copy()
         all_indexes_filled = False
         max_outer_iterations = 15
         current_iteration = 0
         while not all_indexes_filled:
-            for idx, start_name_list in enumerate(start_list_list_completed):
-                if start_name_list[1].isdigit():
-                    start_list_list_completed[idx][2] = int(start_name_list[1])
-                    end_list_list_completed[idx][1] = (
-                        start_list_list_completed[idx][2]
-                        + start_list_list_completed[idx][3]
-                    )
-                target_end_name = start_name_list[1]
-                for end_name_list in end_list_list_completed:
-                    end_name = end_name_list[0]
-                    end_value = end_name_list[1]
-                    if end_name == target_end_name and end_value is not None:
-                        start_list_list_completed[idx][2] = end_value
-                        end_list_list_completed[idx][1] = (
-                            end_value + start_list_list_completed[idx][3]
-                        )
+            for idx, conn_helper in enumerate(conn_helpers_old):
+                sh = conn_helper.sh
+                # In the very first file, the first index might/will be a number
+                if sh.start_name_or_value.isdigit():
+                    sh.cumulative_start_index = int(sh.start_name_or_value)
+                    conn_helpers_old[idx].eh.cumulative_end_value = (
+                        sh.cumulative_start_index + sh.count
+                    )
+                # Now, we try to connect the start and end of the files using the start and end
+                # names respectively
+                end_name_to_search = conn_helper.sh.start_name_or_value
+                for end_name_helper in conn_helpers_old:
+                    eh = end_name_helper.eh
+                    if (
+                        eh.end_name == end_name_to_search
+                        and eh.cumulative_end_value is not None
+                    ):
+                        self.file_conn_helpers[
+                            idx
+                        ].sh.cumulative_start_index = eh.cumulative_end_value
+                        self.file_conn_helpers[idx].eh.cumulative_end_value = (
+                            eh.cumulative_end_value
+                            + self.file_conn_helpers[idx].sh.count
+                        )
             all_indexes_filled = True
-            for idx, start_name_list in enumerate(start_list_list_completed):
-                if start_name_list[2] is None or end_name_list_list[idx][1] is None:
+            for idx, conn_helper in enumerate(conn_helpers_old):
+                if (
+                    conn_helper.sh.cumulative_start_index is None
+                    or conn_helper.eh.cumulative_end_value is None
+                ):
                     all_indexes_filled = False
             current_iteration += 1
             if current_iteration >= max_outer_iterations:
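
The rewritten __assign_start_end_indexes resolves the cumulative ID ranges by chaining files over repeated passes: a file with a numeric start value anchors the chain, and every other file looks for the file whose end name matches its own start name. A simplified sketch of that idea, assuming the dataclasses introduced above and made-up file contents:

# Assumes the FileStartHelper/FileEndHelper/FileConnHelper dataclasses from the diff.
# Hypothetical inputs: common.h starts at the literal value 0 with 10 entries,
# subsystem.h starts at COMMON_ID_END with 5 entries.
helpers = [
    FileConnHelper(
        "subsystem.h",
        FileStartHelper("SUBSYSTEM_ID_START", "COMMON_ID_END", 5, None),
        FileEndHelper("SUBSYSTEM_ID_END", None),
    ),
    FileConnHelper(
        "common.h",
        FileStartHelper("COMMON_ID_START", "0", 10, None),
        FileEndHelper("COMMON_ID_END", None),
    ),
]
for _ in range(15):  # like the parser, cap the number of resolution passes
    for helper in helpers:
        sh, eh = helper.sh, helper.eh
        if sh.start_name_or_value.isdigit():
            # Numeric start value: the cumulative range is known immediately
            sh.cumulative_start_index = int(sh.start_name_or_value)
            eh.cumulative_end_value = sh.cumulative_start_index + sh.count
            continue
        # Symbolic start value: chain to the file whose end name matches it
        for other in helpers:
            if (
                other.eh.end_name == sh.start_name_or_value
                and other.eh.cumulative_end_value is not None
            ):
                sh.cumulative_start_index = other.eh.cumulative_end_value
                eh.cumulative_end_value = sh.cumulative_start_index + sh.count
    if all(h.eh.cumulative_end_value is not None for h in helpers):
        break
# Result: common.h covers IDs [0, 10), subsystem.h covers IDs [10, 15).
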
@@ -146,7 +190,6 @@ class InterfaceParser(FileParser):
                     "given number of maximum outer iterations!"
                 )
                 sys.exit(1)
-        return start_list_list_completed, end_list_list_completed
 
     def __build_mod_interface_table(self, count_start: int, interface_dict: dict):
         dict_to_build = dict()