Compare commits

...

12 Commits
v0.3.1 ... main

Author SHA1 Message Date
7eb6a6f7a8
changelog 2025-03-18 14:37:06 +01:00
bb7886f063
bump fsfwgen version 2025-03-18 14:36:22 +01:00
2d7a2a09b8 Merge pull request 'Added new compile time const event parsing' (#4) from baumgartl/events into main
Reviewed-on: #4
Reviewed-by: Robin Müller <muellerr@irs.uni-stuttgart.de>
2025-03-18 14:34:56 +01:00
76139f3934 added new compile time const event parsing 2025-03-14 13:22:10 +01:00
b9c8c9880f
prep v0.3.4 2025-01-13 10:40:41 +01:00
7c14b2a2e0
prep v0.3.4 2025-01-13 10:38:07 +01:00
2403f3a019 prep v0.3.3 2025-01-13 10:33:51 +01:00
f52073d117
some minor updates for linting and formatting 2025-01-13 10:28:02 +01:00
2b31b91237 Merge pull request 'Event Parser Improvements' (#2) from meier/event-parser-improvements into main
Reviewed-on: #2
Reviewed-by: Robin Müller <muellerr@irs.uni-stuttgart.de>
2025-01-09 12:30:16 +01:00
Jakob Meier
497781555d - supports definition of event ids in hexadecimal format
- subsystem id can be extracted also when the subsystem id is not defined separately in the header file
2024-12-22 18:16:59 +01:00
bbe55592ec
prep v0.3.2 2023-03-24 15:37:36 +01:00
fe6c68d97b
add explicit handling for duplicate event names
when writing the translation file
2023-03-24 15:33:11 +01:00
12 changed files with 122 additions and 197 deletions

View File

@@ -8,6 +8,24 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
# [unreleased]
# [v0.4.0]
- Update event handling for new `constexpr` templated arguments
# [v0.3.4]
- Hotfixes for pyproject.toml file
# [v0.3.3]
- Fixes for event parsing
- Removed `setup.cfg` and `setup.py`
# [v0.3.2]
- Added handling for duplicate event names when writing the event translation
file.
# [v0.3.1]
- Sorted returnvalue export by raw returnvalue.
@@ -19,4 +37,4 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
# [v0.2.0]
- Remove `logging` module and move to more pythonic logging usage.
- Remove `logging` module and move to more pythonic logging usage.

View File

@@ -1,5 +1,5 @@
__version__ = "0.3.1"
__version__ = "0.3.2"
VERSION_MAJOR = 0
VERSION_MINOR = 3
VERSION_REVISION = 1
VERSION_REVISION = 2

View File

@@ -64,9 +64,7 @@ EventDictT = Dict[int, EventEntry]
class EventParser(FileParser):
def __init__(
self, file_list: List[Path], interface_list, moving_window_size: int = 7
):
def __init__(self, file_list: List[Path], interface_list, moving_window_size: int = 7):
super().__init__(file_list)
self.set_moving_window_mode(moving_window_size)
self.interfaces = interface_list
@@ -91,16 +89,14 @@ class EventParser(FileParser):
**kwargs,
):
subsystem_id_assignment_match = re.search(
rf"([\w]*)[\s]*=[\s]*{SUBSYSTEM_ID_NAMESPACE}::([A-Z_0-9]*);",
rf"{SUBSYSTEM_ID_NAMESPACE}::([A-Z_0-9]*)",
moving_window[self.moving_window_center_idx],
)
if subsystem_id_assignment_match:
# For now, it is assumed that there is only going to be one subsystem ID per
# class / source file
try:
self.current_id = self.interfaces[
subsystem_id_assignment_match.group(2)
][0]
self.current_id = self.interfaces[subsystem_id_assignment_match.group(1)][0]
self.my_id = self.return_number_from_string(self.current_id)
except KeyError as e:
print(f"Key not found: {e}")
@@ -165,6 +161,8 @@ class EventParser(FileParser):
)
severity = event_full_match.group(3)
else:
if event_full_match.group(1) == "EV_REPLY_INVALID_SIZE":
print(f"Group 3: {event_full_match.group(3)}")
full_id = (self.my_id * 100) + self.return_number_from_string(
event_full_match.group(3)
)
@@ -177,9 +175,7 @@ class EventParser(FileParser):
f"Name: {self.mib_table.get(full_id).name}| "
f"Description: {self.mib_table.get(full_id).description}"
)
self.mib_table.update(
{full_id: EventEntry(name, severity, description, file_name)}
)
self.mib_table.update({full_id: EventEntry(name, severity, description, file_name)})
self.count = self.count + 1
@staticmethod
@@ -188,19 +184,25 @@ class EventParser(FileParser):
# One line event definition.
regex_string = (
r"static const(?:expr)? Event[\s]*([\w]*)[\s]*=[\s]*"
r"MAKE_EVENT\(([0-9]{1,3}),[\s]*severity::([A-Z]*)\)[\s]*;"
)
else:
regex_string = (
r"static const(?:expr)? Event[\s]*([\w]*)[\s]*=[\s]*"
r"event::makeEvent\(([\w]*),[\s]*([0-9]{1,3})[\s]*,[\s]*severity::([A-Z]*)\)[\s]*;"
r"MAKE_EVENT\((0x[0-9a-fA-F]+|[0-9]{1,3}),[\s]*severity::([A-Z]*)\)[\s]*;"
)
return re.search(regex_string, full_string)
# Non compiletime const version kept for backwards compatibility
regex_string = r"static const(?:expr)? Event\s*([\w]+)\s*=\s*event::makeEvent\(([\w:]+),\s*(0x[0-9a-fA-F]+|[0-9]{1,3})\s*,\s*severity::([A-Z]+)\)\s*;"
event_full_match = re.search(regex_string, full_string)
# Using old, non compiletime const version
if event_full_match:
return event_full_match
# Using compiletime const version
regex_string = r"static const(?:expr)? Event\s*([\w]+)\s*=\s*event::makeEvent<([\w:]+),\s*(0x[0-9a-fA-F]+|[0-9]{1,3})\s*,\s*severity::([A-Z]+)>\(\)\s*;"
event_full_match = re.search(regex_string, full_string)
return event_full_match
def __build_multi_line_event_string(
self, first_line: str, moving_window: List[str]
) -> str:
def __build_multi_line_event_string(self, first_line: str, moving_window: List[str]) -> str:
return self._build_multi_line_string_generic(
first_line=first_line, moving_window=moving_window
)
@@ -274,10 +276,16 @@ def write_translation_source_file(
event_id = entry[0]
event_value = entry[1]
name = event_value.name
if name not in lut:
definitions += f"const char *{name}_STRING " f'= "{name}";\n'
function += f" case ({event_id}):\n " f"return {name}_STRING;\n"
lut.update({name: event_value})
if name in lut:
_LOGGER.warning(
"Duplicate name detected when generating event translation source file"
)
_LOGGER.warning(f"Name: {name}, Event Entry: {event_value}")
name = f"{name}_{event_id}"
_LOGGER.info(f"Created unique name {name}")
definitions += f"const char *{name}_STRING " f'= "{name}";\n'
function += f" case ({event_id}):\n " f"return {name}_STRING;\n"
lut.update({name: event_value})
function += ' default:\n return "UNKNOWN_EVENT";\n'
out.write(
f"/**\n * @brief Auto-generated event translation file. "

View File

@@ -70,11 +70,11 @@ def write_translation_file(filename: str, list_of_entries, date_string_full: str
def write_translation_header_file(filename: str = "translateObjects.h"):
file = open(filename, "w")
file.write(
f"#ifndef FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_\n"
f"#define FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_\n\n"
f"#include <fsfw/objectmanager/SystemObjectIF.h>\n\n"
f"const char *translateObject(object_id_t object);\n\n"
f"#endif /* FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_ */\n"
"#ifndef FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_\n"
"#define FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_\n\n"
"#include <fsfw/objectmanager/SystemObjectIF.h>\n\n"
"const char *translateObject(object_id_t object);\n\n"
"#endif /* FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_ */\n"
)

View File

@@ -1,6 +1,7 @@
"""Generic File Parser class
Used by parse header files. Implemented as class in case header parser becomes more complex
"""
import logging
from pathlib import Path
from typing import Union, List
@@ -42,9 +43,7 @@ class FileListParser:
"""
print(printout_string, end="")
for directory in self.directory_list:
self.__get_header_file_list(
directory, search_recursively, print_current_dir
)
self.__get_header_file_list(directory, search_recursively, print_current_dir)
print(str(len(self.header_files)) + " header files were found.")
# g.PP.pprint(self.header_files)
return self.header_files
@@ -59,11 +58,7 @@ class FileListParser:
if print_current_dir:
print(f"Parsing header files in: {base_directory}")
for entry in base_directory.iterdir():
if (
entry.is_file()
and entry.suffix == ".h"
and entry.as_posix()[0] not in [".", "_"]
):
if entry.is_file() and entry.suffix == ".h" and entry.as_posix()[0] not in [".", "_"]:
local_header_files.append(entry)
if seach_recursively:
if entry.is_dir():

View File

@@ -11,6 +11,7 @@ Child classes fill out the MIB table (self.mib_table)
@author R. Mueller
@date 14.11.2019
"""
import dataclasses
import enum
import re
@@ -108,7 +109,7 @@ class FileParser:
:return: Returns the mib table dictionary.
"""
if self.file_list_empty:
print(f"Nothing to parse, supplied file list is empty!")
print("Nothing to parse, supplied file list is empty!")
return self.mib_table
if self.__parser_mode == FileParserModes.REGULAR:
@@ -173,10 +174,7 @@ class FileParser:
return
moving_window = [""] * moving_window_size
for line_idx, line in enumerate(all_lines):
if (
self.__debug_moving_window
and self.__debug_moving_window_filename in file_name
):
if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
print(f"Moving window pre line anaylsis line {line_idx}")
print(moving_window)
# The moving window will start with only the bottom being in the file
@@ -194,10 +192,7 @@ class FileParser:
for idx in range(moving_window_size - 1):
moving_window[idx] = moving_window[idx + 1]
moving_window[moving_window_size - 1] = line
if (
self.__debug_moving_window
and self.__debug_moving_window_filename in file_name
):
if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
print(f"Moving window post line analysis line {line_idx}")
print(moving_window)
self._handle_file_parsing_moving_window(
@@ -206,22 +201,16 @@ class FileParser:
# Now the moving window moved past the end of the file. Sections which are outside
# the file are assigned an empty string until the window has moved out of file completely
for remaining_windows_idx in range(moving_window_size):
if (
self.__debug_moving_window
and self.__debug_moving_window_filename in file_name
):
print(f"Moving window pre line analysis post EOF")
if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
print("Moving window pre line analysis post EOF")
print(moving_window)
num_entries_to_clear = remaining_windows_idx + 1
for idx_to_clear in range(num_entries_to_clear):
moving_window[moving_window_size - 1 - idx_to_clear] = ""
for idx_to_reassign in range(moving_window_size - 1 - num_entries_to_clear):
moving_window[idx_to_reassign] = moving_window[idx_to_reassign + 1]
if (
self.__debug_moving_window
and self.__debug_moving_window_filename in file_name
):
print(f"Moving window post line anaylsis post EOF")
if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
print("Moving window post line analysis post EOF")
print(moving_window)
pass
@@ -241,9 +230,7 @@ class FileParser:
all_lines = file.readlines()
return all_lines
def _build_multi_line_string_generic(
self, first_line: str, moving_window: List[str]
) -> str:
def _build_multi_line_string_generic(self, first_line: str, moving_window: List[str]) -> str:
"""This function transforms a multi line match into a one line match by searching for the
semicolon at the string end"""
all_lines = first_line.rstrip()
@@ -265,16 +252,12 @@ class FileParser:
) -> List[MetaInfo]:
current_idx = self._moving_window_center_idx - 1
# Look at the line above first
export_match = re.search(
r"\[EXPORT]\s*:\s*\[(\w*)]", moving_window[current_idx]
)
export_match = re.search(r"\[EXPORT]\s*:\s*\[(\w*)]", moving_window[current_idx])
if not export_match:
while True:
if re.search(break_pattern, moving_window[current_idx]):
break
export_match = re.search(
r"\[EXPORT]\s*:\s*\[(\w*)]", moving_window[current_idx]
)
export_match = re.search(r"\[EXPORT]\s*:\s*\[(\w*)]", moving_window[current_idx])
if export_match or current_idx <= 0:
break
current_idx -= 1
@@ -289,9 +272,7 @@ class FileParser:
string_to_add = string_to_add.rstrip()
descrip_string += string_to_add
current_build_idx += 1
resulting_description = re.search(
r"\[EXPORT]\s*:\s*\[\w*]\s(.*)", descrip_string
)
resulting_description = re.search(r"\[EXPORT]\s*:\s*\[\w*]\s(.*)", descrip_string)
if resulting_description:
info.value = resulting_description.group(1)

View File

@@ -110,18 +110,14 @@ class InterfaceParser(FileParser):
current_file_table.update({count: [match.group(1), short_name]})
if not start_matched:
print("No start match detected when parsing interface files..")
print(
f"Current file: {file_name} | Make sure to include a start definition"
)
print(f"Current file: {file_name} | Make sure to include a start definition")
sys.exit(1)
if not end_matched:
raise ValueError(
f"No end match detected when parsing interface file {file_name}. "
f"Make sure to use [EXPORT] : [END]"
)
file_conn_entry.sh = FileStartHelper(
start_name, first_entry_name_or_index, count, None
)
file_conn_entry.sh = FileStartHelper(start_name, first_entry_name_or_index, count, None)
if self.file_conn_helpers is None:
self.file_conn_helpers = []
self.file_conn_helpers.append(file_conn_entry)
@@ -164,16 +160,12 @@ class InterfaceParser(FileParser):
end_name_to_search = conn_helper.sh.start_name_or_value
for end_name_helper in conn_helpers_old:
eh = end_name_helper.eh
if (
eh.end_name == end_name_to_search
and eh.cumulative_end_value is not None
):
if eh.end_name == end_name_to_search and eh.cumulative_end_value is not None:
self.file_conn_helpers[
idx
].sh.cumulative_start_index = eh.cumulative_end_value
self.file_conn_helpers[idx].eh.cumulative_end_value = (
eh.cumulative_end_value
+ self.file_conn_helpers[idx].sh.count
eh.cumulative_end_value + self.file_conn_helpers[idx].sh.count
)
all_indexes_filled = True
for idx, conn_helper in enumerate(conn_helpers_old):
@@ -333,9 +325,7 @@ class ReturnValueParser(FileParser):
full_returnvalue_string,
)
if returnvalue_match:
number_match = get_number_from_dec_or_hex_str(
returnvalue_match.group(2)
)
number_match = get_number_from_dec_or_hex_str(returnvalue_match.group(2))
else:
number_match = get_number_from_dec_or_hex_str(returnvalue_match.group(3))
if returnvalue_match:
@@ -348,7 +338,7 @@ class ReturnValueParser(FileParser):
return
if number_match == INVALID_IF_ID:
_LOGGER.warning(f"Invalid number match detected for file {file_name}")
_LOGGER.warning(f"Match groups:")
_LOGGER.warning("Match groups:")
for group in returnvalue_match.groups():
_LOGGER.info(group)
self.__handle_returnvalue_match(
@@ -403,9 +393,7 @@ class ReturnValueParser(FileParser):
Returns whether the interface ID was found successfully in the IF ID header files
"""
if self.get_verbosity() == VerbosityLevels.DEBUG:
_LOGGER.info(
f"Interface ID {interface_id_match.group(1)} found in {file_name}"
)
_LOGGER.info(f"Interface ID {interface_id_match.group(1)} found in {file_name}")
if_id_entry = self.interfaces.get(interface_id_match.group(1))
if if_id_entry is not None:
self.current_interface_id_entries["ID"] = if_id_entry[0]
@@ -414,9 +402,9 @@ class ReturnValueParser(FileParser):
f"Interface ID {interface_id_match.group(1)} not found in IF ID dictionary"
)
return False
self.current_interface_id_entries["Name"] = self.interfaces[
interface_id_match.group(1)
][1].lstrip()
self.current_interface_id_entries["Name"] = self.interfaces[interface_id_match.group(1)][
1
].lstrip()
self.current_interface_id_entries["FullName"] = interface_id_match.group(1)
if self.get_verbosity() == VerbosityLevels.DEBUG:
current_id = self.current_interface_id_entries["ID"]

View File

@@ -8,9 +8,7 @@ from pathlib import Path
_LOGGER = logging.getLogger(__name__)
def copy_file(
filename: Path, destination: Path = "", delete_existing_file: bool = False
):
def copy_file(filename: Path, destination: Path = "", delete_existing_file: bool = False):
if not os.path.exists(filename):
_LOGGER.warning(f"File {filename} does not exist")
return

39
lint.py
View File

@@ -1,39 +0,0 @@
#!/usr/bin/env python3
import os
import sys
def main():
exclude_dirs_flag = ""
if not os.path.exists("setup.cfg"):
exclude_dirs_flag = (
"--exclude .git,__pycache__,docs/conf.py,old,build,dist,venv"
)
additional_flags_both_steps = "--count --statistics"
additional_flags_first_step = "--select=E9,F63,F7,F82 --show-source"
flake8_first_step_cmd = (
f"flake8 . {additional_flags_both_steps} "
f"{additional_flags_first_step} {exclude_dirs_flag}"
)
status = os.system(flake8_first_step_cmd)
if os.name == "nt":
if status != 0:
print(f"Flake8 linter errors with status {status}")
else:
if os.WEXITSTATUS(status) != 0:
print(f"Flake8 linter errors with status {status}")
sys.exit(0)
additional_flags_second_step = (
'--exit-zero --max-complexity=10 --per-file-ignores="__init__.py:F401"'
)
if not os.path.exists("setup.cfg"):
additional_flags_second_step += " --max-line-length=100"
flake8_second_step_cmd = (
f"flake8 . {additional_flags_both_steps} {additional_flags_second_step}"
f" {exclude_dirs_flag}"
)
os.system(flake8_second_step_cmd)
if __name__ == "__main__":
main()

View File

@@ -3,4 +3,42 @@ requires = [
"setuptools>=42",
"wheel"
]
build-backend = "setuptools.build_meta"
build-backend = "setuptools.build_meta"
[project]
name = "fsfwgen"
description = "FSFW Generator Core"
version = "0.4.0"
license = { text = "Apache-2.0" }
authors = [
{name = "Robin Mueller", email = "robin.mueller.m@gmail.com"}
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Topic :: Communications",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Scientific/Engineering"
]
dependencies = [
"colorlog~=6.0"
]
[project.urls]
"Homepage" = "https://egit.irs.uni-stuttgart.de/fsfw/fsfwgen"
[tool.ruff]
exclude = [
".git",
"venv",
"docs"
]
line-length = 100

View File

@@ -1,50 +0,0 @@
[metadata]
name = fsfwgen
description = FSFW Generator Core
version = attr: fsfwgen.__version__
long_description = file: README.md, NOTICE
long_description_content_type = text/markdown
license = Apache-2.0
author = Robin Mueller
author_email = muellerr@irs.uni-stuttgart.de
platform = any
url = https://egit.irs.uni-stuttgart.de/fsfw/fsfwgen
classifiers =
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: Apache Software License
Natural Language :: English
Operating System :: POSIX
Operating System :: Microsoft :: Windows
Programming Language :: Python :: 3
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Topic :: Communications
Topic :: Software Development :: Libraries
Topic :: Software Development :: Libraries :: Python Modules
Topic :: Scientific/Engineering
[options]
install_requires =
colorlog>=6.0.0
package_dir =
= .
packages = find:
python_requires = >=3.8
[flake8]
max-line-length = 100
ignore = D203, W503
exclude =
.git,
__pycache__,
docs/conf.py,
old,
build,
dist,
venv
max-complexity = 10
extend-ignore =
# See https://github.com/PyCQA/pycodestyle/issues/373
E203,

View File

@@ -1,12 +0,0 @@
#!/usr/bin/python3
"""
We do the package handling in the static setup.cfg but include an empty setup.py
to allow editable installs https://packaging.python.org/tutorials/packaging-projects/
and provide extensibility
"""
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup()