init repository
This commit is contained in:
commit
7deef94c04
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
__pycache__
|
202
LICENSE
Normal file
202
LICENSE
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
14
NOTICE
Normal file
14
NOTICE
Normal file
@ -0,0 +1,14 @@
|
||||
Copyright 2020 KSat e.V. Stuttgart
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
5
README.md
Normal file
5
README.md
Normal file
@ -0,0 +1,5 @@
|
||||
# FSFW Generators
|
||||
|
||||
Generic core module to generate various files or databases like the Mission Information Base (MIB),
|
||||
returnvalue and event lists, mode tables for on-board software (OBSW) using the
|
||||
Flight Software Framework (FSFW)
|
0
__init__.py
Normal file
0
__init__.py
Normal file
0
events/__init__.py
Normal file
0
events/__init__.py
Normal file
267
events/event_parser.py
Normal file
267
events/event_parser.py
Normal file
@ -0,0 +1,267 @@
|
||||
import re
|
||||
|
||||
from modgen.parserbase.parser import FileParser
|
||||
|
||||
EVENT_ENTRY_NAME_IDX = 0
|
||||
EVENT_ENTRY_SEVERITY_IDX = 1
|
||||
EVENT_ENTRY_INFO_IDX = 2
|
||||
EVENT_SOURCE_FILE_IDX = 3
|
||||
FSFW_EVENT_HEADER_INCLUDE = "#include <fsfw/events/Event.h>"
|
||||
DEFAULT_MOVING_WINDOWS_SIZE = 7
|
||||
SUBSYSTEM_ID_NAMESPACE = "SUBSYSTEM_ID"
|
||||
|
||||
EVENT_NAME_IDX = 1
|
||||
|
||||
|
||||
class SubsystemDefinitionParser(FileParser):
    """Parses subsystem ID definitions of the form ``NAME = <number>`` from source files
    and stores them in the MIB table as ``{name: [number_string]}``."""

    def __init__(self, file_list):
        super().__init__(file_list)
        self.moving_window_center_idx = 3

    def _handle_file_parsing(self, file_name: str, *args, **kwargs):
        """Regular mode: scan a whole file line by line for subsystem ID assignments."""
        # Context manager guarantees the handle is closed on all paths
        # (the original code leaked it).
        with open(file_name, "r") as file:
            for line in file.readlines():
                match = re.search(r'([A-Z0-9_]*) = ([0-9]{1,3})', line)
                if match:
                    self.mib_table.update({match.group(1): [match.group(2)]})

    def _handle_file_parsing_moving_window(
            self, file_name: str, current_line: int, moving_window_size: int, moving_window: list,
            *args, **kwargs):
        """Moving-window mode: only the center line of the window is inspected."""
        match = re.search(
            r'([A-Z0-9_]*) = ([0-9]{1,3})', moving_window[self.moving_window_center_idx]
        )
        if match:
            self.mib_table.update({match.group(1): [match.group(2)]})

    def _post_parsing_operation(self):
        # No table post-processing needed for subsystem definitions.
        pass
|
||||
|
||||
|
||||
class EventParser(FileParser):
    """Parses FSFW event definitions (``MAKE_EVENT`` and ``event::makeEvent`` APIs)
    from C++ source files.

    Fills the MIB table with ``{full_id: (name, severity, description, file_name)}``
    where ``full_id = subsystem_id * 100 + event_number``.
    """

    def __init__(self, file_list, interface_list):
        super().__init__(file_list)
        # Mapping from subsystem ID names to their numeric representation,
        # e.g. as produced by SubsystemDefinitionParser.
        self.interfaces = interface_list
        # Number of events found in the file currently being parsed.
        self.count = 0
        # Numeric subsystem ID of the file currently being parsed.
        self.my_id = 0
        # String representation of the current subsystem ID.
        self.current_id = 0
        self.last_lines = ["", "", ""]
        self.moving_window_center_idx = 3

    def _handle_file_parsing(self, file_name: str, *args, **kwargs):
        """Regular mode: legacy two-line parsing of a whole file."""
        # Try UTF-8 first, fall back to cp1252 for legacy files. Context managers
        # ensure both handles are closed (the original code leaked them).
        try:
            with open(file_name, 'r', encoding='utf-8') as file:
                all_lines = file.readlines()
        except UnicodeDecodeError:
            with open(file_name, 'r', encoding='cp1252') as file:
                all_lines = file.readlines()
        for line in all_lines:
            self.__handle_line_reading(line, file_name)
        if self.count > 0:
            print("File " + file_name + " contained " + str(self.count) + " events.")
            self.count = 0

    def _handle_file_parsing_moving_window(
            self, file_name: str, current_line: int, moving_window_size: int, moving_window: list,
            *args, **kwargs):
        """Moving-window mode: check the window's center line for subsystem ID
        assignments and event definitions."""
        subsystem_id_assignment_match = re.search(
            rf"([\w]*)[\s]*=[\s]*{SUBSYSTEM_ID_NAMESPACE}::([A-Z_0-9]*);",
            moving_window[self.moving_window_center_idx]
        )
        if subsystem_id_assignment_match:
            # For now, it is assumed that there is only going to be one subsystem ID
            # per class / source file.
            try:
                self.current_id = self.interfaces[subsystem_id_assignment_match.group(2)][0]
                self.my_id = self.return_number_from_string(self.current_id)
            except KeyError as e:
                print(f"Key not found: {e}")

        # Now try to look for event definitions. The moving window allows multi-line
        # event definitions. These two variants need to be checked.
        event_match = re.match(
            r"[\s]*static const(?:expr)?[\s]*Event[\s]*([\w]*)[\s]*=[\s]*event::makeEvent[^\n]*",
            moving_window[self.moving_window_center_idx]
        )
        macro_api_match = False
        if not event_match:
            event_match = re.match(
                r"[\s]*static[\s]*const(?:expr)?[\s]*Event[\s]*([\w]*)[\s]*=[\s]*MAKE_EVENT[^\n]*",
                moving_window[self.moving_window_center_idx]
            )
            if event_match:
                macro_api_match = True
        if event_match:
            self.__handle_event_match(
                event_match=event_match, macro_api_match=macro_api_match, moving_window=moving_window,
                file_name=file_name
            )

    def __handle_event_match(self, event_match, macro_api_match: bool, moving_window: list, file_name: str):
        """Extract name, full ID and severity from a matched event definition and
        store the result in the MIB table."""
        event_full_match = False
        if ";" in event_match.group(0):
            # Definition is complete on a single line; run the full regex.
            event_full_match = self.__handle_one_line_event_match(
                macro_api_match=macro_api_match, moving_window=moving_window
            )
        # Description will be parsed separately later
        description = " "
        if event_full_match:
            name = event_match.group(EVENT_NAME_IDX)
            if macro_api_match:
                # MAKE_EVENT(<number>, severity::<SEVERITY>)
                full_id = (self.my_id * 100) + self.return_number_from_string(event_full_match.group(2))
                severity = event_full_match.group(3)
            else:
                # event::makeEvent(<subsystem>, <number>, severity::<SEVERITY>)
                full_id = (self.my_id * 100) + self.return_number_from_string(event_full_match.group(3))
                severity = event_full_match.group(4)
            self.mib_table.update({full_id: (name, severity, description, file_name)})
            self.count = self.count + 1

    def __handle_one_line_event_match(self, macro_api_match: bool, moving_window: list):
        """Run the full one-line event regex against the window center line.

        :return: The match object if the definition was complete, else None.
        """
        if macro_api_match:
            # One line event definition.
            regex_string = \
                r"static const(?:expr)? Event[\s]*([\w]*)[\s]*=[\s]*" \
                r"MAKE_EVENT\(([0-9]{1,3}),[\s]*severity::([A-Z]*)\)[\s]*;"
        else:
            regex_string = \
                r"static const(?:expr)? Event[\s]*([\w]*)[\s]*=[\s]*" \
                r"event::makeEvent\(([\w]*),[\s]*([0-9]{1,3})[\s]*,[\s]*severity::([A-Z]*)\)[\s]*;"
        event_full_match = re.search(regex_string, moving_window[self.moving_window_center_idx])
        return event_full_match

    def _post_parsing_operation(self):
        # No table post-processing needed for events.
        pass

    def __handle_line_reading(self, line, file_name):
        """Legacy parsing used by the regular mode: joins the previous line with the
        current one so two-line definitions can still be matched."""
        if not self.last_lines[0] == '\n':
            twolines = self.last_lines[0] + ' ' + line.strip()
        else:
            twolines = ''
        match1 = re.search(r"SUBSYSTEM_ID[\s]*=[\s]*SUBSYSTEM_ID::([A-Z_0-9]*);", twolines)
        if match1:
            self.current_id = self.interfaces[match1.group(1)][0]
            self.my_id = self.return_number_from_string(self.current_id)
        match = re.search(
            r"(//)?[\t ]*static const(?:expr)? Event[\s]*([A-Z_0-9]*)[\s]*=[\s]*"
            r"MAKE_EVENT\(([0-9]{1,2}),[\s]*severity::([A-Z]*)\);[\t ]*(//!<)?([^\n]*)", twolines
        )
        if match:
            if match.group(1):
                # Commented-out event definition: remember the line and skip it.
                self.last_lines[0] = line
                return
            description = " "
            if match.group(6):
                description = self.clean_up_description(match.group(6))
            string_to_add = match.group(2)
            full_id = (self.my_id * 100) + self.return_number_from_string(match.group(3))
            severity = match.group(4)
            if full_id in self.mib_table:
                # NOTE(review): duplicate event IDs are silently overwritten here;
                # the diagnostic print was deliberately disabled in the original.
                pass
            self.mib_table.update({full_id: (string_to_add, severity, description, file_name)})
            self.count = self.count + 1
        self.last_lines[0] = line

    def build_checked_string(self, first_part, second_part):
        """Concatenate first_part with the CamelCase form of second_part, truncating
        results longer than 16 characters."""
        my_str = first_part + self.convert(second_part)
        if len(my_str) > 16:
            print(f"EventParser: Entry: {my_str} too long. Will truncate.")
            my_str = my_str[0:14]
        return my_str

    @staticmethod
    def return_number_from_string(a_string):
        """Parse a decimal or 0x-prefixed hexadecimal number string.

        Prints an error and returns 0 for any other representation.
        """
        if a_string.startswith('0x'):
            return int(a_string, 16)
        elif a_string.isdigit():
            return int(a_string)
        else:
            print('EventParser: Illegal number representation: ' + a_string)
            return 0

    @staticmethod
    def convert(name):
        """Convert an UPPER_SNAKE_CASE name to CamelCase."""
        single_strings = name.split('_')
        new_string = ''
        for one_string in single_strings:
            one_string = one_string.lower()
            one_string = one_string.capitalize()
            new_string = new_string + one_string
        return new_string

    @staticmethod
    def clean_up_description(description):
        """Strip doxygen comment markers and leading whitespace from an event
        description; returns a single space for empty descriptions."""
        description = description.lstrip('//!<>')
        description = description.lstrip()
        if description == '':
            description = ' '
        return description
|
||||
|
||||
|
||||
def export_to_file(filename: str, event_list: list, file_separator: str):
    """Write the event list to a text file, one separator-delimited event per line.

    :param filename: Output file path.
    :param event_list: List of (event_id, (name, severity, info, source_file)) entries.
    :param file_separator: Column separator, e.g. ';' or ','.
    """
    # Context manager guarantees the file is flushed and closed on all paths
    # (the original leaked the handle on a write error).
    with open(filename, "w") as file:
        for entry in event_list:
            event_id = entry[0]
            event_value = entry[1]
            file.write(
                str(event_id) + file_separator + event_value[EVENT_ENTRY_NAME_IDX] + file_separator +
                event_value[EVENT_ENTRY_SEVERITY_IDX] + file_separator + event_value[EVENT_ENTRY_INFO_IDX] +
                file_separator + event_value[EVENT_SOURCE_FILE_IDX] + '\n'
            )
|
||||
|
||||
|
||||
def write_translation_source_file(event_list: list, date_string: str, filename: str = "translateEvents.cpp"):
    """Generate a C++ source file that translates event IDs to their name strings.

    :param event_list: List of (event_id, (name, severity, info, source_file)) entries.
    :param date_string: Date stamp embedded into the generated file comment.
    :param filename: Output .cpp file path.
    """
    definitions = ""
    function = "const char * translateEvents(Event event) {\n\tswitch( (event & 0xffff) ) {\n"
    for entry in event_list:
        event_id = entry[0]
        event_value = entry[1]
        # One string constant plus one switch case per event.
        definitions += \
            f"const char *{event_value[EVENT_ENTRY_NAME_IDX]}_STRING = \"{event_value[EVENT_ENTRY_NAME_IDX]}\";\n"
        function += f"\tcase({event_id}):\n\t\treturn {event_value[EVENT_ENTRY_NAME_IDX]}_STRING;\n"
    function += '\tdefault:\n\t\treturn "UNKNOWN_EVENT";\n'
    # Context manager guarantees the file is flushed and closed on all paths.
    with open(filename, "w") as outputfile:
        outputfile.write(
            f"/**\n * @brief Auto-generated event translation file. Contains {len(event_list)} translations.\n"
            f" * @details\n"
            f" * Generated on: {date_string}\n */\n"
        )
        outputfile.write("#include \"translateEvents.h\"\n\n")
        outputfile.write(definitions + "\n" + function + "\t}\n\treturn 0;\n}\n")
|
||||
|
||||
|
||||
def write_translation_header_file(filename: str = "translateEvents.h"):
    """Generate the header declaring the translateEvents function.

    :param filename: Output header file path.
    """
    # Use a context manager: the original opened the file and never closed it.
    with open(filename, "w") as file:
        file.write(
            "#ifndef FSFWCONFIG_EVENTS_TRANSLATEEVENTS_H_\n"
            "#define FSFWCONFIG_EVENTS_TRANSLATEEVENTS_H_\n\n"
            f"{FSFW_EVENT_HEADER_INCLUDE}\n\n"
            "const char * translateEvents(Event event);\n\n"
            "#endif /* FSFWCONFIG_EVENTS_TRANSLATEEVENTS_H_ */\n"
        )
|
||||
|
||||
|
||||
def handle_csv_export(file_name: str, event_list: list, file_separator: str):
    """Export the event list to a separator-delimited (CSV-style) file.

    The file is generated in the same directory as the .py file and copied to
    another directory if specified.
    """
    print(f"EventParser: Exporting to file: {file_name}")
    export_to_file(file_name, event_list, file_separator)
|
||||
|
||||
|
||||
def handle_cpp_export(
        event_list: list, date_string: str, file_name: str = "translateEvents.cpp", generate_header: bool = True,
        header_file_name: str = "translateEvents.h"
):
    """Generate the C++ event translation source file and, optionally, its header.

    :param event_list: List of (event_id, event_info) entries to translate.
    :param date_string: Date stamp embedded into the generated file comment.
    :param file_name: Name of the generated .cpp file.
    :param generate_header: Also generate the accompanying header file if True.
    :param header_file_name: Name of the generated header file.
    """
    print("EventParser: Generating translation cpp file.")
    write_translation_source_file(event_list, date_string, filename=file_name)
    if not generate_header:
        return
    write_translation_header_file(filename=header_file_name)
|
0
objects/__init__.py
Normal file
0
objects/__init__.py
Normal file
76
objects/objects.py
Normal file
76
objects/objects.py
Normal file
@ -0,0 +1,76 @@
|
||||
import re
|
||||
|
||||
from modgen.parserbase.parser import FileParser
|
||||
from modgen.utility.sql_writer import SqlWriter
|
||||
|
||||
|
||||
class ObjectDefinitionParser(FileParser):
    """Parses object ID definitions of the form ``NAME = 0x<hex>`` from source files
    and stores them in the MIB table as ``{hex_id: [name]}``."""

    def __init__(self, file_list: list):
        super().__init__(file_list)

    def _handle_file_parsing(self, file_name: str, *args, **kwargs):
        """Regular mode: scan a whole file for object ID definitions."""
        # Context manager guarantees the handle is closed on all paths
        # (the original code leaked it).
        with open(file_name, "r", encoding="utf-8") as file:
            for line in file.readlines():
                match = re.search(r'([\w]*)[\s]*=[\s]*(0[xX][0-9a-fA-F]+)', line)
                if match:
                    self.mib_table.update({match.group(2): [match.group(1)]})

    def _handle_file_parsing_moving_window(self, file_name: str, current_line: int, moving_window_size: int,
                                           moving_window: list, *args, **kwargs):
        # Moving-window parsing is not needed for object definitions.
        pass

    def _post_parsing_operation(self):
        # No table post-processing needed for objects.
        pass
|
||||
|
||||
|
||||
def export_object_file(filename, object_list, file_separator: str = ","):
    """Write the object list to a text file, one ``id<sep>name`` entry per line.

    :param filename: Output file path.
    :param object_list: List of (object_id, [object_name]) entries.
    :param file_separator: Column separator.
    """
    # Context manager guarantees the file is flushed and closed on all paths.
    with open(filename, "w") as file:
        for entry in object_list:
            file.write(str(entry[0]) + file_separator + entry[1][0] + '\n')
|
||||
|
||||
|
||||
def write_translation_file(filename: str, list_of_entries, date_string_full: str):
    """Generate a C++ source file that translates object IDs to their name strings.

    :param filename: Output .cpp file path.
    :param list_of_entries: List of (object_id, [object_name]) entries; object_id is
        expected to be a 32-bit hexadecimal number string.
    :param date_string_full: Date stamp embedded into the generated file comment.
    """
    print('ObjectParser: Writing translation file ' + filename)
    definitions = ""
    function = "const char* translateObject(object_id_t object) {\n\tswitch( (object & 0xFFFFFFFF) ) {\n"
    for entry in list_of_entries:
        # first part of translate file
        definitions += f"const char *{entry[1][0]}_STRING = \"{entry[1][0]}\";\n"
        # second part of translate file. entry[i] contains 32 bit hexadecimal numbers
        function += f"\tcase {entry[0]}:\n\t\treturn {entry[1][0]}_STRING;\n"
    function += '\tdefault:\n\t\treturn "UNKNOWN_OBJECT";\n'
    # Context manager guarantees the file is flushed and closed on all paths.
    with open(filename, "w") as outputfile:
        outputfile.write(f"/**\n * @brief\tAuto-generated object translation file.\n"
                         f" * @details\n"
                         f" * Contains {len(list_of_entries)} translations.\n"
                         f" * Generated on: {date_string_full}\n */\n")
        outputfile.write("#include \"translateObjects.h\"\n\n")
        outputfile.write(definitions + "\n" + function + "\t}\n\treturn 0;\n}\n")
|
||||
|
||||
|
||||
def write_translation_header_file(filename: str = "translateObjects.h"):
    """Generate the header declaring the translateObject function.

    :param filename: Output header file path.
    """
    # Use a context manager: the original opened the file and never closed it.
    with open(filename, "w") as file:
        file.write(
            "#ifndef FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_\n"
            "#define FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_\n\n"
            "#include <fsfw/objectmanager/SystemObjectIF.h>\n\n"
            "const char* translateObject(object_id_t object);\n\n"
            "#endif /* FSFWCONFIG_OBJECTS_TRANSLATEOBJECTS_H_ */\n"
        )
|
||||
|
||||
|
||||
def sql_object_exporter(
        object_table: list, db_filename: str, delete_cmd: str, create_cmd: str, insert_cmd: str
):
    """Export the object table into an SQL database.

    Drops the existing table, recreates it, inserts one (id, name) row per entry
    and commits the transaction.

    :param object_table: List of (object_id, [object_name]) entries.
    :param db_filename: Path of the SQLite database file.
    :param delete_cmd: SQL command used to drop the old table.
    :param create_cmd: SQL command used to create the table.
    :param insert_cmd: Parameterized SQL command used to insert a single row.
    """
    sql_writer = SqlWriter(db_filename=db_filename)
    sql_writer.delete(delete_cmd)
    sql_writer.open(create_cmd)
    for row in object_table:
        sql_writer.write_entries(insert_cmd, (row[0], row[1][0]))
    sql_writer.commit()
    sql_writer.close()
|
0
parserbase/__init__.py
Normal file
0
parserbase/__init__.py
Normal file
71
parserbase/file_list_parser.py
Normal file
71
parserbase/file_list_parser.py
Normal file
@ -0,0 +1,71 @@
|
||||
"""
|
||||
@file file_list_parser.py
|
||||
@brief Generic File Parser class
|
||||
@details
|
||||
Used to parse header files. Implemented as a class in case the header parser becomes more complex
|
||||
@author R. Mueller
|
||||
@date 22.11.2019
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
from typing import Union
|
||||
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
class FileListParser:
    """
    Generic header parser which takes a directory name or directory name list
    and parses all included header files recursively.
    """
    def __init__(self, directory_list_or_name: Union[str, list]):
        """
        :param directory_list_or_name: Single directory name or list of directory
            names which will be searched for header files.
        """
        if isinstance(directory_list_or_name, str):
            self.directory_list = [directory_list_or_name]
        elif isinstance(directory_list_or_name, list):
            self.directory_list = directory_list_or_name
        else:
            print("Header Parser: Passed directory list is not a header name or list of "
                  "header names")
            # Fall back to an empty list so a later parse_header_files() call fails
            # gracefully instead of raising AttributeError on a missing attribute.
            self.directory_list = []
        self.header_files = []

    def parse_header_files(self, search_recursively: bool = False,
                           printout_string: str = "Parsing header files: ",
                           print_current_dir: bool = False):
        """
        Collect and return the list of header files in the configured directories.
        :param search_recursively: Also descend into subdirectories.
        :param printout_string: Prefix printed before the result summary.
        :param print_current_dir: Print each directory while it is being parsed.
        :return: List of found header file paths.
        """
        print(printout_string, end="")
        for directory in self.directory_list:
            self.__get_header_file_list(directory, search_recursively, print_current_dir)
        print(str(len(self.header_files)) + " header files were found.")
        return self.header_files

    def __get_header_file_list(self, base_directory: str, search_recursively: bool = False,
                               print_current_dir: bool = False):
        """Recursive worker: append all header files found below base_directory."""
        if base_directory[-1] != '/':
            base_directory += '/'
        local_header_files = []
        if print_current_dir:
            print("Parsing header files in: " + base_directory)
        base_list = os.listdir(base_directory)
        for entry in base_list:
            header_file_match = re.match(r"[_.]*.*\.h", entry)
            if header_file_match:
                if os.path.isfile(base_directory + entry):
                    match_string = header_file_match.group(0)
                    # Skip hidden or private headers (leading '.' or '_').
                    if match_string[0] == '.' or match_string[0] == '_':
                        pass
                    else:
                        local_header_files.append(base_directory + entry)
            if search_recursively:
                next_path = base_directory + entry
                if os.path.isdir(next_path):
                    # Forward print_current_dir too; the original dropped it on
                    # recursion so nested directories were never announced.
                    self.__get_header_file_list(next_path, search_recursively, print_current_dir)
        self.header_files.extend(local_header_files)
|
197
parserbase/parser.py
Normal file
197
parserbase/parser.py
Normal file
@ -0,0 +1,197 @@
|
||||
#! /usr/bin/python3
|
||||
"""
|
||||
@file parser.py
|
||||
@brief Generic File Parser class
|
||||
@details
|
||||
Used by the MIB Exporter. There are multiple functions which are abstract and should
|
||||
be implemented by a custom parser implementation.
|
||||
|
||||
A file list to parse must be supplied.
|
||||
Child classes fill out the MIB table (self.mib_table)
|
||||
@author R. Mueller
|
||||
@date 14.11.2019
|
||||
"""
|
||||
import enum
|
||||
from abc import abstractmethod
|
||||
from typing import Dict
|
||||
|
||||
|
||||
class FileParserModes(enum.Enum):
    """Available parsing modes for the FileParser base class."""
    # NOTE: the original had a stray trailing comma after auto(), which silently
    # made REGULAR's value a one-element tuple instead of an int.
    REGULAR = enum.auto()
    MOVING_WINDOW = enum.auto()
|
||||
|
||||
|
||||
class FileParser:
    """
    This parent class gathers common file parser operations into a super class.

    The user should do the following to use this base class:
    1. Create a custom parser class which inherits from this class and implement the abstract
       functions
    2. Set the parser mode
    3. Call parse_files. Additional arguments and keyword arguments can be supplied as well and
       will be passed through to the abstract function implementations.
    """
    def __init__(self, file_list):
        """
        :param file_list: List of file paths which will be parsed by parse_files.
        """
        if len(file_list) == 0:
            print("File list is empty !")
            self.file_list_empty = True
        else:
            self.file_list_empty = False
        self.file_list = file_list
        # Can be used to have unique key in MIB tables
        self.index = 0
        # Initialize empty MIB table which will be filled by specific parser implementation
        self.mib_table = dict()
        self.__parser_mode = FileParserModes.REGULAR
        # For MOVING_WINDOW mode this holds the window size; 0 means "not configured yet".
        self.__parser_args = 0

        self.__debug_moving_window = False
        self.__debug_moving_window_filename = ""

        self._verbose_level = 1

    def set_regular_parser_mode(self):
        """
        Set regular parsing mode. This will be the default, so it is not strictly necessary to call
        this.
        :return:
        """
        self.__parser_mode = FileParserModes.REGULAR

    def set_moving_window_mode(self, moving_window_size: int):
        """
        Set moving window parsing mode
        :param moving_window_size: Number of lines the window spans.
        :return:
        """
        self.__parser_mode = FileParserModes.MOVING_WINDOW
        self.__parser_args = moving_window_size

    def set_verbosity(self, verbose_level: int):
        """Set the verbosity level used by parser implementations."""
        self._verbose_level = verbose_level

    def enable_moving_window_debugging(self, file_name: str):
        """Enable window state printouts for files whose name contains file_name."""
        self.__debug_moving_window = True
        self.__debug_moving_window_filename = file_name

    def parse_files(self, *args, **kwargs) -> Dict:
        """
        Core method which is called to parse the files
        :param args: Optional positional arguments. Passed on the file parser
        :param kwargs: Optional keyword arguments. Passed on to file parser
        :return: Returns the mib table dictionary.
        """
        if self.file_list_empty:
            print("Nothing to parse, supplied file list is empty!")
            return self.mib_table

        if self.__parser_mode == FileParserModes.REGULAR:
            for file_name in self.file_list:
                # Implemented by child class ! Fill out info table (self.mib_table) in this routine
                self._handle_file_parsing(file_name, *args, **kwargs)
            # Can be implemented by child class to edit the table after it is finished.
            # default implementation is empty
            self._post_parsing_operation()
        elif self.__parser_mode == FileParserModes.MOVING_WINDOW:
            for file_name in self.file_list:
                self.__parse_file_with_moving_window(file_name, *args, **kwargs)
            self._post_parsing_operation()
        return self.mib_table

    @abstractmethod
    def _handle_file_parsing(self, file_name: str, *args, **kwargs):
        """
        Implemented by child class. The developer should fill the info table (self.mib_table)
        in this routine
        :param file_name: Name of the file currently being parsed.
        :param args: Additional arguments passed through the parse_files method.
        :param kwargs: Additional keyword arguments passed through the parse_files method.
        :return: Nothing. Fill out the member dictionary self.mib_table in the function instead.
        """

    @abstractmethod
    def _handle_file_parsing_moving_window(
            self, file_name: str, current_line: int, moving_window_size: int, moving_window: list,
            *args, **kwargs):
        """
        This will be called for the MOVING_WINDOW parser mode.
        :param file_name: Current file name
        :param current_line: Current line number.
        :param moving_window_size: Size of the moving window
        :param moving_window: Current moving window. The last entry of the moving window
            is the current line number
        :return: Nothing. Fill out the member dictionary self.mib_table in the function instead.
        """

    @abstractmethod
    def _post_parsing_operation(self):
        """
        Can be implemented by child class to perform post parsing operations (e.g. setting a
        flag or editing MIB table entries)
        :return:
        """

    def __parse_file_with_moving_window(self, file_name: str, *args, **kwargs):
        """
        Slide a window of self.__parser_args lines over the file, calling
        _handle_file_parsing_moving_window once per line. The window's last slot always holds
        the current line; slots that are outside the file hold "".
        """
        all_lines = self._open_file(file_name=file_name)
        moving_window_size = self.__parser_args
        if moving_window_size == 0:
            print("Moving window size is 0!")
            return
        moving_window = [""] * moving_window_size
        for line_idx, line in enumerate(all_lines):
            if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
                print(f"Moving window pre line anaylsis line {line_idx}")
                print(moving_window)
            # The moving window will start with only the bottom being in the file
            if line_idx == 0:
                moving_window[self.__parser_args - 1] = line
            # More and more of the window is inside the file now
            elif line_idx < moving_window_size:
                for idx in range(line_idx, 0, -1):
                    moving_window[moving_window_size - 1 - idx] = \
                        moving_window[moving_window_size - idx]
                moving_window[moving_window_size - 1] = line
            # The full window is inside the file now.
            elif line_idx >= moving_window_size:
                for idx in range(moving_window_size - 1):
                    moving_window[idx] = moving_window[idx + 1]
                moving_window[moving_window_size - 1] = line
            if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
                print(f"Moving window post line anaylsis line {line_idx}")
                print(moving_window)
            self._handle_file_parsing_moving_window(
                file_name, line_idx, moving_window_size, moving_window, *args, **kwargs
            )
        # Now the moving window moved past the end of the file. Sections which are outside
        # the file are assigned an empty string until the window has moved out of file completely
        # NOTE(review): this post-EOF loop only shifts/clears the window and never calls
        # _handle_file_parsing_moving_window, so trailing lines are not handed to the parser
        # again — confirm whether that is intentional before relying on it.
        for remaining_windows_idx in range(moving_window_size):
            if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
                print("Moving window pre line analysis post EOF")
                print(moving_window)
            num_entries_to_clear = remaining_windows_idx + 1
            for idx_to_clear in range(num_entries_to_clear):
                moving_window[moving_window_size - 1 - idx_to_clear] = ""
            for idx_to_reassign in range(moving_window_size - 1 - num_entries_to_clear):
                moving_window[idx_to_reassign] = moving_window[idx_to_reassign + 1]
            if self.__debug_moving_window and self.__debug_moving_window_filename in file_name:
                print("Moving window post line anaylsis post EOF")
                print(moving_window)

    @staticmethod
    def _open_file(file_name: str) -> list:
        """
        Open a file, attempting common encodings utf-8 and cp1252
        :param file_name: Path of the file to read.
        :return: List of all lines, each still carrying its newline.
        """
        try:
            # Context managers ensure the handle is closed even on error; the previous
            # implementation never closed the file objects.
            with open(file_name, 'r', encoding='utf-8') as file:
                all_lines = file.readlines()
        except UnicodeDecodeError:
            print("ReturnValueParser: Decoding error with file " + file_name)
            with open(file_name, 'r', encoding='cp1252') as file:
                all_lines = file.readlines()
        return all_lines
|
0
returnvalues/__init__.py
Normal file
0
returnvalues/__init__.py
Normal file
373
returnvalues/returnvalues_parser.py
Normal file
373
returnvalues/returnvalues_parser.py
Normal file
@ -0,0 +1,373 @@
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
|
||||
from modgen.parserbase.parser import FileParser
|
||||
from modgen.utility.printer import PrettyPrinter
|
||||
|
||||
# Intermediate solution
|
||||
MAX_STRING_LEN = 80
|
||||
|
||||
PRINT_TRUNCATED_ENTRIES = False
|
||||
DEBUG_INTERFACE_ID_COMPARISON = False
|
||||
DEBUG_FOR_FILE_NAME = False
|
||||
|
||||
DEBUG_FILE_NAME = "RingBufferAnalyzer"
|
||||
CLASS_ID_NAMESPACE = "CLASS_ID"
|
||||
DEFAULT_MOVING_WINDOWS_SIZE = 7
|
||||
|
||||
|
||||
class InterfaceParser(FileParser):
    """
    Parses interface definition files and fills self.mib_table with entries mapping an
    interface name to [absolute start index, short name].
    """

    def __init__(self, file_list: list, print_table: bool = False):
        """
        :param file_list: Interface files to parse.
        :param print_table: If True, pretty print the resulting table after parsing.
        """
        super().__init__(file_list)
        self.print_table = print_table
        self.file_table_list = []
        self.file_name_table = []
        self.start_name_list = []
        self.end_name_list = []

    def _handle_file_parsing_moving_window(self, file_name: str, current_line: int,
                                           moving_window_size: int, moving_window: list, *args,
                                           **kwargs):
        """Moving window mode is not supported by this parser."""
        pass

    def _handle_file_parsing(self, file_name: str, *args, **kwargs):
        """
        Read one interface file and extract its start definition and entry list.
        """
        # Use the inherited helper: it tries utf-8 with a cp1252 fallback and closes the
        # handle properly (the previous inline open() calls leaked it). Bugfix: file_name
        # used to be appended to self.file_name_table both here and inside
        # __handle_regular_class_id_parsing, producing duplicate entries; the append now
        # happens only in the helper so the table stays parallel to self.file_table_list.
        all_lines = self._open_file(file_name=file_name)
        self.__handle_regular_class_id_parsing(file_name=file_name, all_lines=all_lines)

    def __handle_regular_class_id_parsing(self, file_name: str, all_lines: List[str]):
        """
        Scan one file: first find the start definition '<name> = <value or referenced name>',
        then count enum entries until the [EXPORT] : [END] marker. Exits the program when
        either marker is missing.
        """
        count = 0
        current_file_table = dict()
        start_matched = False
        end_matched = False
        start_name = ""
        target_end_name = ""
        for line in all_lines:
            if not start_matched:
                match = re.search(r'[\s]*([\w]*) = [\s]*([\w]*)', line)
                if match:
                    start_name = match.group(1)
                    target_end_name = match.group(2)
                    start_matched = True
            else:
                match = re.search(r'[\s]*([\w]*),?(?:[\s]*//)?([^\n]*)?', line)
                if match:
                    count += 1
                    # [EXPORT] : [END] in the trailing comment marks the last entry.
                    if re.search(r"\[EXPORT\][\s]*:[\s]*\[END\]", match.group(2)):
                        last_entry_name = match.group(1)
                        end_matched = True
                        self.end_name_list.append([last_entry_name, None])
                    else:
                        short_name = match.group(2)
                        if short_name == "":
                            # No comment supplied: derive a short name from the entry name.
                            short_name = match.group(1)[0:3]
                        current_file_table.update({count: [match.group(1), short_name]})
        if not start_matched:
            print("No start match detected when parsing interface files..")
            print(f"Current file: {file_name} | Make sure to include a start definition")
            sys.exit(1)
        if not end_matched:
            print("No end match detected when parsing interface files. Make sure to use [EXPORT] : [END]")
            sys.exit(1)
        self.start_name_list.append([start_name, target_end_name, None, count])
        self.file_name_table.append(file_name)
        self.file_table_list.append(current_file_table)

    def _post_parsing_operation(self):
        """
        Resolve the start/end indexes of every parsed file, then build the final MIB table.
        """
        self.start_name_list, self.end_name_list = self.__assign_start_end_indexes(
            self.start_name_list, self.end_name_list
        )
        for idx, file_table in enumerate(self.file_table_list):
            self.__build_mod_interface_table(self.start_name_list[idx][2], file_table)
        if self.print_table:
            PrettyPrinter.pprint(self.mib_table)

    @staticmethod
    def __assign_start_end_indexes(start_name_list_list, end_name_list_list) -> Tuple[List, List]:
        """
        Iteratively resolve numeric start indexes. A start definition is either a literal
        number or a reference to another file's end name; references are resolved in
        repeated passes until every index is filled (bounded by max_outer_iterations).
        """
        start_list_list_completed = start_name_list_list
        end_list_list_completed = end_name_list_list
        all_indexes_filled = False
        max_outer_iterations = 15
        current_iteration = 0
        while not all_indexes_filled:
            for idx, start_name_list in enumerate(start_list_list_completed):
                if start_name_list[1].isdigit():
                    # Literal numeric start value: end index = start + entry count.
                    start_list_list_completed[idx][2] = int(start_name_list[1])
                    end_list_list_completed[idx][1] = \
                        start_list_list_completed[idx][2] + start_list_list_completed[idx][3]
                target_end_name = start_name_list[1]
                for end_name_list in end_list_list_completed:
                    end_name = end_name_list[0]
                    end_value = end_name_list[1]
                    # Reference to another file's end name which is already resolved.
                    if end_name == target_end_name and end_value is not None:
                        start_list_list_completed[idx][2] = end_value
                        end_list_list_completed[idx][1] = end_value + start_list_list_completed[idx][3]
            all_indexes_filled = True
            for idx, start_name_list in enumerate(start_list_list_completed):
                if start_name_list[2] is None or end_name_list_list[idx][1] is None:
                    all_indexes_filled = False
            current_iteration += 1
            if current_iteration >= max_outer_iterations:
                print("Could not fill out start and end index list in given number of maximum outer iterations!")
                sys.exit(1)
        return start_list_list_completed, end_list_list_completed

    def __build_mod_interface_table(self, count_start: int, interface_dict: dict):
        """
        Merge one file's entries into self.mib_table, offsetting local counts by the
        resolved start index of that file.
        """
        dict_to_build = dict()
        for local_count, interface_name_and_shortname in interface_dict.items():
            dict_to_build.update(
                {interface_name_and_shortname[0]: [local_count + count_start, interface_name_and_shortname[1]]}
            )
        self.mib_table.update(dict_to_build)
|
||||
|
||||
|
||||
class ReturnValueParser(FileParser):
    """
    Generic return value parser. Scans source files for ReturnValue_t definitions and fills
    self.mib_table with full_id -> (name, description, number, file name, interface name).
    """
    def __init__(self, interfaces, file_list, print_tables):
        """
        :param interfaces: Interface lookup table (name -> [id, short name]) as produced by
            the InterfaceParser.
        :param file_list: Files to scan.
        :param print_tables: If True, pretty print the resulting table after parsing.
        """
        super().__init__(file_list)
        self.print_tables = print_tables
        self.interfaces = interfaces
        self.return_value_dict = dict()
        self.count = 0
        # Stores last three lines (only indexes 0 and 1 are shifted by the legacy path)
        self.last_lines = ["", "", ""]
        # Index of the "current" line inside the moving window supplied by the base class.
        self.moving_window_center_idx = 3
        self.current_interface_id_entries = {
            "Name": "",
            "ID": 0,
            "FullName": ""
        }
        # The two system-wide return values are always present.
        self.return_value_dict.update({0: ('OK', 'System-wide code for ok.', 'RETURN_OK',
                                           'HasReturnvaluesIF.h', 'HasReturnvaluesIF')})
        self.return_value_dict.update({1: ('Failed', 'Unspecified system-wide code for failed.',
                                           'RETURN_FAILED', 'HasReturnvaluesIF.h',
                                           'HasReturnvaluesIF')})

    def set_moving_window_mode(self, moving_window_size: int):
        """
        Set moving window parsing mode
        :param moving_window_size: Ignored deliberately — this parser only works with
            DEFAULT_MOVING_WINDOWS_SIZE.
        :return:
        """
        super().set_moving_window_mode(DEFAULT_MOVING_WINDOWS_SIZE)

    def _handle_file_parsing(self, file_name: str, *args, **kwargs):
        """
        Former way to parse returnvalues. Not recommended anymore.
        :param file_name:
        :param args: First optional positional argument is interpreted as a boolean enabling
            printing of truncated entries.
        :param kwargs:
        :return:
        """
        if len(args) > 0:
            print_truncated_entries = args[0]
        else:
            print_truncated_entries = False
        all_lines = self._open_file(file_name=file_name)
        for line in all_lines:
            self.__handle_line_reading(line, file_name, print_truncated_entries)

    def _handle_file_parsing_moving_window(self, file_name: str, current_line: int,
                                           moving_window_size: int, moving_window: list, *args,
                                           **kwargs):
        """
        Moving-window scan: detect interface ID definitions and ReturnValue_t constants on
        the center line of the window, handling definitions spanning multiple lines.
        """
        interface_id_match = re.search(
            rf"{CLASS_ID_NAMESPACE}::([a-zA-Z_0-9]*)", moving_window[self.moving_window_center_idx]
        )

        if interface_id_match:
            self.__handle_interfaceid_match(interface_id_match=interface_id_match, file_name=file_name)
        returnvalue_match = re.search(
            r"^[\s]*static const(?:expr)?[\s]*ReturnValue_t[\s]*([\w]*)[\s]*=[\s]*((?!;).*$)",
            moving_window[self.moving_window_center_idx], re.DOTALL
        )
        full_returnvalue_string = ""
        if returnvalue_match:
            if ";" in returnvalue_match.group(0):
                # Definition is complete on this line.
                full_returnvalue_string = returnvalue_match.group(0)
            else:
                # Definition continues on following lines; join them up to the ';'.
                full_returnvalue_string = self.__build_multi_line_returnvalue_string(
                    moving_window=moving_window, first_line=moving_window[self.moving_window_center_idx]
                )
        # Bugfix: the name group used to be ([\w] *), which captured only a single word
        # character; ([\w]*) captures the full constant name.
        returnvalue_match = re.search(
            r"^[\s]*static const(?:expr)? ReturnValue_t[\s]*([\w]*)[\s]*=[\s]*.*::[\w]*\(([\w]*),[\s]*([\d]*)\)",
            full_returnvalue_string
        )
        if not returnvalue_match:
            returnvalue_match = re.search(
                r'^[\s]*static const(?:expr)? ReturnValue_t[\s]*([a-zA-Z_0-9]*)[\s]*=[\s]*'
                r'MAKE_RETURN_CODE[\s]*\([\s]*([\w]*)[\s]*\)',
                full_returnvalue_string
            )
        if returnvalue_match:
            description = self.__search_for_descrip_string(moving_window=moving_window)
            self.__handle_returnvalue_match(
                name_match=returnvalue_match.group(1), file_name=file_name, number_match=returnvalue_match.group(2),
                description=description
            )

    def __build_multi_line_returnvalue_string(
            self, first_line: str, moving_window: List[str]
    ) -> str:
        """
        Join the lines of a multi-line ReturnValue_t definition, stopping at the line
        containing the terminating ';'.
        """
        all_lines = first_line.rstrip()
        end_found = False
        current_idx = self.moving_window_center_idx
        while not end_found and current_idx < len(moving_window) - 1:
            current_idx += 1
            string_to_add = moving_window[current_idx].lstrip()
            if ";" in moving_window[current_idx]:
                all_lines += string_to_add
                break
            else:
                # Bugfix: rstrip() returns a new string; the result used to be discarded,
                # leaving trailing newlines in the joined definition.
                string_to_add = string_to_add.rstrip()
                all_lines += string_to_add
        return all_lines

    def __search_for_descrip_string(self, moving_window: List[str]) -> str:
        """
        Look upwards from the current line for an [EXPORT] : [COMMENT] tag and collect the
        comment text up to the current line. Returns "" when no tag is found.
        """
        current_idx = self.moving_window_center_idx - 1
        # Look at the line above first
        descrip_match = re.search(
            r"\[EXPORT\][\s]*:[\s]*\[COMMENT\]", moving_window[current_idx]
        )
        if not descrip_match:
            while current_idx > 0:
                current_idx -= 1
                # Stop searching when running into the previous returnvalue definition.
                if re.search(r"^[\s]*static const(?:expr)? ReturnValue_t", moving_window[current_idx]):
                    break
                descrip_match = re.search(
                    r"\[EXPORT\][\s]*:[\s]*\[COMMENT\]", moving_window[current_idx]
                )
                if descrip_match:
                    break
        if descrip_match:
            current_build_idx = current_idx
            descrip_string = ""
            while current_build_idx < self.moving_window_center_idx:
                string_to_add = moving_window[current_build_idx].lstrip()
                string_to_add = string_to_add.lstrip("//!<>")
                string_to_add = string_to_add.rstrip()
                descrip_string += string_to_add
                current_build_idx += 1
        else:
            return ""
        resulting_description = re.search(
            r"\[EXPORT\][\s]*:[\s]*\[COMMENT\](.*)", descrip_string
        )
        return resulting_description.group(1)

    def __handle_line_reading(self, line, file_name, print_truncated_entries: bool):
        """
        Legacy two-line matching used by the REGULAR parsing mode.
        NOTE: print_truncated_entries is kept for signature compatibility; truncation
        printing is controlled by the module-level PRINT_TRUNCATED_ENTRIES constant.
        """
        newline = line
        if self.last_lines[0] != '\n':
            two_lines = self.last_lines[0] + ' ' + newline.strip()
        else:
            two_lines = ''
        interface_id_match = re.search(r'INTERFACE_ID[\s]*=[\s]*CLASS_ID::([a-zA-Z_0-9]*)',
                                       two_lines)
        if interface_id_match:
            self.__handle_interfaceid_match(interface_id_match, file_name=file_name)

        returnvalue_match = re.search(
            r'^[\s]*static const(?:expr)? ReturnValue_t[\s]*([a-zA-Z_0-9]*)[\s]*=[\s]*'
            r'MAKE_RETURN_CODE[\s]*\([\s]*([x0-9a-fA-F]{1,4})[\s]*\);[\t ]*(//)?([^\n]*)',
            two_lines
        )
        if returnvalue_match:
            # Bugfix: this call used to pass the match object and the truncation flag
            # positionally, which did not match __handle_returnvalue_match's
            # (name_match, number_match, file_name, description) signature.
            self.__handle_returnvalue_match(
                name_match=returnvalue_match.group(1),
                number_match=returnvalue_match.group(2),
                file_name=file_name,
                description=self.clean_up_description(returnvalue_match.group(4))
            )
        self.last_lines[1] = self.last_lines[0]
        self.last_lines[0] = newline

    def __handle_interfaceid_match(self, interface_id_match, file_name: str):
        """Record the interface ID/name of the class currently being parsed."""
        if DEBUG_INTERFACE_ID_COMPARISON:
            print(f"Interface ID {interface_id_match.group(1)} found in {file_name}")
        self.current_interface_id_entries["ID"] = \
            self.interfaces[interface_id_match.group(1)][0]
        self.current_interface_id_entries["Name"] = \
            self.interfaces[interface_id_match.group(1)][1]
        self.current_interface_id_entries["FullName"] = interface_id_match.group(1)
        if DEBUG_INTERFACE_ID_COMPARISON:
            current_id = self.current_interface_id_entries["ID"]
            print(f"Current ID: {current_id}")

    def __handle_returnvalue_match(
            self, name_match: str, number_match: str, file_name: str, description: str
    ):
        """
        Build the full return value ID from the current interface ID and the matched number
        and store the entry in self.return_value_dict.
        """
        string_to_add = self.build_checked_string(
            self.current_interface_id_entries["Name"], name_match, MAX_STRING_LEN,
            PRINT_TRUNCATED_ENTRIES
        )
        full_id = (self.current_interface_id_entries["ID"] << 8) + return_number_from_string(number_match)
        if full_id in self.return_value_dict:
            # print('Duplicate returncode ' + hex(full_id) + ' from ' + file_name +
            #       ' was already in ' + self.return_value_dict[full_id][3])
            pass
        dict_tuple = (
            string_to_add, description, number_match, file_name, self.current_interface_id_entries["FullName"]
        )
        self.return_value_dict.update({
            full_id: dict_tuple
        })
        self.count = self.count + 1

    def _post_parsing_operation(self):
        """Publish the collected return values as the MIB table."""
        if self.print_tables:
            PrettyPrinter.pprint(self.return_value_dict)
        self.mib_table = self.return_value_dict

    @staticmethod
    def export_to_file(filename: str, list_of_entries: dict, file_separator: str):
        """Write all entries to filename, one separated line per return value."""
        # Context manager ensures the file is flushed and closed even if an entry is
        # malformed (the previous implementation only closed on success).
        with open(filename, "w") as file:
            for entry in list_of_entries.items():
                file.write(hex(entry[0]) + file_separator + entry[1][0] + file_separator + entry[1][1] +
                           file_separator + entry[1][2] + file_separator
                           + entry[1][3] + file_separator + entry[1][4] + '\n')

    def build_checked_string(self, first_part, second_part, max_string_len: int,
                             print_truncated_entries: bool):
        """ Build a checked string """
        my_str = first_part + '_' + self.convert(second_part)
        if len(my_str) > max_string_len:
            if print_truncated_entries:
                print("Warning: Entry " + my_str + " too long. Will truncate.")
            my_str = my_str[0:max_string_len]
        return my_str

    @staticmethod
    def convert(name):
        """Convert SNAKE_CASE to CamelCase."""
        single_strings = name.split('_')
        new_string = ''
        for one_string in single_strings:
            one_string = one_string.lower()
            one_string = one_string.capitalize()
            new_string = new_string + one_string
        return new_string

    @staticmethod
    def clean_up_description(descr_string):
        """Strip comment decoration; return a single space for empty descriptions."""
        description = descr_string.lstrip('!<- ')
        if description == '':
            description = ' '
        return description
|
||||
|
||||
|
||||
def return_number_from_string(a_string):
    """
    Parse a decimal or hexadecimal ('0x'-prefixed) number string.

    Generalized over the original: negative decimals and surrounding whitespace are now
    accepted, and a malformed '0x' string no longer raises (the original crashed with
    ValueError there). On any parse failure the original fallback is kept: print an
    error and return 0.

    :param a_string: Number as text, e.g. '42', '-5' or '0x2A'.
    :return: Parsed integer, or 0 for an illegal representation.
    """
    stripped = a_string.strip()
    try:
        if stripped.startswith('0x'):
            return int(stripped, 16)
        return int(stripped)
    except ValueError:
        print('Error: Illegal number representation: ' + a_string)
        return 0
|
||||
|
0
utility/__init__.py
Normal file
0
utility/__init__.py
Normal file
61
utility/csv_writer.py
Normal file
61
utility/csv_writer.py
Normal file
@ -0,0 +1,61 @@
|
||||
#! /usr/bin/python3
|
||||
"""
|
||||
@file
|
||||
mib_packet_content_parser.py
|
||||
@brief
|
||||
CSV Writer
|
||||
@details
|
||||
This class writes tables to a csv.
|
||||
@author
|
||||
R. Mueller
|
||||
@date
|
||||
14.11.2019
|
||||
"""
|
||||
from modgen.utility.file_management import copy_file, move_file
|
||||
|
||||
|
||||
# TODO: Export to SQL
|
||||
# TODO: Export to SQL
class CsvWriter:
    """
    Writes a table dictionary (row index -> sequence of column values) to a CSV file,
    preceded by an 'Index' column and the supplied header names.
    """
    def __init__(
            self, filename: str, table_to_print=None, header_array=None, file_separator: str = ","
    ):
        """
        :param filename: Output CSV file path.
        :param table_to_print: Dict mapping row index to a sequence of column values.
        :param header_array: Column names; also determines the number of columns written.
        :param file_separator: Separator between fields.
        """
        if header_array is None:
            header_array = []
        if table_to_print is None:
            table_to_print = dict()
        self.filename = filename
        self.table_to_print = table_to_print
        self.header_array = header_array
        # Bugfix: the original guarded this with 'if self.header_array != 0:', comparing a
        # list to an int — always true, so the assignment is simply unconditional now.
        self.column_numbers = len(self.header_array)
        self.file_separator = file_separator

    def write_to_csv(self):
        """Write the header line and all table rows to self.filename."""
        # Context manager ensures the file is closed even if a row is malformed
        # (the original left the handle open on error).
        with open(self.filename, "w") as file:
            file.write("Index" + self.file_separator)
            for index in range(self.column_numbers):
                # Last header cell ends the line instead of adding a separator.
                if index < self.column_numbers - 1:
                    file.write(self.header_array[index] + self.file_separator)
                else:
                    file.write(self.header_array[index] + "\n")
            for index, entry in self.table_to_print.items():
                file.write(str(index) + self.file_separator)
                for column_index in range(self.column_numbers):
                    if column_index < self.column_numbers - 1:
                        file.write(str(entry[column_index]) + self.file_separator)
                    else:
                        file.write(str(entry[column_index]) + "\n")

    def copy_csv(self, copy_destination: str = "."):
        """Copy the CSV file to the given destination."""
        copy_file(self.filename, copy_destination)
        print("CSV file was copied to " + copy_destination)

    def move_csv(self, move_destination: str):
        """Move the CSV file to the given destination."""
        move_file(self.filename, move_destination)
        if move_destination == ".." or move_destination == "../":
            print("CSV Writer: CSV file was moved to parser root directory")
        else:
            print(f"CSV Writer: CSV file was moved to {move_destination}")
|
25
utility/file_management.py
Normal file
25
utility/file_management.py
Normal file
@ -0,0 +1,25 @@
|
||||
#! /usr/bin/python3.8
|
||||
# -*- coding: utf-8 -*-
|
||||
import shutil
|
||||
import os
|
||||
|
||||
|
||||
def copy_file(filename: str, destination: str = "", delete_existing_file: bool = False):
    """
    Copy a file to the given destination (file path or directory), preserving metadata.

    Errors are reported on stdout instead of raised. NOTE: delete_existing_file is
    currently unused; it is kept for backward compatibility with existing callers.

    :param filename: Source file path.
    :param destination: Destination path or directory.
    :param delete_existing_file: Unused.
    """
    if not os.path.exists(filename):
        # The original silently did nothing for a missing source; report it instead.
        print("copy_file: Source file " + filename + " does not exist!")
        return
    try:
        shutil.copy2(filename, destination)
    except FileNotFoundError as error:
        print("copy_file: File not found!")
        print(error)
    except shutil.SameFileError:
        print("copy_file: Source and destination are the same!")
|
||||
|
||||
|
||||
def move_file(file_name: str, destination: str = ""):
    """
    Move a file to the given destination (copy with metadata, then delete the source).

    Errors are reported on stdout instead of raised.

    :param file_name: Source file path.
    :param destination: Destination path or directory.
    """
    if not os.path.exists(file_name):
        # The original silently did nothing for a missing source; report it instead.
        print("move_file: Source file " + file_name + " does not exist!")
        return
    try:
        # Copy-then-remove (rather than os.rename) so moves across filesystems work.
        shutil.copy2(file_name, destination)
        os.remove(file_name)
    except FileNotFoundError as error:
        print("File not found!")
        print(error)
|
16
utility/printer.py
Normal file
16
utility/printer.py
Normal file
@ -0,0 +1,16 @@
|
||||
import pprint
|
||||
|
||||
# Module-wide pretty printer shared by all parser modules.
PrettyPrinter = pprint.PrettyPrinter(indent=0, width=250)


class Printer:
    """Small helper to dump dictionaries via the module-wide pretty printer."""

    def __init__(self):
        pass

    @staticmethod
    def print_content(dictionary, leading_string: str = ""):
        """Pretty-print the dictionary, optionally preceded by a heading line."""
        if leading_string:
            print(leading_string)
        PrettyPrinter.pprint(dictionary)
        print("\r\n", end="")
|
||||
|
36
utility/sql_writer.py
Normal file
36
utility/sql_writer.py
Normal file
@ -0,0 +1,36 @@
|
||||
import sqlite3
|
||||
|
||||
|
||||
class SqlWriter:
    """Thin convenience wrapper around an sqlite3 connection for MIB table export."""

    def __init__(self, db_filename: str):
        self.filename = db_filename
        self.conn = sqlite3.connect(self.filename)

    def open(self, sql_creation_command: str):
        """Run the given table creation command against the database."""
        print("SQL Writer: Opening " + self.filename)
        self.conn.execute(sql_creation_command)

    def delete(self, sql_deletion_command):
        """Run the given table deletion command against the database."""
        print("SQL Writer: Deleting SQL table")
        self.conn.execute(sql_deletion_command)

    def write_entries(self, sql_insertion_command, current_entry):
        """Insert a single entry and return the row id assigned to it."""
        cursor = self.conn.cursor()
        cursor.execute(sql_insertion_command, current_entry)
        return cursor.lastrowid

    def commit(self):
        """Persist all pending writes."""
        print("SQL Writer: Commiting SQL table")
        self.conn.commit()

    def close(self):
        """Close the underlying database connection."""
        self.conn.close()

    def sql_writing_helper(self, creation_cmd, insertion_cmd, mib_table: dict, deletion_cmd: str = ""):
        """Optionally delete, then create, fill, commit and close in one call."""
        if deletion_cmd:
            self.delete(deletion_cmd)
        self.open(creation_cmd)
        for key in mib_table:
            self.write_entries(insertion_cmd, mib_table[key])
        self.commit()
        self.close()
|
Loading…
Reference in New Issue
Block a user