301 Commits

Author SHA1 Message Date
6a4417c954 added and scheduled MGM assembly 2025-05-19 16:18:31 +02:00
f600ee499a Merge pull request 'add first generator' (#234) from generators into main
Reviewed-on: #234
2025-05-19 15:23:37 +02:00
3f28f60c59 add first generator 2025-05-19 15:21:51 +02:00
44b1f2b037 Merge pull request 'possible fix for clippy warning' (#233) from possible-fix-for-clippy-warning into main
Reviewed-on: #233
2025-05-19 15:15:24 +02:00
4b22958b34 possible fix for clippy warning 2025-05-19 15:14:59 +02:00
a711c6acd9 Merge pull request 'bump spacepackets' (#232) from bump-spacepackets into main
Reviewed-on: #232
2025-05-19 14:19:00 +02:00
99a954a1f5 some tweaks 2025-05-19 14:18:26 +02:00
4a4fd7ac2c use git deps 2025-05-16 19:49:43 +02:00
9bf08849a2 annoying 2025-05-16 19:43:45 +02:00
b8f7fefe26 Merge pull request 'should fix tests' (#231) from test-fix into main
Reviewed-on: #231
2025-05-10 16:31:14 +02:00
a501832698 should fix tests 2025-05-10 16:30:37 +02:00
c7284d3f1c Merge pull request 'minor wording improvements' (#230) from some-wording-improvements into main
Reviewed-on: #230
2025-05-10 16:27:37 +02:00
18263d4568 Merge branch 'main' into some-wording-improvements 2025-05-10 16:27:29 +02:00
95519c1363 minor wording improvements 2025-05-10 16:27:04 +02:00
2ec32717d0 Merge pull request 'use released dependencies' (#229) from use-relesed-dependencies into main
Reviewed-on: #229
2025-05-10 16:24:37 +02:00
bfdd777685 use released dependencies 2025-05-10 16:24:06 +02:00
b54c2b7863 Merge pull request 'spacepackets update' (#227) from spacepackets-update into main
Reviewed-on: #227
2025-05-10 16:19:07 +02:00
19f3355283 Merge pull request 'update SIM suite name' (#228) from update-sim-suite-name into main
Reviewed-on: #228
2025-05-10 16:18:16 +02:00
4aeb28d2f1 update SIM suite name 2025-05-10 16:14:48 +02:00
ddc4544456 spacepackets update, clippy fixes 2025-05-10 16:13:45 +02:00
9ab36c0362 Merge pull request 'update docs' (#226) from update-docs into main
Reviewed-on: #226
2025-05-06 16:21:17 +02:00
b95769c177 Merge branch 'main' into update-docs 2025-05-06 16:21:10 +02:00
bb20533ae1 Merge pull request 'remove token API for verification creator core' (#224) from simplify-verification-reporter-core into main
Reviewed-on: #224
2025-05-05 19:03:34 +02:00
27cacd0f43 remove token API for verification creator core 2025-05-05 19:02:37 +02:00
bd6488e87b update docs 2025-04-01 20:58:18 +02:00
52ec0d44aa Merge pull request 'bump README links' (#223) from update-readme-links into main
Reviewed-on: #223
2025-04-01 15:15:14 +02:00
4fa9f8d685 bump README links 2025-04-01 15:05:28 +02:00
767cf6b1a5 Merge pull request 'prepare alpha release' (#222) from prep-satrs-v0.3.0-alpha.0 into main
Reviewed-on: #222
2025-02-18 16:52:15 +01:00
5bf5518591 prepare alpha release 2025-02-18 16:49:33 +01:00
f3e0609910 Merge pull request 'Mode Sequences and Tables' (#211) from mode-tables-sequence-tables into main
Reviewed-on: #211
2025-02-13 12:44:28 +01:00
05106354e3 Mode Tree Feature Update 2025-02-13 12:18:05 +01:00
719b70d834 Merge pull request 'use static cell instead of custom struct' (#220) from use-static-cell into main
Reviewed-on: #220
2025-02-07 16:21:23 +01:00
d4082fa098 use static cell instead of custom struct 2025-02-07 16:15:23 +01:00
c9c9e4f735 Merge pull request 'update STM32H7 project' (#221) from update-stm32h7-project into main
Reviewed-on: #221
2025-02-07 16:15:08 +01:00
2c92a95184 update STM32H7 project 2025-02-07 15:43:03 +01:00
1a1d330814 Merge pull request 'update some outdated dependencies' (#219) from update-outdated-deps into main
Reviewed-on: #219
2025-01-31 18:37:49 +01:00
ef7b5d66fe update some outdated dependencies 2025-01-31 18:37:12 +01:00
b3497e0592 Merge pull request 'update embedded examples' (#218) from update-embedded-examples into main
Reviewed-on: #218
2025-01-31 18:32:41 +01:00
73286e36c2 update embedded examples 2025-01-31 18:20:06 +01:00
ed266a11f6 Merge pull request 'Avoid static muts for static pool buffer declaration' (#215) from avoid-static-muts-static-pools into main
Reviewed-on: #215
2025-01-31 12:12:57 +01:00
af972e174f macro fix 2025-01-31 12:04:40 +01:00
3d12083c16 use full path to helper type 2025-01-31 11:56:38 +01:00
e8d2c020fa Merge pull request 'use released nexosim version' (#217) from use-released-nexosim into main
Reviewed-on: #217
2025-01-31 11:33:56 +01:00
69e172b633 avoid static muts for static pools 2025-01-31 11:27:09 +01:00
e1dda751bc use released nexosim version 2025-01-31 11:24:56 +01:00
b01628d8ef Merge pull request 'fix minisim tests' (#216) from update-minisim into main
Reviewed-on: #216
2025-01-31 11:18:03 +01:00
31844e4fe2 fix tests 2025-01-31 11:17:39 +01:00
738872f421 Merge pull request 'update mini simulator' (#214) from update-minisim into main
Reviewed-on: #214
2025-01-30 18:57:27 +01:00
309e39999f small tweak 2025-01-30 18:56:44 +01:00
1c43c3adf9 update mini simulator 2025-01-30 18:55:47 +01:00
d9e0abffa7 Merge pull request 'clippy and test fix' (#213) from clippy-fixes into main
Reviewed-on: #213
2025-01-23 17:22:46 +01:00
abec9dd448 clippy and test fix 2025-01-23 17:19:26 +01:00
c18fbb59ad Merge pull request 're-run formatter' (#212) from run-afmt into main
Reviewed-on: #212
2025-01-23 17:04:58 +01:00
c91ddcd658 re-run formatter 2025-01-23 17:02:16 +01:00
c5fa1955d7 Merge pull request 'Scheduling Table for std runtimes' (#210) from scheduling-table into main
Reviewed-on: #210
2024-11-20 17:22:08 +01:00
c9708810e6 changelog 2024-11-20 17:21:54 +01:00
79d0c2e222 Scheduling Table for std runtimes 2024-11-20 17:20:42 +01:00
8e87875c0e Merge pull request 'use released satrs-shared' (#209) from use-released-satrs-shared into main
Reviewed-on: #209
2024-11-15 12:15:06 +01:00
1ac6c02c06 use released satrs-shared 2024-11-15 12:14:33 +01:00
6e7907522e Merge pull request 'changelog satrs-shared' (#208) from satrs-shared-changelog into main
Reviewed-on: #208
2024-11-15 12:08:27 +01:00
f747a5efdc changelog satrs-shared 2024-11-15 12:07:32 +01:00
6ffd55cec2 Merge pull request 'bump satrs-shared' (#207) from satrs-shared-v0.2.1 into main
Reviewed-on: #207
2024-11-15 11:59:00 +01:00
5e51b3de42 bump satrs-shared 2024-11-15 11:52:48 +01:00
bd059a2541 Merge pull request 'Bump dependecies' (#206) from bump-dependencies into main
Reviewed-on: #206
2024-11-15 11:49:19 +01:00
1a4d764f25 bump dependencies 2024-11-15 11:37:14 +01:00
b38c617fae Merge pull request 'clippy fixes' (#205) from small-clippy-fix into main
Reviewed-on: #205
2024-11-04 13:55:27 +01:00
edcd5491f1 clippy fixes 2024-11-04 13:51:36 +01:00
b4cb034b73 Merge pull request 'Extract CFDP' (#202) from extract-cfdp into main
Reviewed-on: #202
2024-11-04 12:19:32 +01:00
47c86aea5c Extracted CFDP components 2024-11-04 12:18:41 +01:00
c8bba48e76 Merge pull request 'bump spacepackets' (#204) from satrs-shared-spacepackets-bump into main
Reviewed-on: #204
2024-11-04 12:07:12 +01:00
176da4838a bump spacepackets 2024-11-04 12:06:10 +01:00
8114195bc6 Merge pull request 'satrs-shared v0.2.0' (#203) from bump-satrs-shared into main
Reviewed-on: #203
2024-11-04 11:51:20 +01:00
1726e34fa7 satrs-shared v0.2.0 2024-11-04 11:50:17 +01:00
1f2d6c9474 Merge pull request 'another small documentation fix' (#199) from another-small-doc-fix into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #199
2024-06-03 16:09:06 +02:00
9960930339 another small documentation fix
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-06-03 16:08:38 +02:00
cb270964a1 Merge pull request 'small README fix' (#198) from small-readme-fix into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #198
2024-06-03 16:02:33 +02:00
a45e634214 small README fix
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-06-03 15:52:57 +02:00
9e4132706c Merge pull request 'update README and book' (#197) from readme-and-book-updates into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #197
2024-06-03 15:32:24 +02:00
36d889a504 update README and book
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-06-03 15:31:43 +02:00
97a6510af7 Merge pull request 'Integrate Simulator into Example' (#185) from sim-mgm-update into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #185
2024-06-03 15:25:08 +02:00
2e5d6a5c41 update example changelog
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-06-03 15:22:27 +02:00
29167736db Integration of the mini simulator into the sat-rs example
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-06-03 15:18:23 +02:00
a4c433a7be Merge pull request 'fix sat-rs build by using released asynchronix build' (#196) from fix-minisim-deps into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #196
2024-06-02 00:10:19 +02:00
472a8ce0f9 fix sat-rs build by using released asynchronix build
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-06-02 00:09:08 +02:00
e753319dac Merge pull request 'Airbus BA ins Readme' (#195) from lkoester-patch-1 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #195
Reviewed-by: Robin Müller <muellerr@irs.uni-stuttgart.de>
2024-05-29 10:11:42 +02:00
99dddf36f3 Airbus BA ins Readme
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
Does this fit under Flight Heritage? It never actually flew, but heritage still fits.
2024-05-29 09:50:55 +02:00
a819feeaa2 Merge pull request 'Update embedded examples' (#194) from update-embedded-examples into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #194
2024-05-25 13:07:29 +02:00
46ce3fc772 update folder name
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-05-25 12:35:26 +02:00
8d27bdf3bf Update embedded examples
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-05-25 12:32:46 +02:00
3d2a46f044 Merge pull request 'flight heritage segment in docs' (#193) from add-flight-heritage-docs into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #193
2024-05-22 19:13:18 +02:00
1f192af262 improve phrasing and links again 2024-05-22 19:11:43 +02:00
3f78c200ad improve phrasing
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-05-22 19:09:43 +02:00
d73dfcdd67 flight heritage segment in docs
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-05-22 19:05:05 +02:00
5cae0f7036 Merge pull request 'Heapless memory pool' (#191) from heapless-mem-pool into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #191
2024-05-22 13:16:29 +02:00
832250d211 minor improvements
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-05-22 12:55:50 +02:00
3c3b4349e8 that should do the job
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-05-21 19:48:56 +02:00
acf73e93b1 Introduce heapless memory pools
Some checks failed
Rust/sat-rs/pipeline/pr-main There was a failure building this commit
2024-05-21 18:31:19 +02:00
0b2d4f6187 Merge pull request 'satrs v0.2.1' (#190) from prep-satrs-v0.2.1 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #190
2024-05-19 13:19:23 +02:00
f7016b940a changelog
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
Rust/sat-rs/pipeline/pr-main There was a failure building this commit
2024-05-19 08:14:31 +02:00
397ecd0c40 prep patch release 2024-05-19 08:13:32 +02:00
422f2c11ab Merge pull request 'removed unsafe block which is not necessary' (#189) from remove-unnecessary-unsafe-block into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #189
2024-05-18 12:46:58 +02:00
37e945fd91 Merge branch 'main' into remove-unnecessary-unsafe-block 2024-05-18 12:46:44 +02:00
45379858f0 Merge pull request 'TCP server config default improvements' (#187) from tcp-server-cfg-improvements into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #187
2024-05-18 12:44:53 +02:00
7c194ab543 Merge branch 'main' into tcp-server-cfg-improvements
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-05-18 12:44:42 +02:00
bca1d7292a removed unsafe block which is not necessary
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-05-13 17:01:26 +02:00
cdcb9cae1c Merge pull request 'cross ref docs for events' (#188) from cross-ref-docs-for-events into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #188
2024-05-13 15:57:00 +02:00
9dcbd42862 cross ref docs
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-05-13 15:36:09 +02:00
da05efc16d TCP server config default improvements
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-05-13 15:31:51 +02:00
e38e25a998 Merge pull request 'update the satrs example graph' (#186) from satrs-example-graph-update into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #186
2024-05-13 15:29:39 +02:00
14b381cf4a update the satrs example graph
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-05-10 17:03:46 +02:00
3746e9ebb0 Merge pull request 'Add timestamp to SimRequest' (#140) from add-timestamp-to-sim-request into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #140
2024-05-08 14:58:13 +02:00
d2fc783562 Merge remote-tracking branch 'origin/main' into add-timestamp-to-sim-request
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-05-08 14:57:12 +02:00
282f799203 Merge pull request 'prep v0.2.0' (#184) from prep_v0.2.0 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #184
2024-05-02 14:57:14 +02:00
46dbb4309b new clippy check
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-05-02 14:44:22 +02:00
42d1257e83 prepare next release v0.2.0
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-05-02 14:39:30 +02:00
583f6ce4d2 Merge pull request 'small robustness fix' (#183) from robustness-fix into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #183
2024-05-02 13:41:55 +02:00
408803fe86 small robustness fix
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-05-02 13:41:27 +02:00
9ffe4d0ae0 Merge pull request 'smaller improvements' (#182) from smaller-improvements into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #182
2024-05-02 12:28:39 +02:00
e37061dcf0 smaller improvements 2024-05-02 12:28:09 +02:00
3a2ac11407 Merge pull request 'bounded the PUS stack hot loop' (#181) from pus-hot-loop-bounding into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #181
2024-05-02 12:02:02 +02:00
23327a7786 bounded the PUS stack hot loop
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-05-02 12:01:24 +02:00
89d5a1022f Merge pull request 'optimize PUS stack code' (#180) from optimize-pus-stack-code into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #180
2024-05-02 11:59:26 +02:00
a00c843698 optimize PUS stack code
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-05-02 11:58:46 +02:00
c586fd7fef Merge pull request 'try unifying some direct PUS handler code' (#179) from unify-some-example-code into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #179
2024-05-02 11:29:11 +02:00
7e78e70a17 try unifying some direct PUS handler code
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-05-02 11:14:05 +02:00
424dfc439c Merge pull request 'simplified PUS stack' (#178) from simplify-pus-stack into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #178
2024-05-02 10:01:16 +02:00
45eb2f1343 cargo fmt
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-05-01 21:16:26 +02:00
736eb74e66 simplified PUS stack
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-05-01 21:13:08 +02:00
29f71c2a57 Merge pull request 'Reworked generic parameter handling for PUS service 1 and 5' (#175) from rework-generic-params-for-pus into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #175
2024-04-30 15:42:05 +02:00
f0d08b65a4 Merge branch 'main' into rework-generic-params-for-pus
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-30 13:35:08 +02:00
c7a74a844c Merge pull request 'renamed thread name' (#176) from small-tweak into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #176
2024-04-30 13:31:46 +02:00
9c60427f89 Reworked generic parameter handling for PUS service 1 and 5
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-30 13:29:55 +02:00
958ab9bab6 renamed thread name
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-25 11:11:31 +02:00
312849bddb Merge pull request 'More improvements for Event API' (#173) from improve-event-api into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #173
2024-04-24 19:34:33 +02:00
b0159a3ba7 prep next release candidate
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-24 19:18:45 +02:00
c477739f6d more improvements for API, tests for example event module
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-24 18:50:08 +02:00
b7ce039406 add optional defmt support for events
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-24 18:36:00 +02:00
4736d40997 Merge pull request 'simplified event management' (#172) from simplify-event-management into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #172
2024-04-24 15:58:00 +02:00
5ec5124ea3 Updated events modules and docs
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-24 14:30:45 +02:00
5e43259d4f Merge branch 'main' into add-timestamp-to-sim-request
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-23 16:36:30 +02:00
bfaddd0ebb Merge pull request 'prep next release' (#171) from pre-v0.2.0-rc.4 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #171
2024-04-23 16:32:03 +02:00
423a068736 prep next release
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-23 14:55:19 +02:00
8022af1bf2 Merge pull request 'update Python client for example' (#170) from update-example-pyclient into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #170
2024-04-23 14:52:04 +02:00
acd2260dfd update Python client for example
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-23 14:22:50 +02:00
e5ee698dc4 Merge pull request 'TCP server improvements' (#169) from tcp-ip-improvements into main
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
Reviewed-on: #169
2024-04-23 13:21:41 +02:00
e8907c74d4 changelog
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-23 11:23:00 +02:00
536051e05b improvements and fixes
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-22 20:29:14 +02:00
701db659e9 Merge pull request 'formatting' (#168) from fmt into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #168
2024-04-22 15:47:58 +02:00
4b8e54b91b formatting
Some checks are pending
Rust/sat-rs/pipeline/head This commit looks good
Rust/sat-rs/pipeline/pr-main Build started...
2024-04-22 10:42:49 +02:00
870d60cfd6 Merge pull request 'bugfix and improvements for CCSDS SP decoder' (#167) from ccsds-decoder-bugfix into main
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
Reviewed-on: #167
2024-04-22 10:23:12 +02:00
9e62e4292c bugfix and improvements for CCSDS SP decoder
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-04-20 11:19:46 +02:00
b2e77fbc09 Merge pull request 'requires another hotfix' (#166) from and-another-docs-rs-hotfix into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #166
2024-04-17 20:42:09 +02:00
5371928496 docs_rs build argument hotfix
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-04-17 20:41:30 +02:00
31cddbd99b Merge pull request 'bump msrv version' (#165) from bump-msrv into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #165
2024-04-17 18:56:21 +02:00
7c00e13e70 bump msrv version
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-17 18:10:32 +02:00
aa72063454 Merge pull request 'prepare next release candidate' (#164) from prep-v0.2.0-rc.2 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #164
2024-04-17 18:03:28 +02:00
7b37b76695 prepare next release candidate
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-17 17:19:38 +02:00
ea5d95c12d Merge pull request 'why is this an issue for docs-rs?' (#163) from fix-for-docs-build-docs-rs into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #163
2024-04-17 17:09:36 +02:00
c62adbb300 Merge branch 'main' into fix-for-docs-build-docs-rs
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-04-17 16:41:45 +02:00
9242b8a607 Merge pull request 'prepare MIB release' (#162) from prepare-mib-release into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #162
2024-04-17 16:37:34 +02:00
4a27d2605d why is this an issue for docs-rs?
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-17 16:34:56 +02:00
8195245481 prepare MIB release
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
Rust/sat-rs/pipeline/pr-main Build queued...
2024-04-17 16:17:30 +02:00
f6f7519625 Merge pull request 'small cleanup' (#161) from small-cargo-toml-cleaning into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #161
2024-04-17 16:03:03 +02:00
0f0fbc1a18 small cleanup
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-17 15:17:46 +02:00
6e55e2ac95 Merge pull request 'Prepare next releases' (#160) from prep-next-satrs-releases into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #160
2024-04-17 14:58:01 +02:00
2f96bfe992 changelog sat-rs
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-17 10:03:17 +02:00
52aafb3aab prep next releases
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-17 10:01:46 +02:00
6ce9cb5ead Merge pull request 'use released satrs-shared' (#159) from use-released-satrs-shared into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #159
2024-04-16 21:31:21 +02:00
273f79d1e6 use release satrs-shared
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-16 21:07:51 +02:00
622221835e Merge pull request 'allow sat-rs shared spacepackets range' (#158) from satrs-shared-spacepackets-range into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #158
2024-04-16 20:54:59 +02:00
e396ad2e7a small fix
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-16 19:52:32 +02:00
772927d50b allow spacepackets range
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-16 19:50:46 +02:00
be9a45e55f Merge pull request 'changelog satrs-shared v0.1.3' (#157) from changelog-satrs-shared into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #157
2024-04-16 19:48:12 +02:00
eee8a69550 changelog satrs-shared v0.1.3
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-04-16 19:47:36 +02:00
f7a6d3ce47 Merge pull request 'bump spacepackets to v0.11.0' (#156) from bump-spacepackets into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #156
2024-04-16 19:46:17 +02:00
df97a3a93e small adjustment
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-04-16 19:39:07 +02:00
42750e08c0 bump spacepackets to v0.11.0
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-16 19:26:46 +02:00
786671bbd7 Merge pull request 're-worked TMTC modules' (#155) from rework-tmtc-modules into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #155
2024-04-16 11:10:52 +02:00
63f37f0917 Re-worked TMTC modules
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-16 11:04:22 +02:00
8cfe3b81e7 Merge pull request 'bugfix for targeted services' (#154) from bugfix-targeted-services into main
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
Reviewed-on: #154
2024-04-13 15:10:14 +02:00
de50bec562 bugfix for targeted services
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-10 17:18:53 +02:00
39ab9fa27b Merge pull request 'closure param name tweak' (#153) from small-example-tweak into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #153
2024-04-10 17:17:13 +02:00
1dbc81a8f5 closure param name tweak
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-10 15:51:08 +02:00
1ad74ee1d5 Merge pull request 'this makes a bit more sense' (#152) from naming-improvement-pus-actions into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #152
2024-04-10 15:37:39 +02:00
f96fe6bdc0 this makes a bit more sense
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-04-10 15:19:08 +02:00
d43a8eb571 Merge pull request 'improve example structure' (#151) from improve-example-structure into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #151
2024-04-10 13:19:41 +02:00
0bbada90ef improve example structure
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-04-10 12:58:51 +02:00
3375780e00 Merge pull request 'Refactor and improve TCP servers' (#150) from refactor-tcp-server into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #150
2024-04-10 12:29:23 +02:00
de028ed827 bugfix in example
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build started...
2024-04-10 11:54:05 +02:00
d27ac5dfc9 refactored TCP server
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-04-10 11:28:16 +02:00
c67b7cb93a this is non-trivial
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-04-09 19:40:55 +02:00
f71ba3e8d8 Merge pull request 'introduce stop signal handling for TCP' (#149) from tcp-server-stop-signal into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #149
2024-04-09 18:11:29 +02:00
975cd927f4 Merge branch 'main' into add-timestamp-to-sim-request
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-09 17:27:57 +02:00
3cc9dd3c48 introduce stop signal handling
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-09 17:21:43 +02:00
0fec994028 Merge pull request 'Update STM32F3 example' (#148) from update-stm32f3-example-tmtc-handling into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #148
2024-04-04 18:33:00 +02:00
226a134aff Update STM32F3 example
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
1. New command to change blinky frequency.
2. Bump used sat-rs version.
2024-04-04 18:21:30 +02:00
aac59ec7c1 Merge pull request 'Major refactoring and update of PUS module' (#146) from pus-modules-update into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #146
2024-04-04 15:27:29 +02:00
ce7eb8650f changelog
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-04-04 15:20:07 +02:00
df2733a176 Major refactoring and update of PUS module 2024-04-04 15:18:53 +02:00
344fe6a4c0 Merge pull request 'Simplify low level PUS API' (#145) from simplify-low-level-pus-api into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #145
2024-03-29 23:41:14 +01:00
a5941751d7 Simplify low-level PUS API for verification and events
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-29 16:22:40 +01:00
977e29894b Merge pull request 'STM32 defmt + RTT support' (#144) from stm32-defmt-support into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #144
2024-03-29 12:34:02 +01:00
dd1417a368 Upgrade example to use defmt/RTT/probe-rs
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-28 23:47:07 +01:00
3195cf5111 update config.toml template file
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-28 23:06:16 +01:00
8280c70682 Merge pull request 'Framework to Library' (#143) from framework-to-library into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #143
2024-03-27 14:33:02 +01:00
19c5aa9b83 update rust book as well
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-27 14:17:59 +01:00
713b4e097b update the README 2024-03-27 14:14:45 +01:00
9039c1b59a Merge branch 'main' into add-timestamp-to-sim-request
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-25 16:14:04 +01:00
746b31ec5d Merge pull request 'satrs-example RTIC v2' (#142) from satrs-example-rtic-v2 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #142
2024-03-25 16:09:06 +01:00
2318cd4293 Update satrs-example for the STM32F3
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Update RTIC to v2
- Update Python client version
2024-03-25 14:26:07 +01:00
a6b57d3eb9 Merge pull request 'Update STM32F3 example' (#141) from update-stm32f3-example into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #141
2024-03-22 15:19:58 +01:00
bddd3132d4 added some more instructions for Python
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-22 13:18:20 +01:00
6a6ffba754 why have two files
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-03-22 13:09:27 +01:00
d27a41e4de Start updating the STM32F3 Discovery example
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-22 13:08:01 +01:00
972bf19188 cargo fmt
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-13 12:03:11 +01:00
9d711d2b73 add fern logging
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-03-13 10:49:24 +01:00
d0005cdd63 this works
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-03-13 10:36:08 +01:00
f00e6cf50c we require an asynchronix update here I guess
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-03-12 18:25:21 +01:00
128df9a813 Merge pull request 'First version of asynchronix based mini simulator' (#139) from init-minisim into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #139
2024-03-11 10:41:24 +01:00
7387be3bc3 new request/response API
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-11 10:26:48 +01:00
d3fb504545 clean up manifest file
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-03-09 15:14:15 +01:00
ae8e39f626 First version of asynchronix based mini simulator
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
- Basic simulator with 3 devices
- Can be driven via a UDP interface
- Design allows driving the simulation via different interfaces in the future
  by using Request/Reply messaging.
2024-03-09 15:11:11 +01:00
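A minimal illustrative sketch of the request/reply-over-UDP idea described in the commit above. The port, command strings, and reply format are assumptions for illustration only and are not the actual satrs-minisim protocol.

```rust
use std::net::UdpSocket;

// Hypothetical wire format: the real satrs-minisim exchanges typed
// SimRequest/SimReply messages; plain text commands keep the sketch small.
fn main() -> std::io::Result<()> {
    let socket = UdpSocket::bind("127.0.0.1:7301")?;
    let mut buf = [0u8; 1024];
    loop {
        let (len, src) = socket.recv_from(&mut buf)?;
        let request = String::from_utf8_lossy(&buf[..len]);
        // Dispatch on the request and send exactly one reply back to the
        // sender, which is the request/reply pattern mentioned above.
        let reply = match request.trim() {
            "ping" => "pong".to_string(),
            "read mgm" => "mgm: [0.1, 0.2, 0.3]".to_string(),
            other => format!("unknown request: {other}"),
        };
        socket.send_to(reply.as_bytes(), src)?;
    }
}
```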
ab3d907d4e Merge pull request 'Refactor TMTC distribution modules' (#138) from ccsds-distrib-refactoring into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #138
2024-03-04 16:53:23 +01:00
3de5954898 Refactor TMTC distribution modules
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-03-04 16:26:34 +01:00
5600aa576c Merge pull request 'use generics for the PUS stack' (#134) from pus-stack-use-generics into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #134
2024-02-26 15:46:47 +01:00
88793cfa87 add some helper types
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-26 15:34:20 +01:00
223b637eb8 use generics for the PUS stack
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-26 15:18:15 +01:00
cf9b115e1e Merge pull request 'Refactored Verification Reporter Module' (#132) from refactor-verification-mod into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #132
2024-02-26 11:58:57 +01:00
eea9b11b39 refactored verification reporter
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Use generics instead of trait objects where applicable.
2024-02-26 11:41:42 +01:00
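An illustrative Rust comparison of the general technique named in this commit, replacing a boxed trait object with a generic parameter so the sender type is resolved at compile time. None of these types are the actual satrs verification reporter API.

```rust
// All names here are illustrative, not the satrs API.
pub trait TmSender {
    fn send_tm(&self, tm: &[u8]);
}

// Simple sender that collects telemetry into a vector for inspection.
pub struct VecSender(pub std::cell::RefCell<Vec<Vec<u8>>>);

impl TmSender for VecSender {
    fn send_tm(&self, tm: &[u8]) {
        self.0.borrow_mut().push(tm.to_vec());
    }
}

// Trait-object variant: dynamic dispatch plus a heap allocation for the box.
pub struct DynReporter {
    pub sender: Box<dyn TmSender>,
}

// Generic variant: static dispatch, no allocation, also usable without alloc.
pub struct GenericReporter<S: TmSender> {
    pub sender: S,
}

impl<S: TmSender> GenericReporter<S> {
    pub fn report(&self, data: &[u8]) {
        self.sender.send_tm(data);
    }
}

fn main() {
    let reporter = GenericReporter { sender: VecSender(Default::default()) };
    reporter.report(b"verification success");
    assert_eq!(reporter.sender.0.borrow().len(), 1);

    let dyn_reporter = DynReporter { sender: Box::new(VecSender(Default::default())) };
    dyn_reporter.sender.send_tm(b"boxed variant still works");
}
```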
f21ab0017e Merge pull request 'fixed for scheduler' (#133) from scheduler-fixes into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #133
2024-02-26 11:15:50 +01:00
a7ca00317f cargo fmt
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-26 11:00:48 +01:00
75fda42f4f fixed for scheduler
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-02-26 10:53:33 +01:00
faf0f6f6c6 Merge pull request 'refactored event manager' (#131) from refactor-event-man into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #131
2024-02-23 14:31:48 +01:00
a690c7720d Refactored event manager
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-23 14:19:30 +01:00
b48b5b8caa Merge pull request 'bump example patch version' (#129) from prepare-example-release into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #129
2024-02-21 13:51:49 +01:00
238c3a8d43 bump example patch version
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-21 11:34:35 +01:00
de8c0bc13e Merge pull request 'Use released package versions again' (#128) from bump-example into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #128
2024-02-21 11:34:07 +01:00
012eb82f42 bump example
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-21 11:22:16 +01:00
d26f6cbe27 Merge pull request 'sat-rs v0.2.0-rc.0' (#127) from prepare-satrs-release-candidate into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #127
2024-02-21 11:13:05 +01:00
82d3215761 changelog
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-21 11:08:22 +01:00
2b80244636 prepare release candidate
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-21 10:06:58 +01:00
f1611cd5b8 Merge pull request 'Custom badges' (#125) from try-custom-badges into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #125
2024-02-20 14:57:10 +01:00
808126ee41 better green
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-02-20 14:55:52 +01:00
05df24447b let's try this
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-20 14:54:58 +01:00
b229360233 try custom badges
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-20 14:52:01 +01:00
52be26829b Merge pull request 'First PUS handler abstractions with request mapping' (#121) from move-some-requests into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #121
2024-02-20 14:42:02 +01:00
ca2c8aa359 update changelog
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-20 14:36:34 +01:00
ba03150178 Added high-level abstraction for some PUS services
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
Introduced high-level abstractions for targetable requests in general.

- PUS Service 3 (HK) abstraction for targetable HK requests
- PUS Service 8 (Action) abstraction for targetable action requests
2024-02-20 14:33:21 +01:00
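A rough sketch of the "targetable request" idea from the commit above: each request carries a target ID and is routed to the queue registered for that target. All type names and channel choices here are assumptions, not the satrs request/routing API.

```rust
use std::collections::HashMap;
use std::sync::mpsc;

// Illustrative only: satrs uses its own request, reply, and component ID types.
type TargetId = u32;

#[derive(Debug)]
enum Request {
    HkOneShot { unique_id: u32 },
    Action { action_id: u32, params: Vec<u8> },
}

struct RequestRouter {
    senders: HashMap<TargetId, mpsc::Sender<Request>>,
}

impl RequestRouter {
    // Look up the handler queue for the addressed target and forward the request.
    fn route(&self, target: TargetId, req: Request) -> Result<(), String> {
        self.senders
            .get(&target)
            .ok_or_else(|| format!("unknown target {target}"))?
            .send(req)
            .map_err(|e| e.to_string())
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    let router = RequestRouter { senders: HashMap::from([(1, tx)]) };
    router.route(1, Request::HkOneShot { unique_id: 25 }).unwrap();
    router.route(1, Request::Action { action_id: 3, params: vec![0x01] }).unwrap();
    println!("{:?}", rx.recv().unwrap());
    println!("{:?}", rx.recv().unwrap());
}
```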
4e45bfa7e6 Merge pull request 'add mode tree graph' (#124) from add-mode-tree-graph into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #124
2024-02-16 19:21:54 +01:00
93c01c8c22 add mode tree graph
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-16 17:42:44 +01:00
2d062f3010 Merge pull request 'rename Python client' (#123) from rename-pyclient into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #123
2024-02-16 13:27:28 +01:00
01d9a85976 rename Python client
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-16 13:24:16 +01:00
fa7cd39f3e Merge pull request 'added goal graph for example' (#122) from example-goal-graph into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #122
2024-02-16 13:10:53 +01:00
5bb37d8e87 added goal graph for example
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-16 12:54:50 +01:00
813e221030 Merge pull request 'prepare example release v0.1.0' (#120) from example-release into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #120
2024-02-13 11:22:22 +01:00
18cec8bcf0 prepare example release v0.1.0
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-13 11:07:27 +01:00
d4a122e462 Merge pull request 'of course, something is missing..' (#119) from of-course-forgot-a-link into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #119
2024-02-12 18:28:02 +01:00
7af327d077 of course, something is missing..
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-12 18:17:50 +01:00
3a6cd6712d Merge pull request 'MIB docs update' (#118) from mib-docs-update into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #118
2024-02-12 18:11:54 +01:00
c4eba03043 Merge branch 'main' into mib-docs-update
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-02-12 18:11:05 +01:00
e9e5c999ec better documentation
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-02-12 18:10:33 +01:00
f14a85cb84 add missing links for MIB
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-02-12 18:04:43 +01:00
c3902c2c06 Merge pull request 'Corrections for docs and links' (#117) from links-and-docs-corrections into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #117
2024-02-12 17:56:28 +01:00
24f82d4c5e prep v0.1.1
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-02-12 17:56:01 +01:00
44ff62e947 more link corrections
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-12 17:34:16 +01:00
9cf80b44ea homepage link corrections
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-12 17:29:48 +01:00
f4bc33aefa link corrections
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-12 17:25:22 +01:00
445fdfe066 Merge pull request 'prepare sat-rs release' (#116) from prepare-satrs-v0.1.0 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #116
2024-02-12 17:12:18 +01:00
7c3879fbce last preparations
Some checks are pending
Rust/sat-rs/pipeline/pr-main Build queued...
2024-02-12 17:11:59 +01:00
c3e9d4441f prepare sat-rs release
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-12 16:55:19 +01:00
bbcad18dfa Merge pull request 'changelog for the MIB' (#115) from changelog-mib into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #115
2024-02-12 16:42:02 +01:00
2607252be2 changelog for the MIB
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-02-12 16:41:32 +01:00
8b79e967bb Merge pull request 'prepare MIB release v0.1.0' (#114) from prep-mib-release into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #114
2024-02-12 16:40:07 +01:00
c1e1c10f2d delete commented config
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-12 16:23:56 +01:00
eef6f42d3b prepare MIB release v0.1.0
Some checks are pending
Rust/sat-rs/pipeline/head Build queued...
2024-02-12 16:23:00 +01:00
6bfd37ba24 Merge pull request 'tiny improvement' (#113) from tiny-doc-improvement into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #113
2024-02-12 16:20:31 +01:00
94fbb50e11 better english 2024-02-12 16:20:21 +01:00
0caab60d74 tiny improvement
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-12 16:19:28 +01:00
e2e6605f50 Merge pull request 'satrs-shared v0.1.1' (#112) from satrs-shared-patch into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #112
2024-02-12 16:18:38 +01:00
0b99a40c6f prep patch release
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-12 15:59:57 +01:00
c635c7eed3 Merge pull request 'restructure the crate' (#111) from restructure-crate into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #111
2024-02-12 15:56:27 +01:00
de4e6183b3 Re-structure sat-rs
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Add new shared subcrate satrs-shared to split off some shared
  components not expected to change very often.
- Rename `satrs-core` to `satrs`. It is expected that sat-rs will remain
  the primary crate, so the core information is superfluous, and core also
  implies stability, which will not be the case for some time.
2024-02-12 15:51:37 +01:00
f58a4eaee5 Merge pull request 'released version' (#110) from bump-example-released-version into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #110
2024-02-12 13:44:19 +01:00
544488eaa3 released version
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
2024-02-12 13:25:07 +01:00
b06b4150b1 Merge pull request 'prepare MIB release' (#109) from mib-v0.1.0-alpha.2 into main
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
Reviewed-on: #109
2024-02-12 13:21:34 +01:00
4e16790092 prepare MIB release
Some checks failed
Rust/sat-rs/pipeline/head There was a failure building this commit
2024-02-12 13:16:11 +01:00
20d5212710 Merge pull request 'satrs-core v0.1.0-alpha.3' (#108) from core-v0.1.0-alpha.3 into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #108
2024-02-12 13:13:12 +01:00
2dd38c163f better name for new trait
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-12 12:54:25 +01:00
79d095b1f7 some doc improvements
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-12 12:48:28 +01:00
377ffc052c bump release
Some checks are pending
Rust/sat-rs/pipeline/head Build started...
2024-02-12 12:43:17 +01:00
c39ce99e2c Merge pull request 'Add Static Pool Spillover feature' (#107) from add-pool-spillover into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #107
2024-02-12 12:21:00 +01:00
c0692a3523 Added static pool spillover feature
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Allows utilizing the full pool even if some subpools are full.
2024-02-12 11:35:10 +01:00
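A simplified sketch of the spillover behavior described above, assuming subpools sorted by ascending block size: if the best-fitting subpool is exhausted, allocation falls back to the next larger subpool instead of failing. The data layout is hypothetical and much simpler than the satrs static pools.

```rust
// Toy model: each subpool has a fixed block size and a count of free blocks.
struct Subpool {
    block_size: usize,
    free_blocks: usize,
}

/// Pick a subpool index for `len` bytes, assuming `subpools` is sorted by
/// ascending block size. With `spillover` enabled, exhausted subpools are
/// skipped and the next larger one is tried instead.
fn select_subpool(subpools: &mut [Subpool], len: usize, spillover: bool) -> Option<usize> {
    for (idx, pool) in subpools.iter_mut().enumerate() {
        if pool.block_size < len {
            continue; // block too small for this request
        }
        if pool.free_blocks == 0 {
            if spillover {
                continue; // spill over into the next larger subpool
            }
            return None; // without spillover, a full best-fit subpool fails
        }
        pool.free_blocks -= 1;
        return Some(idx);
    }
    None
}

fn main() {
    let mut pools = vec![
        Subpool { block_size: 32, free_blocks: 0 },
        Subpool { block_size: 128, free_blocks: 4 },
    ];
    // 16 bytes would best fit the 32-byte subpool, but that subpool is full:
    assert_eq!(select_subpool(&mut pools, 16, false), None);
    assert_eq!(select_subpool(&mut pools, 16, true), Some(1));
}
```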
712dc718f9 Merge pull request 'Pool docs improvements' (#105) from pool-docs-improvements into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #105
2024-02-12 11:30:48 +01:00
a69347af7b Merge remote-tracking branch 'origin/main' into pool-docs-improvements
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-12 10:56:13 +01:00
3c7113c231 Merge pull request 'Refactor and improve pool abstraction' (#104) from refactor-pool-impl into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #104
2024-02-10 15:39:38 +01:00
9c310e7a36 Merge branch 'main' into refactor-pool-impl
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-10 12:54:36 +01:00
0bab5799e5 Merge pull request 'Graph for static pools' (#103) from graph-for-static-pools into main
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Reviewed-on: #103
2024-02-10 12:54:29 +01:00
b56bbc8c41 update pool docs
All checks were successful
Rust/sat-rs/pipeline/head This commit looks good
Rust/sat-rs/pipeline/pr-main This commit looks good
2024-02-10 12:54:13 +01:00
12ac5913aa Merge branch 'graph-for-static-pools' into refactor-pool-impl 2024-02-10 12:30:35 +01:00
d017b9c179 Refactored pool abstraction
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Redesigned PoolProvider and PoolProviderWithGuards to allow
  easier optimizations and increase flexibility
2024-02-10 11:59:26 +01:00
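PoolProvider and PoolProviderWithGuards are the trait names from this commit, but their real signatures differ from the sketch below, which only illustrates the general RAII guard pattern: reading through a guard deletes the stored entry when the guard is dropped, so pool memory cannot leak through a forgotten delete.

```rust
use std::collections::HashMap;

// Illustrative pool of stored packets addressed by a key; not the satrs API.
#[derive(Default)]
struct SimplePool {
    entries: HashMap<u64, Vec<u8>>,
    next_addr: u64,
}

impl SimplePool {
    fn add(&mut self, data: &[u8]) -> u64 {
        let addr = self.next_addr;
        self.next_addr += 1;
        self.entries.insert(addr, data.to_vec());
        addr
    }
}

// RAII guard: the entry is removed from the pool once the guard goes out of scope.
struct ReadGuard<'a> {
    pool: &'a mut SimplePool,
    addr: u64,
}

impl ReadGuard<'_> {
    fn read(&self) -> Option<&[u8]> {
        self.pool.entries.get(&self.addr).map(|v| v.as_slice())
    }
}

impl Drop for ReadGuard<'_> {
    fn drop(&mut self) {
        self.pool.entries.remove(&self.addr);
    }
}

fn main() {
    let mut pool = SimplePool::default();
    let addr = pool.add(b"packet");
    {
        let guard = ReadGuard { pool: &mut pool, addr };
        assert_eq!(guard.read(), Some(&b"packet"[..]));
    } // guard dropped here, entry removed
    assert!(pool.entries.is_empty());
}
```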
f3ca570e53 Update pool docs in sat-rs book
All checks were successful
Rust/sat-rs/pipeline/pr-main This commit looks good
- Add simple graph to show how it works
2024-02-10 11:57:19 +01:00
264 changed files with 185438 additions and 18360 deletions

.github/workflows/ci.yml vendored Normal file

@ -0,0 +1,66 @@
name: ci
on: [push, pull_request]
jobs:
check:
name: Check build
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo check
# Check example with static pool configuration
- run: cargo check -p satrs-example --no-default-features
test:
name: Run Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: Install nextest
uses: taiki-e/install-action@nextest
- run: cargo nextest run --all-features
- run: cargo test --doc --all-features
cross-check:
name: Check Cross-Compilation
runs-on: ubuntu-latest
strategy:
matrix:
target:
- armv7-unknown-linux-gnueabihf
- thumbv7em-none-eabihf
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
with:
targets: "armv7-unknown-linux-gnueabihf, thumbv7em-none-eabihf"
- run: cargo check -p satrs --target=${{matrix.target}} --no-default-features
fmt:
name: Check formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo fmt --all -- --check
docs:
name: Check Documentation Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- run: cargo +nightly doc --all-features --config 'build.rustdocflags=["--cfg", "docs_rs"]'
clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo clippy -- -D warnings

.gitignore vendored

@ -1,5 +1,10 @@
/target
target/
output.log
/Cargo.lock
output.log
output.log
/.idea/*
!/.idea/runConfigurations

Cargo.toml

@ -1,12 +1,14 @@
[workspace]
resolver = "2"
members = [
"satrs-core",
"satrs",
"satrs-mib",
"satrs-example",
"satrs-minisim",
"satrs-shared",
]
exclude = [
"satrs-example-stm32f3-disco",
"embedded-examples/stm32f3-disco-rtic",
"embedded-examples/stm32h7-nucleo-rtic",
]

README.md

@ -1,13 +1,23 @@
<p align="center"> <img src="misc/satrs-logo.png" width="40%"> </p>
<p align="center"> <img src="misc/satrs-logo-v2.png" width="40%"> </p>
[![sat-rs website](https://img.shields.io/badge/sat--rs-website-darkgreen?style=flat)](https://absatsw.irs.uni-stuttgart.de/projects/sat-rs/)
[![sat-rs book](https://img.shields.io/badge/sat--rs-book-darkgreen?style=flat)](https://robamu.github.io/sat-rs/book/)
[![Crates.io](https://img.shields.io/crates/v/satrs)](https://crates.io/crates/satrs)
[![docs.rs](https://img.shields.io/docsrs/satrs)](https://docs.rs/satrs)
sat-rs
=========
This is the repository of the sat-rs framework. Its primary goal is to provide re-usable components
This is the repository of the sat-rs library. Its primary goal is to provide re-usable components
to write on-board software for remote systems like rovers or satellites. It is specifically written
for the special requirements for these systems. You can find an overview of the project and the
link to the [more high-level sat-rs book](https://absatsw.irs.uni-stuttgart.de/projects/sat-rs/)
at the [IRS documentation website](https://absatsw.irs.uni-stuttgart.de/sat-rs.html).
link to the [more high-level sat-rs book](https://robamu.github.io/sat-rs/book/)
at the [IRS software projects website](https://absatsw.irs.uni-stuttgart.de/projects/sat-rs/).
This is early-stage software. Important features are missing. New releases
with breaking changes are released regularly, with all changes documented inside respective
changelog files. You should only use this library if you are willing to work in this
environment.
A lot of the architecture and general design considerations are based on the
[FSFW](https://egit.irs.uni-stuttgart.de/fsfw/fsfw) C++ framework which has flight heritage
@ -22,16 +32,24 @@ This project currently contains following crates:
Primary information resource in addition to the API documentation, hosted
[here](https://documentation.irs.uni-stuttgart.de/projects/sat-rs/). It can be useful to read
this first before delving into the example application and the API documentation.
* [`satrs-core`](https://egit.irs.uni-stuttgart.de/rust/satrs-launchpad/src/branch/main/satrs-core):
Core components of sat-rs.
* [`satrs-example`](https://egit.irs.uni-stuttgart.de/rust/satrs-launchpad/src/branch/main/satrs-example):
* [`satrs`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs):
Primary crate.
* [`satrs-example`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-example):
Example of a simple example on-board software using various sat-rs components which can be run
on a host computer or on any system with a standard runtime like a Raspberry Pi.
* [`satrs-mib`](https://egit.irs.uni-stuttgart.de/rust/satrs-launchpad/src/branch/main/satrs-mib):
* [`satrs-minisim`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-minisim):
Mini-Simulator based on [nexosim](https://github.com/asynchronics/nexosim) which
simulates some physical devices for the `satrs-example` application device handlers.
* [`satrs-mib`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-mib):
Components to build a mission information base from the on-board software directly.
* [`satrs-example-stm32f3-disco`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-example-stm32f3-disco):
Example of a simple example on-board software using sat-rs components on a bare-metal system
with constrained resources.
* [`satrs-stm32f3-disco-rtic`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/embedded-examples/stm32f3-disco-rtic):
Example of a simple example using low-level sat-rs components on a bare-metal system
with constrained resources. This example uses the [RTIC](https://github.com/rtic-rs/rtic)
framework on the STM32F3-Discovery device.
* [`satrs-stm32h-nucleo-rtic`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/embedded-examples/stm32h7-nucleo-rtic):
Example of a simple example using sat-rs components on a bare-metal system
with constrained resources. This example uses the [RTIC](https://github.com/rtic-rs/rtic)
framework on the STM32H743ZIT device.
Each project has its own `CHANGELOG.md`.
@ -41,9 +59,24 @@ Each project has its own `CHANGELOG.md`.
* [`spacepackets`](https://egit.irs.uni-stuttgart.de/rust/spacepackets): Basic ECSS and CCSDS
packet protocol implementations. This repository is re-exported in the
[`satrs-core`](https://egit.irs.uni-stuttgart.de/rust/satrs-launchpad/src/branch/main/satrs-core)
[`satrs`](https://egit.irs.uni-stuttgart.de/rust/satrs/src/branch/main/satrs)
crate.
# Flight Heritage
There is an active and continuous effort to get early flight heritage for the sat-rs library.
Currently this library has the following flight heritage:
- Submission as an [OPS-SAT experiment](https://www.esa.int/Enabling_Support/Operations/OPS-SAT)
which has also
[flown on the satellite](https://blogs.esa.int/rocketscience/2024/05/21/ops-sat-reentry-tomorrow-final-experiments-continue/).
The application is strongly based on the sat-rs example application. You can find the repository
of the experiment [here](https://egit.irs.uni-stuttgart.de/rust/ops-sat-rs).
- Development and use of a sat-rs-based [demonstration on-board software](https://egit.irs.uni-stuttgart.de/rust/eurosim-obsw)
alongside a Flight System Simulator in the context of a
[Bachelors Thesis](https://www.researchgate.net/publication/380785984_Design_and_Development_of_a_Hardware-in-the-Loop_EuroSim_Demonstrator)
at [Airbus Netherlands](https://www.airbusdefenceandspacenetherlands.nl/).
# Coverage
Coverage was generated using [`grcov`](https://github.com/mozilla/grcov). If you have not done so
@ -54,5 +87,5 @@ rustup component add llvm-tools-preview
cargo install grcov --locked
```
After that, you can simply run `coverage.py` to test the `satrs-core` crate with coverage. You can
After that, you can simply run `coverage.py` to test the `satrs` crate with coverage. You can
optionally supply the `--open` flag to open the coverage report in your webbrowser.


@ -15,7 +15,9 @@ RUN rustup install nightly && \
rustup component add rustfmt clippy
WORKDIR "/tmp"
# RUN cargo install mdbook --no-default-features --features search --vers "^0.4" --locked
# Install cargo-nextest
RUN curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin
# Install mdbook and mdbook-linkcheck
RUN curl -sSL https://github.com/rust-lang/mdBook/releases/download/v0.4.37/mdbook-v0.4.37-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory /usr/local/bin
RUN curl -sSL https://github.com/Michael-F-Bryan/mdbook-linkcheck/releases/latest/download/mdbook-linkcheck.x86_64-unknown-linux-gnu.zip -o mdbook-linkcheck.zip && \
unzip mdbook-linkcheck.zip && \


@ -32,7 +32,8 @@ pipeline {
}
stage('Test') {
steps {
sh 'cargo test --all-features'
sh 'cargo nextest r --all-features'
sh 'cargo test --doc --all-features'
}
}
stage('Check with all features') {
@ -47,7 +48,7 @@ pipeline {
}
stage('Check Cross Embedded Bare Metal') {
steps {
sh 'cargo check -p satrs-core --target thumbv7em-none-eabihf --no-default-features'
sh 'cargo check -p satrs --target thumbv7em-none-eabihf --no-default-features'
}
}
stage('Check Cross Embedded Linux') {

View File

@ -18,15 +18,19 @@ def generate_cov_report(open_report: bool, format: str, package: str):
out_path = "./target/debug/coverage"
if format == "lcov":
out_path = "./target/debug/lcov.info"
os.system(
grcov_cmd = (
f"grcov . -s . --binary-path ./target/debug/ -t {format} --branch --ignore-not-existing "
f"-o {out_path}"
)
print(f"Running: {grcov_cmd}")
os.system(grcov_cmd)
if format == "lcov":
os.system(
lcov_cmd = (
"genhtml -o ./target/debug/coverage/ --show-details --highlight --ignore-errors source "
"--legend ./target/debug/lcov.info"
)
print(f"Running: {lcov_cmd}")
os.system(lcov_cmd)
if open_report:
coverage_report_path = os.path.abspath("./target/debug/coverage/index.html")
webbrowser.open_new_tab(coverage_report_path)
@ -43,8 +47,8 @@ def main():
parser.add_argument(
"-p",
"--package",
choices=["satrs-core"],
default="satrs-core",
choices=["satrs", "satrs-minisim", "satrs-example"],
default="satrs",
help="Choose project to generate coverage for",
)
parser.add_argument(

docs.sh Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
export RUSTDOCFLAGS="--cfg docsrs --generate-link-to-definition -Z unstable-options"
cargo +nightly doc --all-features --open

View File

@ -0,0 +1,37 @@
[target.'cfg(all(target_arch = "arm", target_os = "none"))']
# uncomment ONE of these three options to make `cargo run` start a GDB session
# which option to pick depends on your system
# You can also replace openocd.gdb by jlink.gdb when using a J-Link.
# runner = "arm-none-eabi-gdb -q -x openocd.gdb"
# runner = "gdb-multiarch -q -x openocd.gdb"
# runner = "gdb -q -x openocd.gdb"
runner = "probe-rs run --chip STM32F303VCTx"
rustflags = [
"-C", "linker=flip-link",
# LLD (shipped with the Rust toolchain) is used as the default linker
"-C", "link-arg=-Tlink.x",
"-C", "link-arg=-Tdefmt.x",
# This is needed if your flash or ram addresses are not aligned to 0x10000 in memory.x
# See https://github.com/rust-embedded/cortex-m-quickstart/pull/95
"-C", "link-arg=--nmagic",
# if you run into problems with LLD switch to the GNU linker by commenting out
# this line
# "-C", "linker=arm-none-eabi-ld",
# if you need to link to pre-compiled C libraries provided by a C toolchain
# use GCC as the linker by commenting out both lines above and then
# uncommenting the three lines below
# "-C", "linker=arm-none-eabi-gcc",
# "-C", "link-arg=-Wl,-Tlink.x",
# "-C", "link-arg=-nostartfiles",
]
[build]
# comment out the following line if you intend to run unit tests on host machine
target = "thumbv7em-none-eabihf" # Cortex-M4F and Cortex-M7F (with FPU)
[env]
DEFMT_LOG = "info"

View File

@ -1,3 +1,4 @@
/target
/itm.txt
/.cargo/config*
/.vscode

View File

@ -0,0 +1,83 @@
[package]
name = "satrs-stm32f3-disco-rtic"
version = "0.1.0"
edition = "2021"
default-run = "satrs-stm32f3-disco-rtic"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
cortex-m = { version = "0.7", features = ["critical-section-single-core"] }
cortex-m-rt = "0.7"
defmt = "0.3"
defmt-brtt = { version = "0.1", default-features = false, features = ["rtt"] }
panic-probe = { version = "0.3", features = ["print-defmt"] }
embedded-hal = "0.2.7"
cortex-m-semihosting = "0.5.0"
enumset = "1"
heapless = "0.8"
[dependencies.rtic]
version = "2"
features = ["thumbv7-backend"]
[dependencies.rtic-monotonics]
version = "2"
features = ["cortex-m-systick"]
[dependencies.cobs]
version = "0.3"
default-features = false
[dependencies.stm32f3xx-hal]
git = "https://github.com/robamu/stm32f3xx-hal"
version = "0.11.0-alpha.0"
features = ["stm32f303xc", "rt", "enumset"]
branch = "complete-dma-update"
# Can be used in workspace to develop and update HAL
# path = "../stm32f3xx-hal"
[dependencies.stm32f3-discovery]
git = "https://github.com/robamu/stm32f3-discovery"
version = "0.8.0-alpha.0"
branch = "complete-dma-update-hal"
# Can be used in workspace to develop and update BSP
# path = "../stm32f3-discovery"
[dependencies.satrs]
# path = "satrs"
version = "0.2"
default-features = false
features = ["defmt"]
[dev-dependencies]
defmt-test = "0.3"
# cargo test
[profile.test]
codegen-units = 1
debug = 2
debug-assertions = true # <-
incremental = false
opt-level = "s" # <-
overflow-checks = true # <-
# cargo build/run --release
[profile.release]
codegen-units = 1
debug = 2
debug-assertions = false # <-
incremental = false
lto = 'fat'
opt-level = "s" # <-
overflow-checks = false # <-
# cargo test --release
[profile.bench]
codegen-units = 1
debug = 2
debug-assertions = false # <-
incremental = false
lto = 'fat'
opt-level = "s" # <-
overflow-checks = false # <-

View File

@ -0,0 +1,114 @@
sat-rs example for the STM32F3-Discovery board
=======
This example application shows how the [sat-rs library](https://egit.irs.uni-stuttgart.de/rust/sat-rs)
can be used on an embedded target.
It also shows how a relatively simple OBSW could be built when no standard runtime is available.
It uses [RTIC](https://rtic.rs/2/book/en/) as the concurrency framework and the
[defmt](https://defmt.ferrous-systems.com/) framework for logging.
The STM32F3-Discovery device was picked because it is a cheap Cortex-M4 based device which is also
used by the [Rust Embedded Book](https://docs.rust-embedded.org/book/intro/hardware.html) and the
[Rust Discovery](https://docs.rust-embedded.org/discovery/f3discovery/) book as an introduction
to embedded Rust.
## Pre-Requisites
Make sure the following tools are installed:
1. [`probe-rs`](https://probe.rs/): Application used to flash and debug the MCU.
2. Optional and recommended: [VS Code](https://code.visualstudio.com/) with
[probe-rs plugin](https://marketplace.visualstudio.com/items?itemName=probe-rs.probe-rs-debugger)
for debugging.
## Preparing Rust and the repository
Building an application requires the `thumbv7em-none-eabihf` cross-compiler toolchain.
If you have not installed it yet, you can do so with
```sh
rustup target add thumbv7em-none-eabihf
```
A default `.cargo` config file is provided for this project, but it needs to be copied to the
correct name first. This way, the config file can be updated or edited for custom needs
without being tracked by git.
```sh
cp def_config.toml config.toml
```
The configuration file will also set the target so it does not always have to be specified with
the `--target` argument.
## Building
After that, assuming that you have a `.cargo/config.toml` setting the correct build target,
you can simply build the application with
```sh
cargo build
```
## Flashing from the command line
You can flash the application from the command line using `probe-rs`:
```sh
probe-rs run --chip STM32F303VCTx
```
## Debugging with VS Code
The STM32F3-Discovery comes with an on-board ST-Link so all that is required to flash and debug
the board is a Mini-USB cable. The code in this repository was debugged using [`probe-rs`](https://probe.rs/docs/tools/debuggerA)
and the VS Code [`probe-rs` plugin](https://marketplace.visualstudio.com/items?itemName=probe-rs.probe-rs-debugger).
Make sure to install this plugin first.
Sample configuration files are provided inside the `vscode` folder.
Use `cp vscode .vscode -r` to use them for your project.
With these in place, you can simply use `Run` and `Debug`
to automatically rebuild and flash your application.
The `tasks.json` and `launch.json` files are generic and you can use them immediately by opening
the folder in VS code or adding it to a workspace.
## Commanding with Python
When the SW is running on the Discovery board, you can command the MCU via a serial interface,
using COBS encoded PUS packets.
It is recommended to use a virtual environment to do this. To set up one in the command line,
you can use `python3 -m venv venv` on Unix systems or `py -m venv venv` on Windows systems.
After doing this, you can check the [venv tutorial](https://docs.python.org/3/tutorial/venv.html)
on how to activate the environment and then use the following command to install the required
dependency:
```sh
pip install -r requirements.txt
```
The packets are exchanged using a dedicated serial interface. You can use any generic USB-to-UART
converter device with the TX pin connected to the PA3 pin and the RX pin connected to the PA2 pin.
A default configuration file for the python application is provided and can be used by running
```sh
cp def_tmtc_conf.json tmtc_conf.json
```
After that, you can for example send a ping to the MCU using the following command
```sh
./main.py -p /ping
```
You can configure the blink frequency using
```sh
./main.py -p /change_blink_freq
```
All these commands package a PUS telecommand which is sent to the MCU using COBS as the packet
framing format.
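For reference, the framing is the same zero-delimited COBS scheme the firmware uses on its TX path.
Below is a minimal host-side sketch in Rust (assuming the `cobs` crate on a std host; the raw TC
bytes are placeholders only, a real client first packs a proper PUS TC such as a TC[17,1]):

```rust
// Frame a packet for the serial link: 0x00 start marker, COBS-encoded body, 0x00 end marker.
fn frame_packet(packet: &[u8]) -> Vec<u8> {
    let mut framed = vec![0u8; cobs::max_encoding_length(packet.len()) + 2];
    framed[0] = 0; // start marker
    let encoded_len = cobs::encode(packet, &mut framed[1..]);
    framed[encoded_len + 1] = 0; // end marker
    framed.truncate(encoded_len + 2);
    framed
}

fn main() {
    // Placeholder bytes only, not a valid telecommand.
    let raw_tc = [0x18, 0x02, 0xC0, 0x00, 0x00, 0x04];
    println!("framed packet: {:02x?}", frame_packet(&raw_tc));
}
```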

File diff suppressed because it is too large

View File

@ -24,7 +24,9 @@ break main
# # send captured ITM to the file itm.fifo
# # (the microcontroller SWO pin must be connected to the programmer SWO pin)
# # 8000000 must match the core clock frequency
monitor tpiu config internal itm.txt uart off 8000000
# # 2000000 is the frequency of the SWO pin. This was added for newer
# openocd versions like v0.12.0.
# monitor tpiu config internal itm.txt uart off 8000000 2000000
# # OR: make the microcontroller SWO pin output compatible with UART (8N1)
# # 8000000 must match the core clock frequency
@ -32,7 +34,7 @@ monitor tpiu config internal itm.txt uart off 8000000
# monitor tpiu config external uart off 8000000 2000000
# # enable ITM port 0
monitor itm port 0 on
# monitor itm port 0 on
load

View File

@ -1,9 +1,8 @@
__pycache__
/venv
/.tmtc-history.txt
/log
/.idea/*
!/.idea/runConfigurations
/seqcnt.txt
/.tmtc-history.txt
/tmtc_conf.json

View File

@ -1,20 +1,22 @@
#!/usr/bin/env python3
"""Example client for the sat-rs example application"""
import struct
import logging
import sys
import time
from typing import Optional
from prompt_toolkit.history import History
from prompt_toolkit.history import FileHistory
from typing import Any, Optional, cast
from prompt_toolkit.history import FileHistory, History
from spacepackets.ecss.tm import CdsShortTimestamp
import tmtccmd
from spacepackets.ecss import PusTelemetry, PusVerificator
from spacepackets.ecss import PusTelemetry, PusTelecommand, PusTm, PusVerificator
from spacepackets.ecss.pus_17_test import Service17Tm
from spacepackets.ecss.pus_1_verification import UnpackParams, Service1Tm
from spacepackets.ccsds.time import CdsShortTimestamp
from tmtccmd import TcHandlerBase, ProcedureParamsWrapper
from tmtccmd.core.base import BackendRequest
from tmtccmd.core.ccsds_backend import QueueWrapper
from tmtccmd.logging import add_colorlog_console_logger
from tmtccmd.pus import VerificationWrapper
from tmtccmd.tmtc import CcsdsTmHandler, SpecificApidHandlerBase
from tmtccmd.com import ComInterface
@ -25,8 +27,8 @@ from tmtccmd.config import (
HookBase,
params_to_procedure_conversion,
)
from tmtccmd.config.com import SerialCfgWrapper
from tmtccmd.config import PreArgsParsingWrapper, SetupWrapper
from tmtccmd.logging import add_colorlog_console_logger
from tmtccmd.logging.pus import (
RegularTmtcLogWrapper,
RawTmtcTimedLogWrapper,
@ -39,21 +41,19 @@ from tmtccmd.tmtc import (
FeedWrapper,
SendCbParams,
DefaultPusQueueHelper,
QueueWrapper,
)
from tmtccmd.pus.s5_fsfw_event import Service5Tm
from spacepackets.seqcount import FileSeqCountProvider, PusFileSeqCountProvider
from tmtccmd.util.obj_id import ObjectIdDictT
import pus_tc
from common import EXAMPLE_PUS_APID, TM_PACKET_IDS, EventU32
_LOGGER = logging.getLogger()
EXAMPLE_PUS_APID = 0x02
class SatRsConfigHook(HookBase):
def __init__(self, json_cfg_path: str):
super().__init__(json_cfg_path=json_cfg_path)
super().__init__(json_cfg_path)
def get_communication_interface(self, com_if_key: str) -> Optional[ComInterface]:
from tmtccmd.config.com import (
@ -65,14 +65,20 @@ class SatRsConfigHook(HookBase):
cfg = create_com_interface_cfg_default(
com_if_key=com_if_key,
json_cfg_path=self.cfg_path,
space_packet_ids=TM_PACKET_IDS,
space_packet_ids=None,
)
assert cfg is not None
if cfg is None:
raise ValueError(
f"No valid configuration could be retrieved for the COM IF with key {com_if_key}"
)
if cfg.com_if_key == "serial_cobs":
cfg = cast(SerialCfgWrapper, cfg)
cfg.serial_cfg.serial_timeout = 0.5
return create_com_interface_default(cfg)
def get_command_definitions(self) -> CmdTreeNode:
"""This function should return the root node of the command definition tree."""
return pus_tc.create_cmd_definition_tree()
return create_cmd_definition_tree()
def get_cmd_history(self) -> Optional[History]:
"""Optionlly return a history class for the past command paths which will be used
@ -85,6 +91,13 @@ class SatRsConfigHook(HookBase):
return get_core_object_ids()
def create_cmd_definition_tree() -> CmdTreeNode:
root_node = CmdTreeNode.root_node()
root_node.add_child(CmdTreeNode("ping", "Send PUS ping TC"))
root_node.add_child(CmdTreeNode("change_blink_freq", "Change blink frequency"))
return root_node
class PusHandler(SpecificApidHandlerBase):
def __init__(
self,
@ -97,17 +110,20 @@ class PusHandler(SpecificApidHandlerBase):
self.raw_logger = raw_logger
self.verif_wrapper = verif_wrapper
def handle_tm(self, packet: bytes, _user_args: any):
def handle_tm(self, packet: bytes, _user_args: Any):
try:
pus_tm = PusTelemetry.unpack(packet, time_reader=CdsShortTimestamp.empty())
pus_tm = PusTm.unpack(
packet, timestamp_len=CdsShortTimestamp.TIMESTAMP_SIZE
)
except ValueError as e:
_LOGGER.warning("Could not generate PUS TM object from raw data")
_LOGGER.warning(f"Raw Packet: [{packet.hex(sep=',')}], REPR: {packet!r}")
raise e
service = pus_tm.service
tm_packet = None
if service == 1:
tm_packet = Service1Tm.unpack(
data=packet, params=UnpackParams(CdsShortTimestamp.empty(), 1, 2)
data=packet, params=UnpackParams(CdsShortTimestamp.TIMESTAMP_SIZE, 1, 2)
)
res = self.verif_wrapper.add_tm(tm_packet)
if res is None:
@ -121,48 +137,39 @@ class PusHandler(SpecificApidHandlerBase):
else:
self.verif_wrapper.log_to_console(tm_packet, res)
self.verif_wrapper.log_to_file(tm_packet, res)
elif service == 3:
if service == 3:
_LOGGER.info("No handling for HK packets implemented")
_LOGGER.info(f"Raw packet: 0x[{packet.hex(sep=',')}]")
pus_tm = PusTelemetry.unpack(packet, time_reader=CdsShortTimestamp.empty())
pus_tm = PusTelemetry.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
if pus_tm.subservice == 25:
if len(pus_tm.source_data) < 8:
raise ValueError("No addressable ID in HK packet")
json_str = pus_tm.source_data[8:]
_LOGGER.info(json_str)
elif service == 5:
tm_packet = PusTelemetry.unpack(
packet, time_reader=CdsShortTimestamp.empty()
)
src_data = tm_packet.source_data
event_u32 = EventU32.unpack(src_data)
_LOGGER.info(f"Received event packet. Event: {event_u32}")
if event_u32.group_id == 0 and event_u32.unique_id == 0:
_LOGGER.info("Received test event")
elif service == 17:
tm_packet = Service17Tm.unpack(
packet, time_reader=CdsShortTimestamp.empty()
)
_LOGGER.info("received JSON string: " + json_str.decode("utf-8"))
if service == 5:
tm_packet = Service5Tm.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
if service == 17:
tm_packet = Service17Tm.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
if tm_packet.subservice == 2:
self.file_logger.info("Received Ping Reply TM[17,2]")
_LOGGER.info("Received Ping Reply TM[17,2]")
else:
self.file_logger.info(
f"Received Test Packet with unknown subservice {tm_packet.subservice}"
)
_LOGGER.info(
f"Received Test Packet with unknown subservice {tm_packet.subservice}"
)
else:
if tm_packet is None:
_LOGGER.info(
f"The service {service} is not implemented in Telemetry Factory"
)
tm_packet = PusTelemetry.unpack(
packet, time_reader=CdsShortTimestamp.empty()
)
tm_packet = PusTelemetry.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
self.raw_logger.log_tm(pus_tm)
def make_addressable_id(target_id: int, unique_id: int) -> bytes:
byte_string = bytearray(struct.pack("!I", target_id))
byte_string.extend(struct.pack("!I", unique_id))
return byte_string
class TcHandler(TcHandlerBase):
def __init__(
self,
@ -174,9 +181,9 @@ class TcHandler(TcHandlerBase):
self.verif_wrapper = verif_wrapper
self.queue_helper = DefaultPusQueueHelper(
queue_wrapper=QueueWrapper.empty(),
tc_sched_timestamp_len=CdsShortTimestamp.TIMESTAMP_SIZE,
tc_sched_timestamp_len=7,
seq_cnt_provider=seq_count_provider,
pus_verificator=self.verif_wrapper.pus_verificator,
pus_verificator=verif_wrapper.pus_verificator,
default_pus_apid=EXAMPLE_PUS_APID,
)
@ -185,6 +192,10 @@ class TcHandler(TcHandlerBase):
if entry_helper.is_tc:
if entry_helper.entry_type == TcQueueEntryType.PUS_TC:
pus_tc_wrapper = entry_helper.to_pus_tc_entry()
pus_tc_wrapper.pus_tc.seq_count = (
self.seq_count_provider.get_and_increment()
)
self.verif_wrapper.add_tc(pus_tc_wrapper.pus_tc)
raw_tc = pus_tc_wrapper.pus_tc.pack()
_LOGGER.info(f"Sending {pus_tc_wrapper.pus_tc}")
send_params.com_if.send(raw_tc)
@ -193,17 +204,38 @@ class TcHandler(TcHandlerBase):
_LOGGER.info(log_entry.log_str)
def queue_finished_cb(self, info: ProcedureWrapper):
if info.proc_type == TcProcedureType.DEFAULT:
def_proc = info.to_def_procedure()
if info.proc_type == TcProcedureType.TREE_COMMANDING:
def_proc = info.to_tree_commanding_procedure()
_LOGGER.info(f"Queue handling finished for command {def_proc.cmd_path}")
def feed_cb(self, info: ProcedureWrapper, wrapper: FeedWrapper):
q = self.queue_helper
q.queue_wrapper = wrapper.queue_wrapper
if info.proc_type == TcProcedureType.DEFAULT:
def_proc = info.to_def_procedure()
assert def_proc.cmd_path is not None
pus_tc.pack_pus_telecommands(q, def_proc.cmd_path)
if info.proc_type == TcProcedureType.TREE_COMMANDING:
def_proc = info.to_tree_commanding_procedure()
cmd_path = def_proc.cmd_path
if cmd_path == "/ping":
q.add_log_cmd("Sending PUS ping telecommand")
q.add_pus_tc(PusTelecommand(service=17, subservice=1))
if cmd_path == "/change_blink_freq":
self.create_change_blink_freq_command(q)
def create_change_blink_freq_command(self, q: DefaultPusQueueHelper):
q.add_log_cmd("Changing blink frequency")
while True:
blink_freq = int(
input(
"Please specify new blink frequency in ms. Valid Range [2..10000]: "
)
)
if blink_freq < 2 or blink_freq > 10000:
print(
"Invalid blink frequency. Please specify a value between 2 and 10000."
)
continue
break
app_data = struct.pack("!I", blink_freq)
q.add_pus_tc(PusTelecommand(service=8, subservice=1, app_data=app_data))
def main():

View File

@ -1,2 +1,2 @@
tmtccmd == 4.0.0a0
tmtccmd == 8.0.1
# -e git+https://github.com/robamu-org/tmtccmd.git@main#egg=tmtccmd

View File

@ -1,17 +1,15 @@
#![no_std]
#![no_main]
use satrs_stm32f3_disco_rtic as _;
extern crate panic_itm;
use cortex_m_rt::entry;
use stm32f3_discovery::leds::Leds;
use stm32f3_discovery::stm32f3xx_hal::delay::Delay;
use stm32f3_discovery::stm32f3xx_hal::{pac, prelude::*};
use stm32f3_discovery::leds::Leds;
use stm32f3_discovery::switch_hal::{OutputSwitch, ToggleableOutputSwitch};
#[entry]
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::println!("STM32F3 Discovery Blinky");
let dp = pac::Peripherals::take().unwrap();
let mut rcc = dp.RCC.constrain();
let cp = cortex_m::Peripherals::take().unwrap();
@ -30,49 +28,49 @@ fn main()-> ! {
gpioe.pe14,
gpioe.pe15,
&mut gpioe.moder,
&mut gpioe.otyper
&mut gpioe.otyper,
);
let delay_ms = 200u16;
loop {
leds.ld3.toggle().ok();
leds.ld3_n.toggle().ok();
delay.delay_ms(delay_ms);
leds.ld3.toggle().ok();
leds.ld3_n.toggle().ok();
delay.delay_ms(delay_ms);
//explicit on/off
leds.ld4.on().ok();
leds.ld4_nw.on().ok();
delay.delay_ms(delay_ms);
leds.ld4.off().ok();
leds.ld4_nw.off().ok();
delay.delay_ms(delay_ms);
leds.ld5.on().ok();
leds.ld5_ne.on().ok();
delay.delay_ms(delay_ms);
leds.ld5.off().ok();
leds.ld5_ne.off().ok();
delay.delay_ms(delay_ms);
leds.ld6.on().ok();
leds.ld6_w.on().ok();
delay.delay_ms(delay_ms);
leds.ld6.off().ok();
leds.ld6_w.off().ok();
delay.delay_ms(delay_ms);
leds.ld7.on().ok();
leds.ld7_e.on().ok();
delay.delay_ms(delay_ms);
leds.ld7.off().ok();
leds.ld7_e.off().ok();
delay.delay_ms(delay_ms);
leds.ld8.on().ok();
leds.ld8_sw.on().ok();
delay.delay_ms(delay_ms);
leds.ld8.off().ok();
leds.ld8_sw.off().ok();
delay.delay_ms(delay_ms);
leds.ld9.on().ok();
leds.ld9_se.on().ok();
delay.delay_ms(delay_ms);
leds.ld9.off().ok();
leds.ld9_se.off().ok();
delay.delay_ms(delay_ms);
leds.ld10.on().ok();
leds.ld10_s.on().ok();
delay.delay_ms(delay_ms);
leds.ld10.off().ok();
leds.ld10_s.off().ok();
delay.delay_ms(delay_ms);
}
}

View File

@ -0,0 +1,51 @@
#![no_main]
#![no_std]
use cortex_m_semihosting::debug;
use defmt_brtt as _; // global logger
use stm32f3xx_hal as _; // memory layout
use panic_probe as _;
// same panicking *behavior* as `panic-probe` but doesn't print a panic message
// this prevents the panic message being printed *twice* when `defmt::panic` is invoked
#[defmt::panic_handler]
fn panic() -> ! {
cortex_m::asm::udf()
}
/// Terminates the application and makes a semihosting-capable debug tool exit
/// with status code 0.
pub fn exit() -> ! {
loop {
debug::exit(debug::EXIT_SUCCESS);
}
}
/// Hardfault handler.
///
/// Terminates the application and makes a semihosting-capable debug tool exit
/// with an error. This seems better than the default, which is to spin in a
/// loop.
#[cortex_m_rt::exception]
unsafe fn HardFault(_frame: &cortex_m_rt::ExceptionFrame) -> ! {
loop {
debug::exit(debug::EXIT_FAILURE);
}
}
// defmt-test 0.3.0 has the limitation that this `#[tests]` attribute can only be used
// once within a crate. the module can be in any file but there can only be at most
// one `#[tests]` module in this library crate
#[cfg(test)]
#[defmt_test::tests]
mod unit_tests {
use defmt::assert;
#[test]
fn it_works() {
assert!(true)
}
}

View File

@ -0,0 +1,682 @@
#![no_std]
#![no_main]
use satrs::pus::verification::{
FailParams, TcStateAccepted, VerificationReportCreator, VerificationToken,
};
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::tm::{PusTmCreator, PusTmSecondaryHeader};
use satrs::spacepackets::ecss::EcssEnumU16;
use satrs::spacepackets::CcsdsPacket;
use satrs::spacepackets::{ByteConversionError, SpHeader};
// global logger + panicking-behavior + memory layout
use satrs_stm32f3_disco_rtic as _;
use rtic::app;
use heapless::{mpmc::Q8, Vec};
#[allow(unused_imports)]
use rtic_monotonics::fugit::{MillisDurationU32, TimerInstantU32};
use rtic_monotonics::systick::prelude::*;
use satrs::seq_count::SequenceCountProviderCore;
use satrs::spacepackets::{ecss::PusPacket, ecss::WritablePusPacket};
use stm32f3xx_hal::dma::dma1;
use stm32f3xx_hal::gpio::{PushPull, AF7, PA2, PA3};
use stm32f3xx_hal::pac::USART2;
use stm32f3xx_hal::serial::{Rx, RxEvent, Serial, SerialDmaRx, SerialDmaTx, Tx, TxEvent};
const UART_BAUD: u32 = 115200;
const DEFAULT_BLINK_FREQ_MS: u32 = 1000;
const TX_HANDLER_FREQ_MS: u32 = 20;
const MIN_DELAY_BETWEEN_TX_PACKETS_MS: u32 = 5;
const MAX_TC_LEN: usize = 128;
const MAX_TM_LEN: usize = 128;
pub const PUS_APID: u16 = 0x02;
type TxType = Tx<USART2, PA2<AF7<PushPull>>>;
type RxType = Rx<USART2, PA3<AF7<PushPull>>>;
type InstantFugit = TimerInstantU32<1000>;
type TxDmaTransferType = SerialDmaTx<&'static [u8], dma1::C7, TxType>;
type RxDmaTransferType = SerialDmaRx<&'static mut [u8], dma1::C6, RxType>;
// This is the predictable maximum overhead of the COBS encoding scheme.
// It is simply the maximum packet length divided by 254, rounded up.
const COBS_TC_OVERHEAD: usize = (MAX_TC_LEN + 254 - 1) / 254;
const COBS_TM_OVERHEAD: usize = (MAX_TM_LEN + 254 - 1) / 254;
const TC_BUF_LEN: usize = MAX_TC_LEN + COBS_TC_OVERHEAD;
const TM_BUF_LEN: usize = MAX_TM_LEN + COBS_TM_OVERHEAD;
// This is a static buffer which should ONLY (!) be used as the TX DMA
// transfer buffer.
static mut DMA_TX_BUF: [u8; TM_BUF_LEN] = [0; TM_BUF_LEN];
// This is a static buffer which should ONLY (!) be used as the RX DMA
// transfer buffer.
static mut DMA_RX_BUF: [u8; TC_BUF_LEN] = [0; TC_BUF_LEN];
type TmPacket = Vec<u8, MAX_TM_LEN>;
type TcPacket = Vec<u8, MAX_TC_LEN>;
static TM_REQUESTS: Q8<TmPacket> = Q8::new();
use core::sync::atomic::{AtomicU16, Ordering};
pub struct SeqCountProviderAtomicRef {
atomic: AtomicU16,
ordering: Ordering,
}
impl SeqCountProviderAtomicRef {
pub const fn new(ordering: Ordering) -> Self {
Self {
atomic: AtomicU16::new(0),
ordering,
}
}
}
impl SequenceCountProviderCore<u16> for SeqCountProviderAtomicRef {
fn get(&self) -> u16 {
self.atomic.load(self.ordering)
}
fn increment(&self) {
self.atomic.fetch_add(1, self.ordering);
}
fn get_and_increment(&self) -> u16 {
self.atomic.fetch_add(1, self.ordering)
}
}
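// Global CCSDS sequence counter shared between the request converter and the TM senders.
// Relaxed ordering is sufficient for a simple monotonically increasing counter.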
static SEQ_COUNT_PROVIDER: SeqCountProviderAtomicRef =
SeqCountProviderAtomicRef::new(Ordering::Relaxed);
pub struct TxIdle {
tx: TxType,
dma_channel: dma1::C7,
}
#[derive(Debug, defmt::Format)]
pub enum TmSendError {
ByteConversion(ByteConversionError),
Queue,
}
impl From<ByteConversionError> for TmSendError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
fn send_tm(tm_creator: PusTmCreator) -> Result<(), TmSendError> {
if tm_creator.len_written() > MAX_TM_LEN {
return Err(ByteConversionError::ToSliceTooSmall {
expected: tm_creator.len_written(),
found: MAX_TM_LEN,
}
.into());
}
let mut tm_vec = TmPacket::new();
tm_vec
.resize(tm_creator.len_written(), 0)
.expect("vec resize failed");
tm_creator.write_to_bytes(tm_vec.as_mut_slice())?;
defmt::info!(
"Sending TM[{},{}] with size {}",
tm_creator.service(),
tm_creator.subservice(),
tm_creator.len_written()
);
TM_REQUESTS
.enqueue(tm_vec)
.map_err(|_| TmSendError::Queue)?;
Ok(())
}
fn handle_tm_send_error(error: TmSendError) {
defmt::warn!("sending tm failed with error {}", error);
}
pub enum UartTxState {
// Wrapped in an option because we need an owned type later.
Idle(Option<TxIdle>),
// Same as above
Transmitting(Option<TxDmaTransferType>),
}
pub struct UartTxShared {
last_completed: Option<InstantFugit>,
state: UartTxState,
}
pub struct RequestWithToken {
token: VerificationToken<TcStateAccepted>,
request: Request,
}
#[derive(Debug, defmt::Format)]
pub enum Request {
Ping,
ChangeBlinkFrequency(u32),
}
#[derive(Debug, defmt::Format)]
pub enum RequestError {
InvalidApid = 1,
InvalidService = 2,
InvalidSubservice = 3,
NotEnoughAppData = 4,
}
pub fn convert_pus_tc_to_request(
tc: &PusTcReader,
verif_reporter: &mut VerificationReportCreator,
src_data_buf: &mut [u8],
timestamp: &[u8],
) -> Result<RequestWithToken, RequestError> {
defmt::info!(
"Found PUS TC [{},{}] with length {}",
tc.service(),
tc.subservice(),
tc.len_packed()
);
let token = verif_reporter.add_tc(tc);
if tc.apid() != PUS_APID {
defmt::warn!("Received tc with unknown APID {}", tc.apid());
let result = send_tm(
verif_reporter
.acceptance_failure(
src_data_buf,
token,
SEQ_COUNT_PROVIDER.get_and_increment(),
0,
FailParams::new(timestamp, &EcssEnumU16::new(0), &[]),
)
.unwrap(),
);
if let Err(e) = result {
handle_tm_send_error(e);
}
return Err(RequestError::InvalidApid);
}
let (tm_creator, accepted_token) = verif_reporter
.acceptance_success(
src_data_buf,
token,
SEQ_COUNT_PROVIDER.get_and_increment(),
0,
timestamp,
)
.unwrap();
if let Err(e) = send_tm(tm_creator) {
handle_tm_send_error(e);
}
if tc.service() == 17 {
if tc.subservice() == 1 {
return Ok(RequestWithToken {
request: Request::Ping,
token: accepted_token,
});
} else {
return Err(RequestError::InvalidSubservice);
}
} else if tc.service() == 8 {
if tc.subservice() == 1 {
if tc.user_data().len() < 4 {
return Err(RequestError::NotEnoughAppData);
}
let new_freq_ms = u32::from_be_bytes(tc.user_data()[0..4].try_into().unwrap());
return Ok(RequestWithToken {
request: Request::ChangeBlinkFrequency(new_freq_ms),
token: accepted_token,
});
} else {
return Err(RequestError::InvalidSubservice);
}
} else {
return Err(RequestError::InvalidService);
}
}
#[app(device = stm32f3xx_hal::pac, peripherals = true)]
mod app {
use super::*;
use core::slice::Iter;
use satrs::pus::verification::{TcStateStarted, VerificationReportCreator};
use satrs::spacepackets::{ecss::tc::PusTcReader, time::cds::P_FIELD_BASE};
#[allow(unused_imports)]
use stm32f3_discovery::leds::Direction;
use stm32f3_discovery::leds::Leds;
use stm32f3xx_hal::prelude::*;
use stm32f3_discovery::switch_hal::OutputSwitch;
use stm32f3xx_hal::Switch;
#[allow(dead_code)]
type SerialType = Serial<USART2, (PA2<AF7<PushPull>>, PA3<AF7<PushPull>>)>;
systick_monotonic!(Mono, 1000);
#[shared]
struct Shared {
blink_freq: MillisDurationU32,
tx_shared: UartTxShared,
rx_transfer: Option<RxDmaTransferType>,
}
#[local]
struct Local {
verif_reporter: VerificationReportCreator,
leds: Leds,
last_dir: Direction,
curr_dir: Iter<'static, Direction>,
}
#[init]
fn init(cx: init::Context) -> (Shared, Local) {
let mut rcc = cx.device.RCC.constrain();
// Initialize the systick interrupt & obtain the token to prove that we did
Mono::start(cx.core.SYST, 8_000_000);
let mut flash = cx.device.FLASH.constrain();
let clocks = rcc
.cfgr
.use_hse(8.MHz())
.sysclk(8.MHz())
.pclk1(8.MHz())
.freeze(&mut flash.acr);
// Set up monotonic timer.
//let mono_timer = MonoTimer::new(cx.core.DWT, clocks, &mut cx.core.DCB);
defmt::info!("Starting sat-rs demo application for the STM32F3-Discovery");
let mut gpioe = cx.device.GPIOE.split(&mut rcc.ahb);
let leds = Leds::new(
gpioe.pe8,
gpioe.pe9,
gpioe.pe10,
gpioe.pe11,
gpioe.pe12,
gpioe.pe13,
gpioe.pe14,
gpioe.pe15,
&mut gpioe.moder,
&mut gpioe.otyper,
);
let mut gpioa = cx.device.GPIOA.split(&mut rcc.ahb);
// USART2 pins
let mut pins = (
// TX pin: PA2
gpioa
.pa2
.into_af_push_pull(&mut gpioa.moder, &mut gpioa.otyper, &mut gpioa.afrl),
// RX pin: PA3
gpioa
.pa3
.into_af_push_pull(&mut gpioa.moder, &mut gpioa.otyper, &mut gpioa.afrl),
);
pins.1.internal_pull_up(&mut gpioa.pupdr, true);
let mut usart2 = Serial::new(
cx.device.USART2,
pins,
UART_BAUD.Bd(),
clocks,
&mut rcc.apb1,
);
usart2.configure_rx_interrupt(RxEvent::Idle, Switch::On);
// This interrupt is enabled to re-schedule new transfers in the interrupt handler immediately.
usart2.configure_tx_interrupt(TxEvent::TransmissionComplete, Switch::On);
let dma1 = cx.device.DMA1.split(&mut rcc.ahb);
let (mut tx_serial, mut rx_serial) = usart2.split();
// This interrupt is immediately triggered, clear it. It will only be reset
// by the hardware when data is received on RX (RXNE event)
rx_serial.clear_event(RxEvent::Idle);
// For some reason, this is also immediately triggered..
tx_serial.clear_event(TxEvent::TransmissionComplete);
let rx_transfer = rx_serial.read_exact(unsafe { DMA_RX_BUF.as_mut_slice() }, dma1.ch6);
defmt::info!("Spawning tasks");
blink::spawn().unwrap();
serial_tx_handler::spawn().unwrap();
let verif_reporter = VerificationReportCreator::new(PUS_APID).unwrap();
(
Shared {
blink_freq: MillisDurationU32::from_ticks(DEFAULT_BLINK_FREQ_MS),
tx_shared: UartTxShared {
last_completed: None,
state: UartTxState::Idle(Some(TxIdle {
tx: tx_serial,
dma_channel: dma1.ch7,
})),
},
rx_transfer: Some(rx_transfer),
},
Local {
verif_reporter,
leds,
last_dir: Direction::North,
curr_dir: Direction::iter(),
},
)
}
#[task(local = [leds, curr_dir, last_dir], shared=[blink_freq])]
async fn blink(mut cx: blink::Context) {
let blink::LocalResources {
leds,
curr_dir,
last_dir,
..
} = cx.local;
let mut toggle_leds = |dir: &Direction| {
let last_led = leds.for_direction(*last_dir);
last_led.off().ok();
let led = leds.for_direction(*dir);
led.on().ok();
*last_dir = *dir;
};
loop {
match curr_dir.next() {
Some(dir) => {
toggle_leds(dir);
}
None => {
*curr_dir = Direction::iter();
toggle_leds(curr_dir.next().unwrap());
}
}
let current_blink_freq = cx.shared.blink_freq.lock(|current| *current);
Mono::delay(current_blink_freq).await;
}
}
#[task(
shared = [tx_shared],
)]
async fn serial_tx_handler(mut cx: serial_tx_handler::Context) {
loop {
let is_idle = cx.shared.tx_shared.lock(|tx_shared| {
if let UartTxState::Idle(_) = tx_shared.state {
return true;
}
false
});
if is_idle {
let last_completed = cx.shared.tx_shared.lock(|shared| shared.last_completed);
if let Some(last_completed) = last_completed {
let elapsed_ms = (Mono::now() - last_completed).to_millis();
if elapsed_ms < MIN_DELAY_BETWEEN_TX_PACKETS_MS {
Mono::delay((MIN_DELAY_BETWEEN_TX_PACKETS_MS - elapsed_ms).millis()).await;
}
}
} else {
// Check for completion after 1 ms
Mono::delay(1.millis()).await;
continue;
}
if let Some(vec) = TM_REQUESTS.dequeue() {
cx.shared
.tx_shared
.lock(|tx_shared| match &mut tx_shared.state {
UartTxState::Idle(tx) => {
let encoded_len;
//debug!(target: "serial_tx_handler", "bytes: {:x?}", &buf[0..len]);
// Safety: We only copy the data into the TX DMA buffer in this task.
// If the DMA is active, another branch will be taken.
unsafe {
// 0 sentinel value as start marker
DMA_TX_BUF[0] = 0;
encoded_len =
cobs::encode(&vec[0..vec.len()], &mut DMA_TX_BUF[1..]);
// Should never panic, we accounted for the overhead.
// Write into transfer buffer directly, no need for intermediate
// encoding buffer.
// 0 end marker
DMA_TX_BUF[encoded_len + 1] = 0;
}
//debug!(target: "serial_tx_handler", "Sending {} bytes", encoded_len + 2);
//debug!("sent: {:x?}", &mut_tx_dma_buf[0..encoded_len + 2]);
let tx_idle = tx.take().unwrap();
// Transfer completion and re-scheduling of new TX transfers will be done
// by the IRQ handler.
// SAFETY: The DMA is the exclusive writer to the DMA buffer now.
let transfer = tx_idle.tx.write_all(
unsafe { &DMA_TX_BUF[0..encoded_len + 2] },
tx_idle.dma_channel,
);
tx_shared.state = UartTxState::Transmitting(Some(transfer));
// The memory block is automatically returned to the pool when it is dropped.
}
UartTxState::Transmitting(_) => (),
});
// Check for completion after 1 ms
Mono::delay(1.millis()).await;
continue;
}
// Nothing to do, and we are idle.
Mono::delay(TX_HANDLER_FREQ_MS.millis()).await;
}
}
#[task(
local = [
verif_reporter,
decode_buf: [u8; MAX_TC_LEN] = [0; MAX_TC_LEN],
src_data_buf: [u8; MAX_TM_LEN] = [0; MAX_TM_LEN],
timestamp: [u8; 7] = [0; 7],
],
shared = [blink_freq]
)]
async fn serial_rx_handler(
mut cx: serial_rx_handler::Context,
received_packet: Vec<u8, MAX_TC_LEN>,
) {
cx.local.timestamp[0] = P_FIELD_BASE;
defmt::info!("Received packet with {} bytes", received_packet.len());
let decode_buf = cx.local.decode_buf;
let packet = received_packet.as_slice();
let mut start_idx = None;
for (idx, byte) in packet.iter().enumerate() {
if *byte != 0 {
start_idx = Some(idx);
break;
}
}
if start_idx.is_none() {
defmt::warn!("decoding error, can only process cobs encoded frames, data is all 0");
return;
}
let start_idx = start_idx.unwrap();
match cobs::decode(&received_packet.as_slice()[start_idx..], decode_buf) {
Ok(len) => {
defmt::info!("Decoded packet length: {}", len);
let pus_tc = PusTcReader::new(decode_buf);
match pus_tc {
Ok((tc, _tc_len)) => {
match convert_pus_tc_to_request(
&tc,
cx.local.verif_reporter,
cx.local.src_data_buf,
cx.local.timestamp,
) {
Ok(request_with_token) => {
let started_token = handle_start_verification(
request_with_token.token,
cx.local.verif_reporter,
cx.local.src_data_buf,
cx.local.timestamp,
);
match request_with_token.request {
Request::Ping => {
handle_ping_request(cx.local.timestamp);
}
Request::ChangeBlinkFrequency(new_freq_ms) => {
defmt::info!("Received blink frequency change request with new frequncy {}", new_freq_ms);
cx.shared.blink_freq.lock(|blink_freq| {
*blink_freq =
MillisDurationU32::from_ticks(new_freq_ms);
});
}
}
handle_completion_verification(
started_token,
cx.local.verif_reporter,
cx.local.src_data_buf,
cx.local.timestamp,
);
}
Err(e) => {
// TODO: Error handling: Send verification failure based on request error.
defmt::warn!("request error {}", e);
}
}
}
Err(e) => {
defmt::warn!("Error unpacking PUS TC: {}", e);
}
}
}
Err(_) => {
defmt::warn!("decoding error, can only process cobs encoded frames")
}
}
}
fn handle_ping_request(timestamp: &[u8]) {
defmt::info!("Received PUS ping telecommand, sending ping reply TM[17,2]");
let sp_header =
SpHeader::new_for_unseg_tc(PUS_APID, SEQ_COUNT_PROVIDER.get_and_increment(), 0);
let sec_header = PusTmSecondaryHeader::new_simple(17, 2, timestamp);
let ping_reply = PusTmCreator::new(sp_header, sec_header, &[], true);
let mut tm_packet = TmPacket::new();
tm_packet
.resize(ping_reply.len_written(), 0)
.expect("vec resize failed");
ping_reply.write_to_bytes(&mut tm_packet).unwrap();
if TM_REQUESTS.enqueue(tm_packet).is_err() {
defmt::warn!("TC queue full");
return;
}
}
fn handle_start_verification(
accepted_token: VerificationToken<TcStateAccepted>,
verif_reporter: &mut VerificationReportCreator,
src_data_buf: &mut [u8],
timestamp: &[u8],
) -> VerificationToken<TcStateStarted> {
let (tm_creator, started_token) = verif_reporter
.start_success(
src_data_buf,
accepted_token,
SEQ_COUNT_PROVIDER.get(),
0,
&timestamp,
)
.unwrap();
let result = send_tm(tm_creator);
if let Err(e) = result {
handle_tm_send_error(e);
}
started_token
}
fn handle_completion_verification(
started_token: VerificationToken<TcStateStarted>,
verif_reporter: &mut VerificationReportCreator,
src_data_buf: &mut [u8],
timestamp: &[u8],
) {
let result = send_tm(
verif_reporter
.completion_success(
src_data_buf,
started_token,
SEQ_COUNT_PROVIDER.get(),
0,
timestamp,
)
.unwrap(),
);
if let Err(e) = result {
handle_tm_send_error(e);
}
}
#[task(binds = DMA1_CH6, shared = [rx_transfer])]
fn rx_dma_isr(mut cx: rx_dma_isr::Context) {
let mut tc_packet = TcPacket::new();
cx.shared.rx_transfer.lock(|rx_transfer| {
let rx_ref = rx_transfer.as_ref().unwrap();
if rx_ref.is_complete() {
let uart_rx_owned = rx_transfer.take().unwrap();
let (buf, c, rx) = uart_rx_owned.stop();
// The received data is transferred to another task now to avoid any processing overhead
// during the interrupt. There are multiple ways to do this; here we use a stack-allocated vector.
tc_packet.resize(buf.len(), 0).expect("vec resize failed");
tc_packet.copy_from_slice(buf);
// Start the next transfer as soon as possible.
*rx_transfer = Some(rx.read_exact(buf, c));
// Send the vector to a regular task.
serial_rx_handler::spawn(tc_packet).expect("spawning rx handler task failed");
// A completed transfer means the whole buffer was filled, so there is a high chance that
// the maximum packet length was exceeded. Circular mode is not used here, so data might be missed.
defmt::warn!(
"rx transfer with maximum length {}, might miss data",
TC_BUF_LEN
);
}
});
}
#[task(binds = USART2_EXTI26, shared = [rx_transfer, tx_shared])]
fn serial_isr(mut cx: serial_isr::Context) {
cx.shared
.tx_shared
.lock(|tx_shared| match &mut tx_shared.state {
UartTxState::Idle(_) => (),
UartTxState::Transmitting(transfer) => {
let transfer_ref = transfer.as_ref().unwrap();
if transfer_ref.is_complete() {
let transfer = transfer.take().unwrap();
let (_, dma_channel, mut tx) = transfer.stop();
tx.clear_event(TxEvent::TransmissionComplete);
tx_shared.state = UartTxState::Idle(Some(TxIdle { tx, dma_channel }));
// We cache the last completed time to ensure that there is a minimum delay between consecutive
// transferred packets.
tx_shared.last_completed = Some(Mono::now());
}
}
});
let mut tc_packet = TcPacket::new();
cx.shared.rx_transfer.lock(|rx_transfer| {
let rx_transfer_ref = rx_transfer.as_ref().unwrap();
// Received a partial packet.
if rx_transfer_ref.is_event_triggered(RxEvent::Idle) {
let rx_transfer_owned = rx_transfer.take().unwrap();
let (buf, ch, mut rx, rx_len) = rx_transfer_owned.stop_and_return_received_bytes();
// The received data is transferred to another task now to avoid any processing overhead
// during the interrupt. There are multiple ways to do this; here we use a stack-allocated vector.
tc_packet
.resize(rx_len as usize, 0)
.expect("vec resize failed");
tc_packet[0..rx_len as usize].copy_from_slice(&buf[0..rx_len as usize]);
rx.clear_event(RxEvent::Idle);
serial_rx_handler::spawn(tc_packet).expect("spawning rx handler failed");
*rx_transfer = Some(rx.read_exact(buf, ch));
}
});
}
}

View File

@ -5,7 +5,7 @@
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"rust-lang.rust",
"marus25.cortex-debug",
"probe-rs.probe-rs-debugger"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []

View File

@ -0,0 +1,22 @@
{
"version": "0.2.0",
"configurations": [
{
"preLaunchTask": "${defaultBuildTask}",
"type": "probe-rs-debug",
"request": "launch",
"name": "probe-rs Debugging ",
"flashingConfig": {
"flashingEnabled": true
},
"chip": "STM32F303VCTx",
"coreConfigs": [
{
"programBinary": "${workspaceFolder}/target/thumbv7em-none-eabihf/debug/satrs-stm32f3-disco-rtic",
"rttEnabled": true,
"svdFile": "STM32F303.svd"
}
]
}
]
}

View File

@ -11,7 +11,8 @@ proc CDSWOConfigure { CDCPUFreqHz CDSWOFreqHz CDSWOOutput } {
# Alternative option: Pipe ITM output into itm.txt file
# tpiu config internal itm.txt uart off $CDCPUFreqHz
# Default option so SWO display of VS code works.
# Default option so SWO display of VS code works. Please note that this might not be required
# anymore starting at openocd v0.12.0
tpiu config internal $CDSWOOutput uart off $CDCPUFreqHz $CDSWOFreqHz
itm port 0 on
}

View File

@ -0,0 +1,29 @@
[target.'cfg(all(target_arch = "arm", target_os = "none"))']
runner = "probe-rs run --chip STM32H743ZITx"
# runner = ["probe-rs", "run", "--chip", "$CHIP", "--log-format", "{L} {s}"]
rustflags = [
"-C", "linker=flip-link",
"-C", "link-arg=-Tlink.x",
"-C", "link-arg=-Tdefmt.x",
# This is needed if your flash or ram addresses are not aligned to 0x10000 in memory.x
# See https://github.com/rust-embedded/cortex-m-quickstart/pull/95
"-C", "link-arg=--nmagic",
# Can be useful for debugging.
# "-Clink-args=-Map=app.map"
]
[build]
# (`thumbv6m-*` is compatible with all ARM Cortex-M chips but using the right
# target improves performance)
# target = "thumbv6m-none-eabi" # Cortex-M0 and Cortex-M0+
# target = "thumbv7m-none-eabi" # Cortex-M3
# target = "thumbv7em-none-eabi" # Cortex-M4 and Cortex-M7 (no FPU)
target = "thumbv7em-none-eabihf" # Cortex-M4F and Cortex-M7F (with FPU)
[alias]
rb = "run --bin"
rrb = "run --release --bin"
[env]
DEFMT_LOG = "info"

View File

@ -0,0 +1,4 @@
/target
/.cargo/config*
/.vscode
/app.map

View File

@ -0,0 +1,871 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "autocfg"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "bare-metal"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5deb64efa5bd81e31fcd1938615a6d98c82eafcbcd787162b6f63b91d6bac5b3"
dependencies = [
"rustc_version",
]
[[package]]
name = "bare-metal"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8fe8f5a8a398345e52358e18ff07cc17a568fbca5c6f73873d3a62056309603"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bitfield"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46afbd2983a5d5a7bd740ccb198caf5b82f45c40c09c0eed36052d91cb92e719"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cobs"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1"
dependencies = [
"thiserror",
]
[[package]]
name = "const-default"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b396d1f76d455557e1218ec8066ae14bba60b4b36ecd55577ba979f5db7ecaa"
[[package]]
name = "cortex-m"
version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ec610d8f49840a5b376c69663b6369e71f4b34484b9b2eb29fb918d92516cb9"
dependencies = [
"bare-metal 0.2.5",
"bitfield",
"critical-section",
"embedded-hal 0.2.7",
"volatile-register",
]
[[package]]
name = "cortex-m-rt"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "801d4dec46b34c299ccf6b036717ae0fce602faa4f4fe816d9013b9a7c9f5ba6"
dependencies = [
"cortex-m-rt-macros",
]
[[package]]
name = "cortex-m-rt-macros"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37549a379a9e0e6e576fd208ee60394ccb8be963889eebba3ffe0980364f472"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "cortex-m-semihosting"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c23234600452033cc77e4b761e740e02d2c4168e11dbf36ab14a0f58973592b0"
dependencies = [
"cortex-m",
]
[[package]]
name = "crc"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
dependencies = [
"crc-catalog",
]
[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "critical-section"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b"
[[package]]
name = "defmt"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86f6162c53f659f65d00619fe31f14556a6e9f8752ccc4a41bd177ffcf3d6130"
dependencies = [
"bitflags",
"defmt-macros",
]
[[package]]
name = "defmt-brtt"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2f0ac3635d0c89d12b8101fcb44a7625f5f030a1c0491124b74467eb5a58a78"
dependencies = [
"critical-section",
"defmt",
]
[[package]]
name = "defmt-macros"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d135dd939bad62d7490b0002602d35b358dce5fd9233a709d3c1ef467d4bde6"
dependencies = [
"defmt-parser",
"proc-macro-error2",
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "defmt-parser"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3983b127f13995e68c1e29071e5d115cd96f215ccb5e6812e3728cd6f92653b3"
dependencies = [
"thiserror",
]
[[package]]
name = "defmt-test"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290966e8c38f94b11884877242de876280d0eab934900e9642d58868e77c5df1"
dependencies = [
"cortex-m-rt",
"cortex-m-semihosting",
"defmt",
"defmt-test-macros",
]
[[package]]
name = "defmt-test-macros"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "984bc6eca246389726ac2826acc2488ca0fe5fcd6b8d9b48797021951d76a125"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "delegate"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "297806318ef30ad066b15792a8372858020ae3ca2e414ee6c2133b1eb9e9e945"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "derive-new"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2cdc8d50f426189eef89dac62fabfa0abb27d5cc008f25bf4156a0203325becc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "embedded-alloc"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f2de9133f68db0d4627ad69db767726c99ff8585272716708227008d3f1bddd"
dependencies = [
"const-default",
"critical-section",
"linked_list_allocator",
"rlsf",
]
[[package]]
name = "embedded-dma"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "994f7e5b5cb23521c22304927195f236813053eb9c065dd2226a32ba64695446"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "embedded-hal"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35949884794ad573cf46071e41c9b60efb0cb311e3ca01f7af807af1debc66ff"
dependencies = [
"nb 0.1.3",
"void",
]
[[package]]
name = "embedded-hal"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "361a90feb7004eca4019fb28352a9465666b24f840f5c3cddf0ff13920590b89"
dependencies = [
"defmt",
]
[[package]]
name = "embedded-hal-async"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c4c685bbef7fe13c3c6dd4da26841ed3980ef33e841cddfa15ce8a8fb3f1884"
dependencies = [
"defmt",
"embedded-hal 1.0.0",
]
[[package]]
name = "embedded-hal-bus"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57b4e6ede84339ebdb418cd986e6320a34b017cdf99b5cc3efceec6450b06886"
dependencies = [
"critical-section",
"defmt",
"embedded-hal 1.0.0",
"embedded-hal-async",
]
[[package]]
name = "embedded-storage"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a21dea9854beb860f3062d10228ce9b976da520a73474aed3171ec276bc0c032"
[[package]]
name = "equivalent"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "fugit"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17186ad64927d5ac8f02c1e77ccefa08ccd9eaa314d5a4772278aa204a22f7e7"
dependencies = [
"gcd",
]
[[package]]
name = "futures-core"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "futures-task"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
[[package]]
name = "futures-util"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
"futures-core",
"futures-task",
"pin-project-lite",
"pin-utils",
]
[[package]]
name = "gcd"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d758ba1b47b00caf47f24925c0074ecb20d6dfcffe7f6d53395c0465674841a"
[[package]]
name = "hash32"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606"
dependencies = [
"byteorder",
]
[[package]]
name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
[[package]]
name = "heapless"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad"
dependencies = [
"defmt",
"hash32",
"stable_deref_trait",
]
[[package]]
name = "indexmap"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652"
dependencies = [
"equivalent",
"hashbrown",
]
[[package]]
name = "libc"
version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "linked_list_allocator"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afa463f5405ee81cdb9cc2baf37e08ec7e4c8209442b5d72c04cfb2cd6e6286"
[[package]]
name = "managed"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ca88d725a0a943b096803bd34e73a4437208b6077654cc4ecb2947a5f91618d"
[[package]]
name = "nb"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "801d31da0513b6ec5214e9bf433a77966320625a37860f910be265be6e18d06f"
dependencies = [
"nb 1.1.0",
]
[[package]]
name = "nb"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d5439c4ad607c3c23abf66de8c8bf57ba8adcd1f129e699851a6e43935d339d"
[[package]]
name = "num-traits"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
]
[[package]]
name = "num_enum"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179"
dependencies = [
"num_enum_derive",
]
[[package]]
name = "num_enum_derive"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "panic-probe"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4047d9235d1423d66cc97da7d07eddb54d4f154d6c13805c6d0793956f4f25b0"
dependencies = [
"cortex-m",
"defmt",
]
[[package]]
name = "paste"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "pin-project-lite"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "portable-atomic"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6"
[[package]]
name = "proc-macro-error-attr2"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
dependencies = [
"proc-macro2",
"quote",
]
[[package]]
name = "proc-macro-error2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
dependencies = [
"proc-macro-error-attr2",
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "proc-macro2"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rlsf"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "222fb240c3286247ecdee6fa5341e7cdad0ffdf8e7e401d9937f2d58482a20bf"
dependencies = [
"cfg-if",
"const-default",
"libc",
"svgbobdoc",
]
[[package]]
name = "rtic"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "401961431a1e491124cdd216a313fada2d395aa2b5bee2867c872fc8af7c1bc1"
dependencies = [
"bare-metal 1.0.0",
"cortex-m",
"critical-section",
"portable-atomic",
"rtic-core",
"rtic-macros",
]
[[package]]
name = "rtic-common"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0786b50b81ef9d2a944a000f60405bb28bf30cd45da2d182f3fe636b2321f35c"
dependencies = [
"critical-section",
]
[[package]]
name = "rtic-core"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9369355b04d06a3780ec0f51ea2d225624db777acbc60abd8ca4832da5c1a42"
[[package]]
name = "rtic-macros"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac22ab522d80079b48f46ac66ded4d349e1adf81b52430d6a74faa3a7790ed80"
dependencies = [
"indexmap",
"proc-macro-error2",
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "rtic-monotonics"
version = "2.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1cb90bcfdbbacf3ca37340cdab52ec2de5611c744095ef7889e9c50c233b748"
dependencies = [
"cfg-if",
"cortex-m",
"fugit",
"portable-atomic",
"rtic-time",
]
[[package]]
name = "rtic-sync"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49b1200137ccb2bf272a1801fa6e27264535facd356cb2c1d5bc8e12aa211bad"
dependencies = [
"critical-section",
"defmt",
"embedded-hal 1.0.0",
"embedded-hal-async",
"embedded-hal-bus",
"heapless",
"portable-atomic",
"rtic-common",
]
[[package]]
name = "rtic-time"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7b1d853fa50dc125695414ce4510567a0d420221e455b1568cfa8c9aece9614"
dependencies = [
"critical-section",
"embedded-hal 1.0.0",
"embedded-hal-async",
"fugit",
"futures-util",
"rtic-common",
]
[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
dependencies = [
"semver",
]
[[package]]
name = "satrs"
version = "0.2.1"
dependencies = [
"cobs",
"crc",
"defmt",
"delegate",
"derive-new",
"heapless",
"num-traits",
"num_enum",
"paste",
"satrs-shared",
"smallvec",
"spacepackets",
"static_cell",
"thiserror",
]
[[package]]
name = "satrs-shared"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f10b962c131d0bcb0de06fefa5d4716cbd7f836167f90229f7f38405dc573bd3"
dependencies = [
"spacepackets",
]
[[package]]
name = "satrs-stm32h7-nucleo-rtic"
version = "0.1.0"
dependencies = [
"cortex-m",
"cortex-m-rt",
"cortex-m-semihosting",
"defmt",
"defmt-brtt",
"defmt-test",
"embedded-alloc",
"panic-probe",
"rtic",
"rtic-monotonics",
"rtic-sync",
"satrs",
"smoltcp",
"stm32h7xx-hal",
]
[[package]]
name = "semver"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
dependencies = [
"semver-parser",
]
[[package]]
name = "semver-parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "smallvec"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "smoltcp"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a1a996951e50b5971a2c8c0fa05a381480d70a933064245c4a223ddc87ccc97"
dependencies = [
"bitflags",
"byteorder",
"cfg-if",
"defmt",
"heapless",
"managed",
]
[[package]]
name = "spacepackets"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "591d42bfa6af2ab308e5efd8f0870e2d0eb5dd19a141bdcb7da731f5ff9cc11c"
dependencies = [
"crc",
"defmt",
"delegate",
"num-traits",
"num_enum",
"paste",
"thiserror",
"zerocopy",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "static_cell"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d89b0684884a883431282db1e4343f34afc2ff6996fe1f4a1664519b66e14c1e"
dependencies = [
"portable-atomic",
]
[[package]]
name = "stm32h7"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "362f288cd8341e9209587b889c385f323e82fc237b60c272868965bb879bb9b1"
dependencies = [
"bare-metal 1.0.0",
"cortex-m",
"cortex-m-rt",
"vcell",
]
[[package]]
name = "stm32h7xx-hal"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bd869329be25440b24e2b3583a1c016151b4a54bc36d96d82af7fcd9d010b98"
dependencies = [
"bare-metal 1.0.0",
"cast",
"cortex-m",
"embedded-dma",
"embedded-hal 0.2.7",
"embedded-storage",
"fugit",
"nb 1.1.0",
"paste",
"smoltcp",
"stm32h7",
"void",
]
[[package]]
name = "svgbobdoc"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2c04b93fc15d79b39c63218f15e3fdffaa4c227830686e3b7c5f41244eb3e50"
dependencies = [
"base64",
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-width",
]
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "2.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "unicode-ident"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034"
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "vcell"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77439c1b53d2303b20d9459b1ade71a83c716e3f9c34f3228c00e6f185d6c002"
[[package]]
name = "void"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
[[package]]
name = "volatile-register"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de437e2a6208b014ab52972a27e59b33fa2920d3e00fe05026167a1c509d19cc"
dependencies = [
"vcell",
]
[[package]]
name = "zerocopy"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a367f292d93d4eab890745e75a778da40909cab4d6ff8173693812f79c4a2468"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3931cb58c62c13adec22e38686b559c86a30565e16ad6e8510a337cedc611e1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]

View File

@ -0,0 +1,85 @@
[package]
authors = ["Robin Mueller <robin.mueller.m@gmail.com>"]
name = "satrs-stm32h7-nucleo-rtic"
edition = "2021"
version = "0.1.0"
default-run = "satrs-stm32h7-nucleo-rtic"
[lib]
harness = false
# needed for each integration test
[[test]]
name = "integration"
harness = false
[dependencies]
cortex-m = { version = "0.7", features = ["critical-section-single-core"] }
cortex-m-rt = "0.7"
defmt = "0.3"
defmt-brtt = { version = "0.1", default-features = false, features = ["rtt"] }
panic-probe = { version = "0.3", features = ["print-defmt"] }
cortex-m-semihosting = "0.5.0"
stm32h7xx-hal = { version="0.16", features= ["stm32h743v", "ethernet"] }
embedded-alloc = "0.6"
rtic-sync = { version = "1", features = ["defmt-03"] }
[dependencies.smoltcp]
version = "0.11"
default-features = false
features = ["medium-ethernet", "proto-ipv4", "socket-raw", "socket-dhcpv4", "socket-udp", "defmt"]
[dependencies.rtic]
version = "2"
features = ["thumbv7-backend"]
[dependencies.rtic-monotonics]
version = "2"
features = ["cortex-m-systick"]
[dependencies.satrs]
path = "../../satrs"
version = "0.2"
default-features = false
features = ["defmt", "heapless"]
[dev-dependencies]
defmt-test = "0.3"
# cargo build/run
[profile.dev]
codegen-units = 1
debug = 2
debug-assertions = true # <-
incremental = false
opt-level = 's' # <-
overflow-checks = true # <-
# cargo test
[profile.test]
codegen-units = 1
debug = 2
debug-assertions = true # <-
incremental = false
opt-level = 3 # <-
overflow-checks = true # <-
# cargo build/run --release
[profile.release]
codegen-units = 1
debug = 2
debug-assertions = false # <-
incremental = false
lto = 'fat'
opt-level = 3 # <-
overflow-checks = false # <-
# cargo test --release
[profile.bench]
codegen-units = 1
debug = 2
debug-assertions = false # <-
incremental = false
lto = 'fat'
opt-level = 3 # <-
overflow-checks = false # <-

View File

@ -0,0 +1,118 @@
sat-rs example for the STM32H743ZI-Nucleo board
=======
This example application shows how the [sat-rs library](https://egit.irs.uni-stuttgart.de/rust/sat-rs)
can be used on an embedded target.
It also shows how a relatively simple OBSW could be built when no standard runtime is available.
It uses [RTIC](https://rtic.rs/2/book/en/) as the concurrency framework and the
[defmt](https://defmt.ferrous-systems.com/) framework for logging.
The STM32H743ZIT device was picked because it is one of the more powerful Cortex-M based devices
available from ST. It also has a bit more RAM available and allows commanding via TCP/IP.
## Pre-Requisites
Make sure the following tools are installed:
1. [`probe-rs`](https://probe.rs/): Application used to flash and debug the MCU.
2. Optional and recommended: [VS Code](https://code.visualstudio.com/) with
[probe-rs plugin](https://marketplace.visualstudio.com/items?itemName=probe-rs.probe-rs-debugger)
for debugging.
## Preparing Rust and the repository
Building an application requires the `thumbv7em-none-eabihf` cross-compiler toolchain.
If you have not installed it yet, you can do so with
```sh
rustup target add thumbv7em-none-eabihf
```
A default `.cargo` config file is provided for this project, but needs to be copied to have
the correct name. This is so that the config file can be updated or edited for custom needs
without being tracked by git.
```sh
cp def_config.toml config.toml
```
The configuration file will also set the target so it does not always have to be specified with
the `--target` argument.
## Building
After that, assuming that you have a `.cargo/config.toml` setting the correct build target,
you can simply build the application with
```sh
cargo build
```
## Flashing from the command line
You can flash the application from the command line using `probe-rs`:
```sh
probe-rs run --chip STM32H743ZITx
```
## Debugging with VS Code
The Nucleo-H743ZI board comes with an on-board ST-Link, so all that is required to flash and debug
the board is a USB cable. The code in this repository was debugged using [`probe-rs`](https://probe.rs/docs/tools/debuggerA)
and the VS Code [`probe-rs` plugin](https://marketplace.visualstudio.com/items?itemName=probe-rs.probe-rs-debugger).
Make sure to install this plugin first.
Sample configuration files for VS Code are provided inside the `vscode` folder.
Use `cp vscode .vscode -r` to use them for your project.
After that, you can simply use `Run` and `Debug` to automatically rebuild and flash your application.
The `tasks.json` and `launch.json` files are generic and you can use them immediately by opening
the folder in VS code or adding it to a workspace.
## Commanding with Python
When the SW is running on the Nucleo board, you can command the MCU via Ethernet, using PUS packets
sent in UDP datagrams.
It is recommended to use a virtual environment to do this. To set up one in the command line,
you can use `python3 -m venv venv` on Unix systems or `py -m venv venv` on Windows systems.
After doing this, you can check the [venv tutorial](https://docs.python.org/3/tutorial/venv.html)
on how to activate the environment and then use the following command to install the required
dependency:
```sh
pip install -r requirements.txt
```
The packets are exchanged via the board's Ethernet interface using UDP. The firmware listens on UDP
port 7301, which is also the default port in the provided configuration file.
A default configuration file for the python application is provided and can be used by running
```sh
cp def_tmtc_conf.json tmtc_conf.json
```
After that, you can for example send a ping to the MCU using the following command
```sh
./main.py -p /ping
```
You can configure the blinky frequency using
```sh
./main.py -p /change_blink_freq
```
All these commands will package a PUS telecommand which will be sent to the MCU inside a UDP
datagram.
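On the firmware side, the RTIC application shown later in this listing reads each received datagram
back out of a shared memory pool and parses it with `PusTcReader` from the `spacepackets` crate. The
following is only a minimal sketch of how the ping telecommand could be recognized there; the
`handle_tc` function and the reply handling are illustrative, the real code lives in the
`tc_source_task`:
```rust
use satrs::spacepackets::ecss::tc::PusTcReader;
use satrs::spacepackets::ecss::PusPacket;

// Minimal sketch: `buf` holds one raw telecommand read back from the shared pool.
fn handle_tc(buf: &[u8]) {
    match PusTcReader::new(buf) {
        Ok((tc, _tc_len)) => {
            // PUS service 17, subservice 1 is the ping telecommand sent by `./main.py -p /ping`.
            if tc.service() == 17 && tc.subservice() == 1 {
                defmt::info!("received ping TC");
                // A full implementation would now queue a TM[17,2] ping reply.
            }
        }
        Err(e) => {
            defmt::info!("invalid TC format, not a PUS packet: {}", e);
        }
    }
}
```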
## Resources
- [STM32H743ZI Ethernet link checker example](https://github.com/stm32-rs/stm32h7xx-hal/blob/master/examples/ethernet-nucleo-h743zi2.rs)
- [smoltcp DHCP client](https://github.com/smoltcp-rs/smoltcp/blob/main/examples/dhcp_client.rs)

File diff suppressed because it is too large.

View File

@ -0,0 +1,119 @@
/* Taken from https://github.com/stm32-rs/stm32h7xx-hal/pull/299, adapted slightly to work with */
/* flip-link */
MEMORY
{
/* This file is intended for parts in the STM32H743/743v/753/753v families (RM0433), */
/* with the exception of the STM32H742/742v parts which have a different RAM layout. */
/* - FLASH and RAM are mandatory memory sections. */
/* - The sum of all non-FLASH sections must add to 1060K total device RAM. */
/* - The FLASH section size must match your device, see table below. */
/* FLASH */
/* Flash is divided in two independent banks (except 750xB). */
/* Select the appropriate FLASH size for your device. */
/* - STM32H750xB 128K (only FLASH1) */
/* - STM32H750xB 1M (512K + 512K) */
/* - STM32H743xI/753xI 2M ( 1M + 1M) */
FLASH1 : ORIGIN = 0x08000000, LENGTH = 1M
FLASH2 : ORIGIN = 0x08100000, LENGTH = 1M
/* Data TCM */
/* - Two contiguous 64KB RAMs. */
/* - Used for interrupt handlers, stacks and general RAM. */
/* - Zero wait-states. */
/* - The DTCM is taken as the origin of the base ram. (See below.) */
/* This is also where the interrupt table and such will live, */
/* which is required for deterministic performance. */
/* Need a region called RAM */
/* DTCM : ORIGIN = 0x20000000, LENGTH = 128K */
RAM : ORIGIN = 0x20000000, LENGTH = 128K
/* Instruction TCM */
/* - Used for latency-critical interrupt handlers etc. */
/* - Zero wait-states. */
ITCM : ORIGIN = 0x00000000, LENGTH = 64K
/* AXI SRAM */
/* - AXISRAM is in D1 and accessible by all system masters except BDMA. */
/* - Suitable for application data not stored in DTCM. */
/* - Zero wait-states. */
AXISRAM : ORIGIN = 0x24000000, LENGTH = 512K
/* AHB SRAM */
/* - SRAM1-3 are in D2 and accessible by all system masters except BDMA. */
/* Suitable for use as DMA buffers. */
/* - SRAM4 is in D3 and additionally accessible by the BDMA. Used for BDMA */
/* buffers, for storing application data in lower-power modes. */
/* - Zero wait-states. */
SRAM1 : ORIGIN = 0x30000000, LENGTH = 128K
SRAM2 : ORIGIN = 0x30020000, LENGTH = 128K
SRAM3 : ORIGIN = 0x30040000, LENGTH = 32K
SRAM4 : ORIGIN = 0x38000000, LENGTH = 64K
/* Backup SRAM */
BSRAM : ORIGIN = 0x38800000, LENGTH = 4K
}
/* */
/* Assign the memory regions defined above for use. */
/* */
/* Provide the mandatory FLASH and RAM definitions for cortex-m-rt's linker script. */
/* These do not work with flip-link */
REGION_ALIAS(FLASH, FLASH1);
/* REGION_ALIAS(RAM, DTCM); */
/* The location of the stack can be overridden using the `_stack_start` symbol. */
/* - Set the stack location at the end of RAM, using all remaining space. */
_stack_start = ORIGIN(RAM) + LENGTH(RAM);
/* The location of the .text section can be overridden using the */
/* `_stext` symbol. By default it will place after .vector_table. */
/* _stext = ORIGIN(FLASH) + 0x40c; */
/* Define sections for placing symbols into the extra memory regions above. */
/* This makes them accessible from code. */
/* - ITCM, DTCM and AXISRAM connect to a 64-bit wide bus -> align to 8 bytes. */
/* - All other memories connect to a 32-bit wide bus -> align to 4 bytes. */
SECTIONS {
.flash2 (NOLOAD) : ALIGN(4) {
*(.flash2 .flash2.*);
. = ALIGN(4);
} > FLASH2
.itcm (NOLOAD) : ALIGN(8) {
*(.itcm .itcm.*);
. = ALIGN(8);
} > ITCM
.axisram (NOLOAD) : ALIGN(8) {
*(.axisram .axisram.*);
. = ALIGN(8);
} > AXISRAM
.sram1 (NOLOAD) : ALIGN(8) {
*(.sram1 .sram1.*);
. = ALIGN(4);
} > SRAM1
.sram2 (NOLOAD) : ALIGN(8) {
*(.sram2 .sram2.*);
. = ALIGN(4);
} > SRAM2
.sram3 (NOLOAD) : ALIGN(4) {
*(.sram3 .sram3.*);
. = ALIGN(4);
} > SRAM3
.sram4 (NOLOAD) : ALIGN(4) {
*(.sram4 .sram4.*);
. = ALIGN(4);
} > SRAM4
.bsram (NOLOAD) : ALIGN(4) {
*(.bsram .bsram.*);
. = ALIGN(4);
} > BSRAM
};
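The extra output sections defined above can be referenced from Rust code with the `#[link_section]`
attribute; the RTIC application in this repository uses this pattern to place its heap and its static
memory pool buffers into AXI SRAM. A minimal sketch, with an illustrative buffer name and size:
```rust
use core::mem::MaybeUninit;

// Place a large buffer into AXI SRAM instead of the default RAM region (DTCM).
// The section is declared NOLOAD, so the memory starts out uninitialised, which
// is why MaybeUninit is used here. Name and size are only illustrative.
#[link_section = ".axisram"]
static mut SCRATCH_BUFFER: [MaybeUninit<u8>; 16 * 1024] = [MaybeUninit::uninit(); 16 * 1024];
```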

View File

@ -1,4 +1,5 @@
/venv
/.tmtc-history.txt
/log
/.idea/*
!/.idea/runConfigurations

View File

@ -0,0 +1,4 @@
{
"com_if": "udp",
"tcpip_udp_port": 7301
}

View File

@ -1,39 +1,40 @@
#!/usr/bin/env python3
"""Example client for the sat-rs example application"""
import enum
import struct
import logging
import sys
import time
from typing import Optional, cast
from typing import Any, Optional, cast
from prompt_toolkit.history import FileHistory, History
from spacepackets.ecss.tm import CdsShortTimestamp
import tmtccmd
from spacepackets.ecss import PusTelemetry, PusTelecommand, PusVerificator
from spacepackets.ecss import PusTelemetry, PusTelecommand, PusTm, PusVerificator
from spacepackets.ecss.pus_17_test import Service17Tm
from spacepackets.ecss.pus_1_verification import UnpackParams, Service1Tm
from tmtccmd import CcsdsTmtcBackend, TcHandlerBase, ProcedureParamsWrapper
from tmtccmd import TcHandlerBase, ProcedureParamsWrapper
from tmtccmd.core.base import BackendRequest
from tmtccmd.core.ccsds_backend import QueueWrapper
from tmtccmd.logging import add_colorlog_console_logger
from tmtccmd.pus import VerificationWrapper
from tmtccmd.tm import CcsdsTmHandler, SpecificApidHandlerBase
from tmtccmd.com_if import ComInterface
from tmtccmd.tmtc import CcsdsTmHandler, SpecificApidHandlerBase
from tmtccmd.com import ComInterface
from tmtccmd.config import (
CmdTreeNode,
default_json_path,
SetupParams,
TmTcCfgHookBase,
TmtcDefinitionWrapper,
CoreServiceList,
OpCodeEntry,
HookBase,
params_to_procedure_conversion,
)
from tmtccmd.config.com_if import SerialCfgWrapper
from tmtccmd.config.com import SerialCfgWrapper
from tmtccmd.config import PreArgsParsingWrapper, SetupWrapper
from tmtccmd.logging import get_console_logger
from tmtccmd.logging.pus import (
RegularTmtcLogWrapper,
RawTmtcTimedLogWrapper,
TimedLogWhen,
)
from tmtccmd.tc import (
from tmtccmd.tmtc import (
TcQueueEntryType,
ProcedureWrapper,
TcProcedureType,
@ -41,27 +42,26 @@ from tmtccmd.tc import (
SendCbParams,
DefaultPusQueueHelper,
)
from tmtccmd.tm.pus_5_event import Service5Tm
from tmtccmd.util import FileSeqCountProvider, PusFileSeqCountProvider
from tmtccmd.pus.s5_fsfw_event import Service5Tm
from spacepackets.seqcount import FileSeqCountProvider, PusFileSeqCountProvider
from tmtccmd.util.obj_id import ObjectIdDictT
from tmtccmd.util.tmtc_printer import FsfwTmTcPrinter
LOGGER = get_console_logger()
_LOGGER = logging.getLogger()
EXAMPLE_PUS_APID = 0x02
class SatRsConfigHook(TmTcCfgHookBase):
class SatRsConfigHook(HookBase):
def __init__(self, json_cfg_path: str):
super().__init__(json_cfg_path=json_cfg_path)
super().__init__(json_cfg_path)
def assign_communication_interface(self, com_if_key: str) -> Optional[ComInterface]:
from tmtccmd.config.com_if import (
def get_communication_interface(self, com_if_key: str) -> Optional[ComInterface]:
from tmtccmd.config.com import (
create_com_interface_default,
create_com_interface_cfg_default,
)
assert self.cfg_path is not None
cfg = create_com_interface_cfg_default(
com_if_key=com_if_key,
json_cfg_path=self.cfg_path,
@ -76,35 +76,14 @@ class SatRsConfigHook(TmTcCfgHookBase):
cfg.serial_cfg.serial_timeout = 0.5
return create_com_interface_default(cfg)
def get_tmtc_definitions(self) -> TmtcDefinitionWrapper:
from tmtccmd.config.globals import get_default_tmtc_defs
def get_command_definitions(self) -> CmdTreeNode:
"""This function should return the root node of the command definition tree."""
return create_cmd_definition_tree()
defs = get_default_tmtc_defs()
srv_5 = OpCodeEntry()
srv_5.add("0", "Event Test")
defs.add_service(
name=CoreServiceList.SERVICE_5.value,
info="PUS Service 5 Event",
op_code_entry=srv_5,
)
srv_17 = OpCodeEntry()
srv_17.add("0", "Ping Test")
defs.add_service(
name=CoreServiceList.SERVICE_17_ALT,
info="PUS Service 17 Test",
op_code_entry=srv_17,
)
srv_3 = OpCodeEntry()
defs.add_service(
name=CoreServiceList.SERVICE_3,
info="PUS Service 3 Housekeeping",
op_code_entry=srv_3,
)
return defs
def perform_mode_operation(self, tmtc_backend: CcsdsTmtcBackend, mode: int):
LOGGER.info("Mode operation hook was called")
pass
def get_cmd_history(self) -> Optional[History]:
"""Optionlly return a history class for the past command paths which will be used
when prompting a command path from the user in CLI mode."""
return FileHistory(".tmtc-history.txt")
def get_object_ids(self) -> ObjectIdDictT:
from tmtccmd.config.objects import get_core_object_ids
@ -112,74 +91,77 @@ class SatRsConfigHook(TmTcCfgHookBase):
return get_core_object_ids()
def create_cmd_definition_tree() -> CmdTreeNode:
root_node = CmdTreeNode.root_node()
root_node.add_child(CmdTreeNode("ping", "Send PUS ping TC"))
root_node.add_child(CmdTreeNode("change_blink_freq", "Change blink frequency"))
return root_node
class PusHandler(SpecificApidHandlerBase):
def __init__(
self,
file_logger: logging.Logger,
verif_wrapper: VerificationWrapper,
printer: FsfwTmTcPrinter,
raw_logger: RawTmtcTimedLogWrapper,
):
super().__init__(EXAMPLE_PUS_APID, None)
self.printer = printer
self.file_logger = file_logger
self.raw_logger = raw_logger
self.verif_wrapper = verif_wrapper
def handle_tm(self, packet: bytes, _user_args: any):
def handle_tm(self, packet: bytes, _user_args: Any):
try:
tm_packet = PusTelemetry.unpack(packet)
pus_tm = PusTm.unpack(
packet, timestamp_len=CdsShortTimestamp.TIMESTAMP_SIZE
)
except ValueError as e:
LOGGER.warning("Could not generate PUS TM object from raw data")
LOGGER.warning(f"Raw Packet: [{packet.hex(sep=',')}], REPR: {packet!r}")
_LOGGER.warning("Could not generate PUS TM object from raw data")
_LOGGER.warning(f"Raw Packet: [{packet.hex(sep=',')}], REPR: {packet!r}")
raise e
service = tm_packet.service
dedicated_handler = False
service = pus_tm.service
tm_packet = None
if service == 1:
tm_packet = Service1Tm.unpack(data=packet, params=UnpackParams(1, 2))
tm_packet = Service1Tm.unpack(
data=packet, params=UnpackParams(CdsShortTimestamp.TIMESTAMP_SIZE, 1, 2)
)
res = self.verif_wrapper.add_tm(tm_packet)
if res is None:
LOGGER.info(
_LOGGER.info(
f"Received Verification TM[{tm_packet.service}, {tm_packet.subservice}] "
f"with Request ID {tm_packet.tc_req_id.as_u32():#08x}"
)
LOGGER.warning(
_LOGGER.warning(
f"No matching telecommand found for {tm_packet.tc_req_id}"
)
else:
self.verif_wrapper.log_to_console(tm_packet, res)
self.verif_wrapper.log_to_file(tm_packet, res)
dedicated_handler = True
if service == 3:
LOGGER.info("No handling for HK packets implemented")
LOGGER.info(f"Raw packet: 0x[{packet.hex(sep=',')}]")
pus_tm = PusTelemetry.unpack(packet)
_LOGGER.info("No handling for HK packets implemented")
_LOGGER.info(f"Raw packet: 0x[{packet.hex(sep=',')}]")
pus_tm = PusTelemetry.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
if pus_tm.subservice == 25:
if len(pus_tm.source_data) < 8:
raise ValueError("No addressable ID in HK packet")
json_str = pus_tm.source_data[8:]
dedicated_handler = True
_LOGGER.info("received JSON string: " + json_str.decode("utf-8"))
if service == 5:
tm_packet = Service5Tm.unpack(packet)
tm_packet = Service5Tm.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
if service == 17:
tm_packet = Service17Tm.unpack(packet)
dedicated_handler = True
tm_packet = Service17Tm.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
if tm_packet.subservice == 2:
self.printer.file_logger.info("Received Ping Reply TM[17,2]")
LOGGER.info("Received Ping Reply TM[17,2]")
_LOGGER.info("Received Ping Reply TM[17,2]")
else:
self.printer.file_logger.info(
f"Received Test Packet with unknown subservice {tm_packet.subservice}"
)
LOGGER.info(
_LOGGER.info(
f"Received Test Packet with unknown subservice {tm_packet.subservice}"
)
if tm_packet is None:
LOGGER.info(
_LOGGER.info(
f"The service {service} is not implemented in Telemetry Factory"
)
tm_packet = PusTelemetry.unpack(packet)
self.raw_logger.log_tm(tm_packet)
if not dedicated_handler and tm_packet is not None:
self.printer.handle_long_tm_print(packet_if=tm_packet, info_if=tm_packet)
tm_packet = PusTelemetry.unpack(packet, CdsShortTimestamp.TIMESTAMP_SIZE)
self.raw_logger.log_tm(pus_tm)
def make_addressable_id(target_id: int, unique_id: int) -> bytes:
@ -198,8 +180,11 @@ class TcHandler(TcHandlerBase):
self.seq_count_provider = seq_count_provider
self.verif_wrapper = verif_wrapper
self.queue_helper = DefaultPusQueueHelper(
queue_wrapper=None,
queue_wrapper=QueueWrapper.empty(),
tc_sched_timestamp_len=7,
seq_cnt_provider=seq_count_provider,
pus_verificator=verif_wrapper.pus_verificator,
default_pus_apid=EXAMPLE_PUS_APID,
)
def send_cb(self, send_params: SendCbParams):
@ -212,61 +197,74 @@ class TcHandler(TcHandlerBase):
)
self.verif_wrapper.add_tc(pus_tc_wrapper.pus_tc)
raw_tc = pus_tc_wrapper.pus_tc.pack()
LOGGER.info(f"Sending {pus_tc_wrapper.pus_tc}")
_LOGGER.info(f"Sending {pus_tc_wrapper.pus_tc}")
send_params.com_if.send(raw_tc)
elif entry_helper.entry_type == TcQueueEntryType.LOG:
log_entry = entry_helper.to_log_entry()
LOGGER.info(log_entry.log_str)
_LOGGER.info(log_entry.log_str)
def queue_finished_cb(self, helper: ProcedureWrapper):
if helper.proc_type == TcProcedureType.DEFAULT:
def_proc = helper.to_def_procedure()
LOGGER.info(
f"Queue handling finished for service {def_proc.service} and "
f"op code {def_proc.op_code}"
)
def queue_finished_cb(self, info: ProcedureWrapper):
if info.proc_type == TcProcedureType.TREE_COMMANDING:
def_proc = info.to_tree_commanding_procedure()
_LOGGER.info(f"Queue handling finished for command {def_proc.cmd_path}")
def feed_cb(self, helper: ProcedureWrapper, wrapper: FeedWrapper):
def feed_cb(self, info: ProcedureWrapper, wrapper: FeedWrapper):
q = self.queue_helper
q.queue_wrapper = wrapper.queue_wrapper
if helper.proc_type == TcProcedureType.DEFAULT:
def_proc = helper.to_def_procedure()
service = def_proc.service
op_code = def_proc.op_code
if (
service == CoreServiceList.SERVICE_17
or service == CoreServiceList.SERVICE_17_ALT
):
if info.proc_type == TcProcedureType.TREE_COMMANDING:
def_proc = info.to_tree_commanding_procedure()
cmd_path = def_proc.cmd_path
if cmd_path == "/ping":
q.add_log_cmd("Sending PUS ping telecommand")
return q.add_pus_tc(PusTelecommand(service=17, subservice=1))
q.add_pus_tc(PusTelecommand(service=17, subservice=1))
if cmd_path == "/change_blink_freq":
self.create_change_blink_freq_command(q)
def create_change_blink_freq_command(self, q: DefaultPusQueueHelper):
q.add_log_cmd("Changing blink frequency")
while True:
blink_freq = int(
input(
"Please specify new blink frequency in ms. Valid Range [2..10000]: "
)
)
if blink_freq < 2 or blink_freq > 10000:
print(
"Invalid blink frequency. Please specify a value between 2 and 10000."
)
continue
break
app_data = struct.pack("!I", blink_freq)
q.add_pus_tc(PusTelecommand(service=8, subservice=1, app_data=app_data))
def main():
add_colorlog_console_logger(_LOGGER)
tmtccmd.init_printout(False)
hook_obj = SatRsConfigHook(json_cfg_path=default_json_path())
parser_wrapper = PreArgsParsingWrapper()
parser_wrapper.create_default_parent_parser()
parser_wrapper.create_default_parser()
parser_wrapper.add_def_proc_args()
post_args_wrapper = parser_wrapper.parse(hook_obj)
params = SetupParams()
post_args_wrapper = parser_wrapper.parse(hook_obj, params)
proc_wrapper = ProcedureParamsWrapper()
if post_args_wrapper.use_gui:
post_args_wrapper.set_params_without_prompts(params, proc_wrapper)
post_args_wrapper.set_params_without_prompts(proc_wrapper)
else:
post_args_wrapper.set_params_with_prompts(params, proc_wrapper)
post_args_wrapper.set_params_with_prompts(proc_wrapper)
params.apid = EXAMPLE_PUS_APID
setup_args = SetupWrapper(
hook_obj=hook_obj, setup_params=params, proc_param_wrapper=proc_wrapper
)
# Create console logger helper and file loggers
tmtc_logger = RegularTmtcLogWrapper()
printer = FsfwTmTcPrinter(tmtc_logger.logger)
file_logger = tmtc_logger.logger
raw_logger = RawTmtcTimedLogWrapper(when=TimedLogWhen.PER_HOUR, interval=1)
verificator = PusVerificator()
verification_wrapper = VerificationWrapper(verificator, LOGGER, printer.file_logger)
verification_wrapper = VerificationWrapper(verificator, _LOGGER, file_logger)
# Create primary TM handler and add it to the CCSDS Packet Handler
tm_handler = PusHandler(verification_wrapper, printer, raw_logger)
tm_handler = PusHandler(file_logger, verification_wrapper, raw_logger)
ccsds_handler = CcsdsTmHandler(generic_handler=None)
ccsds_handler.add_apid_handler(tm_handler)
@ -288,7 +286,7 @@ def main():
if state.request == BackendRequest.TERMINATION_NO_ERROR:
sys.exit(0)
elif state.request == BackendRequest.DELAY_IDLE:
LOGGER.info("TMTC Client in IDLE mode")
_LOGGER.info("TMTC Client in IDLE mode")
time.sleep(3.0)
elif state.request == BackendRequest.DELAY_LISTENER:
time.sleep(0.8)

View File

@ -0,0 +1,2 @@
tmtccmd == 8.0.1
# -e git+https://github.com/robamu-org/tmtccmd.git@main#egg=tmtccmd

View File

@ -0,0 +1,55 @@
//! Blinks an LED
//!
//! This assumes that LD2 (blue) is connected to pb7 and LD3 (red) is connected
//! to pb14. This assumption is true for the nucleo-h743zi board.
#![no_std]
#![no_main]
use satrs_stm32h7_nucleo_rtic as _;
use stm32h7xx_hal::{block, prelude::*, timer::Timer};
use cortex_m_rt::entry;
#[entry]
fn main() -> ! {
defmt::println!("starting stm32h7 blinky example");
// Get access to the device specific peripherals from the peripheral access crate
let dp = stm32h7xx_hal::stm32::Peripherals::take().unwrap();
// Take ownership over the RCC devices and convert them into the corresponding HAL structs
let rcc = dp.RCC.constrain();
let pwr = dp.PWR.constrain();
let pwrcfg = pwr.freeze();
// Freeze the configuration of all the clocks in the system and
// retrieve the Core Clock Distribution and Reset (CCDR) object
let rcc = rcc.use_hse(8.MHz()).bypass_hse();
let ccdr = rcc.freeze(pwrcfg, &dp.SYSCFG);
// Acquire the GPIOB peripheral
let gpiob = dp.GPIOB.split(ccdr.peripheral.GPIOB);
// Configure gpio B pin 0 as a push-pull output.
let mut ld1 = gpiob.pb0.into_push_pull_output();
// Configure gpio B pin 7 as a push-pull output.
let mut ld2 = gpiob.pb7.into_push_pull_output();
// Configure gpio B pin 14 as a push-pull output.
let mut ld3 = gpiob.pb14.into_push_pull_output();
// Configure the timer to trigger an update every second
let mut timer = Timer::tim1(dp.TIM1, ccdr.peripheral.TIM1, &ccdr.clocks);
timer.start(1.Hz());
// Wait for the timer to trigger an update and change the state of the LED
loop {
ld1.toggle();
ld2.toggle();
ld3.toggle();
block!(timer.wait()).unwrap();
}
}

View File

@ -0,0 +1,11 @@
#![no_main]
#![no_std]
use satrs_stm32h7_nucleo_rtic as _; // global logger + panicking-behavior + memory layout
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::println!("Hello, world!");
satrs_stm32h7_nucleo_rtic::exit()
}

View File

@ -0,0 +1,52 @@
#![no_main]
#![no_std]
use cortex_m_semihosting::debug;
use defmt_brtt as _; // global logger
// TODO(5) adjust HAL import
use stm32h7xx_hal as _; // memory layout
use panic_probe as _;
// same panicking *behavior* as `panic-probe` but doesn't print a panic message
// this prevents the panic message being printed *twice* when `defmt::panic` is invoked
#[defmt::panic_handler]
fn panic() -> ! {
cortex_m::asm::udf()
}
/// Terminates the application and makes a semihosting-capable debug tool exit
/// with status code 0.
pub fn exit() -> ! {
loop {
debug::exit(debug::EXIT_SUCCESS);
}
}
/// Hardfault handler.
///
/// Terminates the application and makes a semihosting-capable debug tool exit
/// with an error. This seems better than the default, which is to spin in a
/// loop.
#[cortex_m_rt::exception]
unsafe fn HardFault(_frame: &cortex_m_rt::ExceptionFrame) -> ! {
loop {
debug::exit(debug::EXIT_FAILURE);
}
}
// defmt-test 0.3.0 has the limitation that this `#[tests]` attribute can only be used
// once within a crate. the module can be in any file but there can only be at most
// one `#[tests]` module in this library crate
#[cfg(test)]
#[defmt_test::tests]
mod unit_tests {
use defmt::assert;
#[test]
fn it_works() {
assert!(true)
}
}

View File

@ -0,0 +1,523 @@
#![no_main]
#![no_std]
extern crate alloc;
use rtic::app;
use rtic_monotonics::systick::prelude::*;
use satrs::pool::{PoolAddr, PoolProvider, StaticHeaplessMemoryPool};
use satrs::static_subpool;
// global logger + panicking-behavior + memory layout
use satrs_stm32h7_nucleo_rtic as _;
use smoltcp::socket::udp::UdpMetadata;
use smoltcp::socket::{dhcpv4, udp};
use core::mem::MaybeUninit;
use embedded_alloc::LlffHeap as Heap;
use smoltcp::iface::{Config, Interface, SocketHandle, SocketSet, SocketStorage};
use smoltcp::wire::{HardwareAddress, IpAddress, IpCidr};
use stm32h7xx_hal::ethernet;
const DEFAULT_BLINK_FREQ_MS: u32 = 1000;
const PORT: u16 = 7301;
const HEAP_SIZE: usize = 131_072;
const TC_SOURCE_CHANNEL_DEPTH: usize = 16;
pub type SharedPool = StaticHeaplessMemoryPool<3>;
pub type TcSourceChannel = rtic_sync::channel::Channel<PoolAddr, TC_SOURCE_CHANNEL_DEPTH>;
pub type TcSourceTx = rtic_sync::channel::Sender<'static, PoolAddr, TC_SOURCE_CHANNEL_DEPTH>;
pub type TcSourceRx = rtic_sync::channel::Receiver<'static, PoolAddr, TC_SOURCE_CHANNEL_DEPTH>;
#[global_allocator]
static HEAP: Heap = Heap::empty();
systick_monotonic!(Mono, 1000);
// We place the memory pool buffers inside the larger AXISRAM.
pub const SUBPOOL_SMALL_NUM_BLOCKS: u16 = 32;
pub const SUBPOOL_SMALL_BLOCK_SIZE: usize = 32;
pub const SUBPOOL_MEDIUM_NUM_BLOCKS: u16 = 16;
pub const SUBPOOL_MEDIUM_BLOCK_SIZE: usize = 128;
pub const SUBPOOL_LARGE_NUM_BLOCKS: u16 = 8;
pub const SUBPOOL_LARGE_BLOCK_SIZE: usize = 2048;
// This data will be held by Net through a mutable reference
pub struct NetStorageStatic<'a> {
socket_storage: [SocketStorage<'a>; 8],
}
// MaybeUninit allows us to write code that is correct even if STORE is not
// initialised by the runtime
static mut STORE: MaybeUninit<NetStorageStatic> = MaybeUninit::uninit();
static mut UDP_RX_META: [udp::PacketMetadata; 12] = [udp::PacketMetadata::EMPTY; 12];
static mut UDP_RX: [u8; 2048] = [0; 2048];
static mut UDP_TX_META: [udp::PacketMetadata; 12] = [udp::PacketMetadata::EMPTY; 12];
static mut UDP_TX: [u8; 2048] = [0; 2048];
/// Locally administered MAC address
const MAC_ADDRESS: [u8; 6] = [0x02, 0x00, 0x11, 0x22, 0x33, 0x44];
pub struct Net {
iface: Interface,
ethdev: ethernet::EthernetDMA<4, 4>,
dhcp_handle: SocketHandle,
}
impl Net {
pub fn new(
sockets: &mut SocketSet<'static>,
mut ethdev: ethernet::EthernetDMA<4, 4>,
ethernet_addr: HardwareAddress,
) -> Self {
let config = Config::new(ethernet_addr);
let mut iface = Interface::new(
config,
&mut ethdev,
smoltcp::time::Instant::from_millis(Mono::now().duration_since_epoch().to_millis()),
);
// Create sockets
let dhcp_socket = dhcpv4::Socket::new();
iface.update_ip_addrs(|addrs| {
let _ = addrs.push(IpCidr::new(IpAddress::v4(192, 168, 1, 99), 0));
});
let dhcp_handle = sockets.add(dhcp_socket);
Net {
iface,
ethdev,
dhcp_handle,
}
}
/// Polls on the ethernet interface. You should refer to the smoltcp
/// documentation for poll() to understand how to call poll efficiently
pub fn poll<'a>(&mut self, sockets: &'a mut SocketSet) -> bool {
let uptime = Mono::now().duration_since_epoch();
let timestamp = smoltcp::time::Instant::from_millis(uptime.to_millis());
self.iface.poll(timestamp, &mut self.ethdev, sockets)
}
pub fn poll_dhcp<'a>(&mut self, sockets: &'a mut SocketSet) -> Option<dhcpv4::Event<'a>> {
let opt_event = sockets.get_mut::<dhcpv4::Socket>(self.dhcp_handle).poll();
if let Some(event) = &opt_event {
match event {
dhcpv4::Event::Deconfigured => {
defmt::info!("DHCP lost configuration");
self.iface.update_ip_addrs(|addrs| addrs.clear());
self.iface.routes_mut().remove_default_ipv4_route();
}
dhcpv4::Event::Configured(config) => {
defmt::info!("DHCP configuration acquired");
defmt::info!("IP address: {}", config.address);
self.iface.update_ip_addrs(|addrs| {
addrs.clear();
addrs.push(IpCidr::Ipv4(config.address)).unwrap();
});
if let Some(router) = config.router {
defmt::debug!("Default gateway: {}", router);
self.iface
.routes_mut()
.add_default_ipv4_route(router)
.unwrap();
} else {
defmt::debug!("Default gateway: None");
self.iface.routes_mut().remove_default_ipv4_route();
}
}
}
}
opt_event
}
}
pub struct UdpNet {
udp_handle: SocketHandle,
last_client: Option<UdpMetadata>,
tc_source_tx: TcSourceTx,
}
impl UdpNet {
pub fn new<'sockets>(sockets: &mut SocketSet<'sockets>, tc_source_tx: TcSourceTx) -> Self {
// SAFETY: The RX and TX buffers are passed here and not used anywhere else.
let udp_rx_buffer =
smoltcp::socket::udp::PacketBuffer::new(unsafe { &mut UDP_RX_META[..] }, unsafe {
&mut UDP_RX[..]
});
let udp_tx_buffer =
smoltcp::socket::udp::PacketBuffer::new(unsafe { &mut UDP_TX_META[..] }, unsafe {
&mut UDP_TX[..]
});
let udp_socket = smoltcp::socket::udp::Socket::new(udp_rx_buffer, udp_tx_buffer);
let udp_handle = sockets.add(udp_socket);
Self {
udp_handle,
last_client: None,
tc_source_tx,
}
}
pub fn poll<'sockets>(
&mut self,
sockets: &'sockets mut SocketSet,
shared_pool: &mut SharedPool,
) {
let socket = sockets.get_mut::<udp::Socket>(self.udp_handle);
if !socket.is_open() {
if let Err(e) = socket.bind(PORT) {
defmt::warn!("binding UDP socket failed: {}", e);
}
}
loop {
match socket.recv() {
Ok((data, client)) => {
match shared_pool.add(data) {
Ok(store_addr) => {
if let Err(e) = self.tc_source_tx.try_send(store_addr) {
defmt::warn!("TC source channel is full: {}", e);
}
}
Err(e) => {
defmt::warn!("could not add UDP packet to shared pool: {}", e);
}
}
self.last_client = Some(client);
// TODO: Implement packet wiretapping.
}
Err(e) => match e {
udp::RecvError::Exhausted => {
break;
}
udp::RecvError::Truncated => {
defmt::warn!("UDP packet was truncacted");
}
},
};
}
}
}
#[app(device = stm32h7xx_hal::stm32, peripherals = true)]
mod app {
use core::ptr::addr_of_mut;
use super::*;
use rtic_monotonics::fugit::MillisDurationU32;
use satrs::spacepackets::ecss::tc::PusTcReader;
use stm32h7xx_hal::ethernet::{EthernetMAC, PHY};
use stm32h7xx_hal::gpio::{Output, Pin};
use stm32h7xx_hal::prelude::*;
use stm32h7xx_hal::stm32::Interrupt;
struct BlinkyLeds {
led1: Pin<'B', 7, Output>,
led2: Pin<'B', 14, Output>,
}
#[local]
struct Local {
leds: BlinkyLeds,
link_led: Pin<'B', 0, Output>,
net: Net,
udp: UdpNet,
tc_source_rx: TcSourceRx,
phy: ethernet::phy::LAN8742A<EthernetMAC>,
}
#[shared]
struct Shared {
blink_freq: MillisDurationU32,
eth_link_up: bool,
sockets: SocketSet<'static>,
shared_pool: SharedPool,
}
#[init]
fn init(mut cx: init::Context) -> (Shared, Local) {
defmt::println!("Starting sat-rs demo application for the STM32H743ZIT");
let pwr = cx.device.PWR.constrain();
let pwrcfg = pwr.freeze();
let rcc = cx.device.RCC.constrain();
// Try to keep the clock configuration similar to one used in STM examples:
// https://github.com/STMicroelectronics/STM32CubeH7/blob/master/Projects/NUCLEO-H743ZI/Examples/GPIO/GPIO_EXTI/Src/main.c
let ccdr = rcc
.sys_ck(400.MHz())
.hclk(200.MHz())
.use_hse(8.MHz())
.bypass_hse()
.pclk1(100.MHz())
.pclk2(100.MHz())
.pclk3(100.MHz())
.pclk4(100.MHz())
.freeze(pwrcfg, &cx.device.SYSCFG);
// Initialize the systick interrupt & obtain the token to prove that we did
Mono::start(cx.core.SYST, ccdr.clocks.sys_ck().to_Hz());
// These are used in the smoltcp example of the stm32h7xx-hal; I am not fully sure what they
// are good for.
cx.core.SCB.enable_icache();
cx.core.DWT.enable_cycle_counter();
let gpioa = cx.device.GPIOA.split(ccdr.peripheral.GPIOA);
let gpiob = cx.device.GPIOB.split(ccdr.peripheral.GPIOB);
let gpioc = cx.device.GPIOC.split(ccdr.peripheral.GPIOC);
let gpiog = cx.device.GPIOG.split(ccdr.peripheral.GPIOG);
let link_led = gpiob.pb0.into_push_pull_output();
let mut led1 = gpiob.pb7.into_push_pull_output();
let mut led2 = gpiob.pb14.into_push_pull_output();
// Criss-cross pattern looks cooler.
led1.set_high();
led2.set_low();
let leds = BlinkyLeds { led1, led2 };
let rmii_ref_clk = gpioa.pa1.into_alternate::<11>();
let rmii_mdio = gpioa.pa2.into_alternate::<11>();
let rmii_mdc = gpioc.pc1.into_alternate::<11>();
let rmii_crs_dv = gpioa.pa7.into_alternate::<11>();
let rmii_rxd0 = gpioc.pc4.into_alternate::<11>();
let rmii_rxd1 = gpioc.pc5.into_alternate::<11>();
let rmii_tx_en = gpiog.pg11.into_alternate::<11>();
let rmii_txd0 = gpiog.pg13.into_alternate::<11>();
let rmii_txd1 = gpiob.pb13.into_alternate::<11>();
let mac_addr = smoltcp::wire::EthernetAddress::from_bytes(&MAC_ADDRESS);
/// Ethernet descriptor rings are a global singleton
#[link_section = ".sram3.eth"]
static mut DES_RING: MaybeUninit<ethernet::DesRing<4, 4>> = MaybeUninit::uninit();
let (eth_dma, eth_mac) = ethernet::new(
cx.device.ETHERNET_MAC,
cx.device.ETHERNET_MTL,
cx.device.ETHERNET_DMA,
(
rmii_ref_clk,
rmii_mdio,
rmii_mdc,
rmii_crs_dv,
rmii_rxd0,
rmii_rxd1,
rmii_tx_en,
rmii_txd0,
rmii_txd1,
),
// SAFETY: We do not move the returned DMA struct across thread boundaries, so this
// should be safe according to the docs.
unsafe { DES_RING.assume_init_mut() },
mac_addr,
ccdr.peripheral.ETH1MAC,
&ccdr.clocks,
);
// Initialise ethernet PHY...
let mut lan8742a = ethernet::phy::LAN8742A::new(eth_mac.set_phy_addr(0));
lan8742a.phy_reset();
lan8742a.phy_init();
unsafe {
ethernet::enable_interrupt();
cx.core.NVIC.set_priority(Interrupt::ETH, 196); // Mid prio
cortex_m::peripheral::NVIC::unmask(Interrupt::ETH);
}
// unsafe: mutable reference to static storage, we only do this once
let store = unsafe {
let store_ptr = STORE.as_mut_ptr();
// Initialise the socket_storage field. Using `write` instead of
// assignment via `=` to not call `drop` on the old, uninitialised
// value
addr_of_mut!((*store_ptr).socket_storage).write([SocketStorage::EMPTY; 8]);
// Now that all fields are initialised we can safely use
// assume_init_mut to return a mutable reference to STORE
STORE.assume_init_mut()
};
let (tc_source_tx, tc_source_rx) =
rtic_sync::make_channel!(PoolAddr, TC_SOURCE_CHANNEL_DEPTH);
let mut sockets = SocketSet::new(&mut store.socket_storage[..]);
let net = Net::new(&mut sockets, eth_dma, mac_addr.into());
let udp = UdpNet::new(&mut sockets, tc_source_tx);
let mut shared_pool: SharedPool = StaticHeaplessMemoryPool::new(true);
static_subpool!(
SUBPOOL_SMALL,
SUBPOOL_SMALL_SIZES,
SUBPOOL_SMALL_NUM_BLOCKS as usize,
SUBPOOL_SMALL_BLOCK_SIZE,
link_section = ".axisram"
);
static_subpool!(
SUBPOOL_MEDIUM,
SUBPOOL_MEDIUM_SIZES,
SUBPOOL_MEDIUM_NUM_BLOCKS as usize,
SUBPOOL_MEDIUM_BLOCK_SIZE,
link_section = ".axisram"
);
static_subpool!(
SUBPOOL_LARGE,
SUBPOOL_LARGE_SIZES,
SUBPOOL_LARGE_NUM_BLOCKS as usize,
SUBPOOL_LARGE_BLOCK_SIZE,
link_section = ".axisram"
);
shared_pool
.grow(
SUBPOOL_SMALL.get_mut().unwrap(),
SUBPOOL_SMALL_SIZES.get_mut().unwrap(),
SUBPOOL_SMALL_NUM_BLOCKS,
true,
)
.expect("growing heapless memory pool failed");
shared_pool
.grow(
SUBPOOL_MEDIUM.get_mut().unwrap(),
SUBPOOL_MEDIUM_SIZES.get_mut().unwrap(),
SUBPOOL_MEDIUM_NUM_BLOCKS,
true,
)
.expect("growing heapless memory pool failed");
shared_pool
.grow(
SUBPOOL_LARGE.get_mut().unwrap(),
SUBPOOL_LARGE_SIZES.get_mut().unwrap(),
SUBPOOL_LARGE_NUM_BLOCKS,
true,
)
.expect("growing heapless memory pool failed");
// Set up global allocator. Use AXISRAM for the heap.
#[link_section = ".axisram"]
static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
unsafe { HEAP.init(&raw mut HEAP_MEM as usize, HEAP_SIZE) }
eth_link_check::spawn().expect("eth link check failed");
blinky::spawn().expect("spawning blink task failed");
udp_task::spawn().expect("spawning UDP task failed");
tc_source_task::spawn().expect("spawning TC source task failed");
(
Shared {
blink_freq: MillisDurationU32::from_ticks(DEFAULT_BLINK_FREQ_MS),
eth_link_up: false,
sockets,
shared_pool,
},
Local {
link_led,
leds,
net,
udp,
tc_source_rx,
phy: lan8742a,
},
)
}
#[task(local = [leds], shared=[blink_freq])]
async fn blinky(mut cx: blinky::Context) {
let leds = cx.local.leds;
loop {
leds.led1.toggle();
leds.led2.toggle();
let current_blink_freq = cx.shared.blink_freq.lock(|current| *current);
Mono::delay(current_blink_freq).await;
}
}
/// This task checks for the network link.
#[task(local=[link_led, phy], shared=[eth_link_up])]
async fn eth_link_check(mut cx: eth_link_check::Context) {
let phy = cx.local.phy;
let link_led = cx.local.link_led;
loop {
let link_was_up = cx.shared.eth_link_up.lock(|link_up| *link_up);
if phy.poll_link() {
if !link_was_up {
link_led.set_high();
cx.shared.eth_link_up.lock(|link_up| *link_up = true);
defmt::info!("Ethernet link up");
}
} else if link_was_up {
link_led.set_low();
cx.shared.eth_link_up.lock(|link_up| *link_up = false);
defmt::info!("Ethernet link down");
}
Mono::delay(100.millis()).await;
}
}
#[task(binds=ETH, local=[net], shared=[sockets])]
fn eth_isr(mut cx: eth_isr::Context) {
// SAFETY: We do not write the register mentioned inside the docs anywhere else.
unsafe {
ethernet::interrupt_handler();
}
// Check and process ETH frames and DHCP. UDP is checked in a different task.
cx.shared.sockets.lock(|sockets| {
cx.local.net.poll(sockets);
cx.local.net.poll_dhcp(sockets);
});
}
/// This task routes UDP packets.
#[task(local=[udp], shared=[sockets, shared_pool])]
async fn udp_task(mut cx: udp_task::Context) {
loop {
cx.shared.sockets.lock(|sockets| {
cx.shared.shared_pool.lock(|pool| {
cx.local.udp.poll(sockets, pool);
})
});
Mono::delay(40.millis()).await;
}
}
/// This task handles all the incoming telecommands.
#[task(local=[read_buf: [u8; 1024] = [0; 1024], tc_source_rx], shared=[shared_pool])]
async fn tc_source_task(mut cx: tc_source_task::Context) {
loop {
let recv_result = cx.local.tc_source_rx.recv().await;
match recv_result {
Ok(pool_addr) => {
cx.shared.shared_pool.lock(|pool| {
match pool.read(&pool_addr, cx.local.read_buf.as_mut()) {
Ok(packet_len) => {
defmt::info!("received {} bytes in the TC source task", packet_len);
match PusTcReader::new(&cx.local.read_buf[0..packet_len]) {
Ok((packet, _tc_len)) => {
// TODO: Handle packet here or dispatch to dedicated PUS
// handler? Dispatching could simplify some things and make
// the software more scalable..
defmt::info!("received PUS packet: {}", packet);
}
Err(e) => {
defmt::info!("invalid TC format, not a PUS packet: {}", e);
}
}
if let Err(e) = pool.delete(pool_addr) {
defmt::warn!("deleting TC data failed: {}", e);
}
}
Err(e) => {
defmt::warn!("TC packet read failed: {}", e);
}
}
});
}
Err(e) => {
defmt::warn!("TC source reception error: {}", e);
}
};
}
}
}

View File

@ -0,0 +1,16 @@
#![no_std]
#![no_main]
use stm32h7_testapp as _; // memory layout + panic handler
// See https://crates.io/crates/defmt-test/0.3.0 for more documentation (e.g. about the 'state'
// feature)
#[defmt_test::tests]
mod tests {
use defmt::assert;
#[test]
fn it_works() {
assert!(true)
}
}

View File

@ -0,0 +1,2 @@
/settings.json
/.cortex-debug.*

View File

@ -0,0 +1,12 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"rust-lang.rust",
"probe-rs.probe-rs-debugger"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []
}

View File

@ -0,0 +1,22 @@
{
"version": "0.2.0",
"configurations": [
{
"preLaunchTask": "${defaultBuildTask}",
"type": "probe-rs-debug",
"request": "launch",
"name": "probe-rs Debugging ",
"flashingConfig": {
"flashingEnabled": true
},
"chip": "STM32H743ZITx",
"coreConfigs": [
{
"programBinary": "${workspaceFolder}/target/thumbv7em-none-eabihf/debug/satrs-stm32h7-nucleo-rtic",
"rttEnabled": true,
"svdFile": "STM32H743.svd"
}
]
}
]
}

View File

@ -0,0 +1,20 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "cargo build",
"type": "shell",
"command": "~/.cargo/bin/cargo", // note: full path to the cargo
"args": [
"build"
],
"group": {
"kind": "build",
"isDefault": true
}
},
]
}

View File

@ -0,0 +1,8 @@
[package]
name = "satrs-gen"
version = "0.1.0"
edition = "2024"
[dependencies]
toml = "0.8"
heck = "0.5"

View File

@ -0,0 +1,34 @@
[apid]
Sched = 1
GenericPus = 2
Acs = 3
Cfdp = 4
Tmtc = 5
Eps = 6
[ids]
[ids.Eps]
Pcdu = 0
Subsystem = 1
[ids.Tmtc]
UdpServer = 0
TcpServer = 1
[ids.GenericPus]
PusEventManagement = 0
PusRouting = 1
PusTest = 2
PusAction = 3
PusMode = 4
PusHk = 5
[ids.Sched]
PusSched = 0
[ids.Acs]
Subsystem = 1
Assembly = 2
Mgm0 = 3
Mgm1 = 4

View File

@ -0,0 +1,91 @@
use heck::{ToShoutySnakeCase, ToSnakeCase};
use std::{
collections::BTreeMap,
fs::{self, File},
io::{self, Write},
};
use toml::{Value, map::Map};
fn main() -> io::Result<()> {
// Read the configuration file
let config_str = fs::read_to_string("components.toml").expect("Unable to read file");
let config: Value = toml::from_str(&config_str).expect("Unable to parse TOML");
let mut output = File::create("../satrs-example/src/ids.rs")?;
generate_rust_code(&config, &mut output);
Ok(())
}
fn sort_enum_table(table_map: &Map<String, Value>) -> BTreeMap<u64, &str> {
// Collect entries into a BTreeMap to sort them by key
let mut sorted_entries: BTreeMap<u64, &str> = BTreeMap::new();
for (key, value) in table_map {
if let Some(value) = value.as_integer() {
if !(0..=0x7FF).contains(&value) {
panic!("Invalid APID value: {}", value);
}
sorted_entries.insert(value as u64, key);
}
}
sorted_entries
}
fn generate_rust_code(config: &Value, writer: &mut impl Write) {
writeln!(
writer,
"//! This is an auto-generated configuration module."
)
.unwrap();
writeln!(writer, "use satrs::request::UniqueApidTargetId;").unwrap();
writeln!(writer).unwrap();
// Generate the main module
writeln!(
writer,
"#[derive(Debug, Copy, Clone, PartialEq, Eq, strum::EnumIter)]"
)
.unwrap();
writeln!(writer, "pub enum Apid {{").unwrap();
// Generate constants for the main module
if let Some(apid_table) = config.get("apid").and_then(Value::as_table) {
let sorted_entries = sort_enum_table(apid_table);
// Write the sorted entries to the writer
for (value, key) in sorted_entries {
writeln!(writer, " {} = {},", key, value).unwrap();
}
}
writeln!(writer, "}}").unwrap();
// Generate ID tables.
if let Some(id_tables) = config.get("ids").and_then(Value::as_table) {
for (mod_name, table) in id_tables {
let mod_name_as_snake = mod_name.to_snake_case();
writeln!(writer).unwrap();
writeln!(writer, "pub mod {} {{", mod_name_as_snake).unwrap();
let sorted_entries = sort_enum_table(table.as_table().unwrap());
writeln!(writer, " #[derive(Debug, Copy, Clone, PartialEq, Eq)]").unwrap();
writeln!(writer, " pub enum Id {{").unwrap();
// Write the sorted entries to the writer
for (value, key) in &sorted_entries {
writeln!(writer, " {} = {},", key, value).unwrap();
}
writeln!(writer, " }}").unwrap();
writeln!(writer).unwrap();
for (_value, key) in sorted_entries {
let key_shouting = key.to_shouty_snake_case();
writeln!(
writer,
" pub const {}: super::UniqueApidTargetId = super::UniqueApidTargetId::new(super::Apid::{} as u16, Id::{} as u32);",
key_shouting, mod_name, key
).unwrap();
}
writeln!(writer, "}}").unwrap();
}
}
}
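Run against the `components.toml` shown above, the generator writes `../satrs-example/src/ids.rs`.
For the `Apid` enum and the `Tmtc` ID table, the emitted module looks roughly like this (excerpt,
whitespace slightly reflowed):
```rust
//! This is an auto-generated configuration module.
use satrs::request::UniqueApidTargetId;

#[derive(Debug, Copy, Clone, PartialEq, Eq, strum::EnumIter)]
pub enum Apid {
    Sched = 1,
    GenericPus = 2,
    Acs = 3,
    Cfdp = 4,
    Tmtc = 5,
    Eps = 6,
}

pub mod tmtc {
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    pub enum Id {
        UdpServer = 0,
        TcpServer = 1,
    }

    pub const UDP_SERVER: super::UniqueApidTargetId =
        super::UniqueApidTargetId::new(super::Apid::Tmtc as u16, Id::UdpServer as u32);
    pub const TCP_SERVER: super::UniqueApidTargetId =
        super::UniqueApidTargetId::new(super::Apid::Tmtc as u16, Id::TcpServer as u32);
}
```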

View File

@ -0,0 +1,260 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:java="http://www.yworks.com/xml/yfiles-common/1.0/java" xmlns:sys="http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/markup/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
<!--Created by yEd 3.23.2-->
<key attr.name="Description" attr.type="string" for="graph" id="d0"/>
<key for="port" id="d1" yfiles.type="portgraphics"/>
<key for="port" id="d2" yfiles.type="portgeometry"/>
<key for="port" id="d3" yfiles.type="portuserdata"/>
<key attr.name="url" attr.type="string" for="node" id="d4"/>
<key attr.name="description" attr.type="string" for="node" id="d5"/>
<key for="node" id="d6" yfiles.type="nodegraphics"/>
<key for="graphml" id="d7" yfiles.type="resources"/>
<key attr.name="url" attr.type="string" for="edge" id="d8"/>
<key attr.name="description" attr.type="string" for="edge" id="d9"/>
<key for="edge" id="d10" yfiles.type="edgegraphics"/>
<graph edgedefault="directed" id="G">
<data key="d0" xml:space="preserve"/>
<node id="n0">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="360.0" width="479.0" x="771.3047672479152" y="458.0"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="237.5" y="178.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="177.64799999999997" width="200.75199999999973" x="1037.5527672479152" y="470.15200000000027"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="67.919921875" x="13.264464667588754" xml:space="preserve" y="8.302185845943427">Simulation<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="-0.5" labelRatioY="-0.5" nodeRatioX="-0.433926114471642" nodeRatioY="-0.45326608886143704" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="84.39999999999986" x="1068.8351781652768" y="508.2800000000002"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="37.638671875" x="23.380664062499818" xml:space="preserve" y="8.015625">PCDU<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="120.39999999999986" x="1068.8351781652768" y="550.4800000000001"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="92.453125" x="13.973437499999818" xml:space="preserve" y="8.015625">Magnetometer<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n4">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="120.39999999999986" x="1068.8351781652768" y="594.9000000000001"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="88.83203125" x="15.783984374999818" xml:space="preserve" y="8.015625">Magnetorquer<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n5">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="120.39999999999986" x="783.4063563305535" y="545.2800000000002"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="85.931640625" x="17.234179687499932" xml:space="preserve" y="8.015625">SimController<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n6">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="120.39999999999986" x="840.5407126611072" y="677.8000000000002"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="105.05078125" x="7.674609374999932" xml:space="preserve" y="8.015625">UDP TC Receiver<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n7">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="120.39999999999986" x="1005.2814253222144" y="677.8000000000002"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="97.111328125" x="11.644335937499932" xml:space="preserve" y="8.015625">UDP TM Sender<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n8">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="34.0" width="120.39999999999986" x="931.6174253222144" y="775.5920000000002"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="38.740234375" x="40.82988281249993" xml:space="preserve" y="8.015625">Client<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<edge id="e0" source="n5" target="n3">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="-5.199999999999932"/>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e1" source="n5" target="n2">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="60.19999999999993" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1023.8695890826383" y="562.2800000000002"/>
<y:Point x="1023.8695890826383" y="525.2800000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e2" source="n5" target="n4">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="48.72964366944643" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1023.8695890826383" y="562.2800000000002"/>
<y:Point x="1023.8695890826383" y="611.9000000000001"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="2.0" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" verticalTextPosition="bottom" visible="true" width="97.955078125" x="12.686124396959713" xml:space="preserve" y="-22.50440429687478">schedule_event<y:LabelModel><y:SmartEdgeLabelModel autoRotationEnabled="false" defaultAngle="0.0" defaultDistance="10.0"/></y:LabelModel><y:ModelParameter><y:SmartEdgeLabelModelParameter angle="6.283185307179586" distance="13.519999999999978" distanceToCenter="true" position="left" ratio="0.11621274698385183" segment="0"/></y:ModelParameter><y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/></y:EdgeLabel>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e3" source="n6" target="n5">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="0.0" sy="0.0" tx="-5.329643669446341" ty="0.0">
<y:Point x="900.7407126611072" y="628.5400000000002"/>
<y:Point x="838.2767126611071" y="628.5400000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="2.0" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" verticalTextPosition="bottom" visible="true" width="75.923828125" x="-87.89792405764274" xml:space="preserve" y="-40.606550292968564">SimRequest<y:LabelModel><y:SmartEdgeLabelModel autoRotationEnabled="false" defaultAngle="0.0" defaultDistance="10.0"/></y:LabelModel><y:ModelParameter><y:SmartEdgeLabelModelParameter angle="6.283185307179586" distance="49.935999999999936" distanceToCenter="true" position="left" ratio="0.5" segment="0"/></y:ModelParameter><y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/></y:EdgeLabel>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e4" source="n4" target="n7">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="60.200000000000045" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1223.8814253222142" y="611.9000000000001"/>
<y:Point x="1223.8814253222142" y="694.8000000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e5" source="n3" target="n7">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1223.8814253222142" y="567.4800000000001"/>
<y:Point x="1223.8814253222142" y="694.8000000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e6" source="n2" target="n7">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="11.514125426161627" sy="-2.5781798912005343" tx="45.553752843062284" ty="0.0">
<y:Point x="1223.8814253222142" y="522.7018201087997"/>
<y:Point x="1223.8814253222142" y="694.8000000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="2.0" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" verticalTextPosition="bottom" visible="true" width="60.4140625" x="-2.4087265765670054" xml:space="preserve" y="145.1356018470808">SimReply<y:LabelModel><y:SmartEdgeLabelModel autoRotationEnabled="false" defaultAngle="0.0" defaultDistance="10.0"/></y:LabelModel><y:ModelParameter><y:SmartEdgeLabelModelParameter angle="6.283185307179586" distance="17.97817989120062" distanceToCenter="true" position="right" ratio="0.679561684469248" segment="-1"/></y:ModelParameter><y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/></y:EdgeLabel>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e7" source="n8" target="n6">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-25.212712661107275" sy="0.0" tx="-11.264000000000124" ty="0.0">
<y:Point x="966.6047126611071" y="731.8000000000002"/>
<y:Point x="889.4767126611071" y="731.8000000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="2.0" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" verticalTextPosition="bottom" visible="true" width="119.751953125" x="-132.27600022951788" xml:space="preserve" y="-32.03587548828091">SimRequest in UDP<y:LabelModel><y:SmartEdgeLabelModel autoRotationEnabled="false" defaultAngle="0.0" defaultDistance="10.0"/></y:LabelModel><y:ModelParameter><y:SmartEdgeLabelModelParameter angle="6.283185307179586" distance="20.73181250000017" distanceToCenter="true" position="left" ratio="0.9386993050513424" segment="1"/></y:ModelParameter><y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/></y:EdgeLabel>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e8" source="n7" target="n8">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="29.18399999999997" sy="0.0" tx="24.28800000000001" ty="0.0">
<y:Point x="1094.6654253222143" y="731.8000000000002"/>
<y:Point x="1016.1054253222144" y="731.8000000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="2.0" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" verticalTextPosition="bottom" visible="true" width="104.2421875" x="-62.15307370122309" xml:space="preserve" y="34.80927001953137">SimReply in UDP<y:LabelModel><y:SmartEdgeLabelModel autoRotationEnabled="false" defaultAngle="0.0" defaultDistance="10.0"/></y:LabelModel><y:ModelParameter><y:SmartEdgeLabelModelParameter angle="6.283185307179586" distance="23.81218750000005" distanceToCenter="true" position="left" ratio="0.12769857433808468" segment="1"/></y:ModelParameter><y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/></y:EdgeLabel>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e9" source="n5" target="n1">
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="23.921741802203996" sy="-3.0501798912007416" tx="0.0" ty="-56.27417989120056">
<y:Point x="867.5280981327575" y="502.70182010879967"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:EdgeLabel alignment="center" configuration="AutoFlippingLabel" distance="2.0" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" preferredPlacement="anywhere" ratio="0.5" textColor="#000000" verticalTextPosition="bottom" visible="true" width="29.95703125" x="73.38950633588263" xml:space="preserve" y="-62.699758016200235">step<y:LabelModel><y:SmartEdgeLabelModel autoRotationEnabled="false" defaultAngle="0.0" defaultDistance="10.0"/></y:LabelModel><y:ModelParameter><y:SmartEdgeLabelModelParameter angle="6.283185307179586" distance="11.126187499999986" distanceToCenter="true" position="left" ratio="0.5889387894625147" segment="-1"/></y:ModelParameter><y:PreferredPlacementDescriptor angle="0.0" angleOffsetOnRightSide="0" angleReference="absolute" angleRotationOnRightSide="co" distance="-1.0" frozen="true" placement="anywhere" side="anywhere" sideReference="relative_to_edge_flow"/></y:EdgeLabel>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
</graph>
<data key="d7">
<y:Resources/>
</data>
</graphml>
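The GraphML file above is the source of the mini-simulator overview diagram: a Client exchanges UDP datagrams with the application, the UDP TC Receiver forwards incoming SimRequest messages to the SimController, the SimController schedules events on the simulated devices (PCDU, Magnetometer, Magnetorquer), and their answers travel back to the Client as SimReply messages through the UDP TM Sender. The following Rust sketch only illustrates that request/reply routing; all type and function names are hypothetical and are not taken from the actual satrs-minisim crate.

// Hypothetical sketch of the flow shown in the diagram above: a request is
// addressed to one of the simulated devices, routed by a stand-in for the
// SimController, and answered with a reply. Names are illustrative only.

/// Simulated devices hosted by the simulation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SimTarget {
    Pcdu,
    Magnetometer,
    Magnetorquer,
}

/// Request received by the UDP TC receiver and forwarded to the controller.
#[derive(Debug)]
struct SimRequest {
    target: SimTarget,
    payload: Vec<u8>,
}

/// Reply generated by a device model and handed to the UDP TM sender.
#[derive(Debug)]
struct SimReply {
    source: SimTarget,
    payload: Vec<u8>,
}

/// Simplified stand-in for the SimController: route the request to the
/// addressed device model and collect its reply.
fn handle_request(request: &SimRequest) -> SimReply {
    let payload = match request.target {
        // Real device models would be stepped by the simulation core here.
        SimTarget::Pcdu => b"pcdu-hk".to_vec(),
        SimTarget::Magnetometer => b"mgm-field-vector".to_vec(),
        SimTarget::Magnetorquer => b"mgt-ack".to_vec(),
    };
    SimReply {
        source: request.target,
        payload,
    }
}

fn main() {
    let request = SimRequest {
        target: SimTarget::Magnetometer,
        payload: b"read".to_vec(),
    };
    let reply = handle_request(&request);
    println!("request {:?} -> reply {:?}", request, reply);
}

As the diagram's edges indicate, the real flow additionally involves a step call from the SimController into the simulation and serialization of the messages into UDP datagrams, both of which the sketch omits.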

Binary file not shown. Size: 98 KiB


@@ -0,0 +1,650 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:java="http://www.yworks.com/xml/yfiles-common/1.0/java" xmlns:sys="http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/markup/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
<!--Created by yEd 3.23.2-->
<key attr.name="Description" attr.type="string" for="graph" id="d0"/>
<key for="port" id="d1" yfiles.type="portgraphics"/>
<key for="port" id="d2" yfiles.type="portgeometry"/>
<key for="port" id="d3" yfiles.type="portuserdata"/>
<key attr.name="url" attr.type="string" for="node" id="d4"/>
<key attr.name="description" attr.type="string" for="node" id="d5"/>
<key for="node" id="d6" yfiles.type="nodegraphics"/>
<key for="graphml" id="d7" yfiles.type="resources"/>
<key attr.name="url" attr.type="string" for="edge" id="d8"/>
<key attr.name="description" attr.type="string" for="edge" id="d9"/>
<key for="edge" id="d10" yfiles.type="edgegraphics"/>
<graph edgedefault="directed" id="G">
<data key="d0"/>
<node id="n0">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="157.79999999999998" width="452.0" x="959.3461887999997" y="585.7236400000005"/>
<y:Fill color2="#000000" hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="145.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="434.0" x="9.0" xml:space="preserve" y="6.399999999999977">&lt;html&gt;
&lt;center&gt;
&lt;h4&gt;ACS Mode Tree&lt;/h4&gt;
&lt;/center&gt;
&lt;table border="1"&gt;
&lt;tr&gt;
&lt;th&gt;Mode&lt;/th&gt;
&lt;th&gt;MGMs&lt;/th&gt;
&lt;th&gt;SUSs&lt;/th&gt;
&lt;th&gt;STR&lt;/th&gt;
&lt;th&gt;MGT&lt;/th&gt;
&lt;th&gt;RWs&lt;/th&gt;
&lt;th&gt;ACS CTRL&lt;/th&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;SAFE&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;OFF&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;SAFE&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;IDLE&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;IDLE&lt;/td&gt;
&lt;/tr&gt;
&lt;/table&gt;
&lt;/html&gt;<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="-1.1102230246251565E-16" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="134.60000000000014" width="452.0" x="959.3461887999997" y="757.2703199999999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="120.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="428.0" x="12.0" xml:space="preserve" y="7.300000000000068">&lt;html&gt;
&lt;center&gt;
&lt;h4&gt;ACS IDLE Sequence&lt;/h4&gt;
&lt;/center&gt;
&lt;table border="1"&gt;
&lt;tr&gt;
&lt;th&gt;Step&lt;/th&gt;
&lt;th&gt;MGMs&lt;/th&gt;
&lt;th&gt;SUS&lt;/th&gt;
&lt;th&gt;STR&lt;/th&gt;
&lt;th&gt;MGT&lt;/th&gt;
&lt;th&gt;RWs&lt;/th&gt;
&lt;th&gt;ACS CTRL&lt;/th&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;1&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;NORMAL&lt;/td&gt;
&lt;td&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;2&lt;/td&gt;
&lt;td&gt;&lt;/td&gt;
&lt;td&gt;&lt;/td&gt;
&lt;td&gt;&lt;/td&gt;
&lt;td&gt;&lt;/td&gt;
&lt;td&gt;&lt;/td&gt;
&lt;td&gt;SAFE&lt;/td&gt;
&lt;/tr&gt;
&lt;/table&gt;
&lt;/html&gt;<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="32.400000000000034" width="146.79999999999995" x="1128.4" y="313.94999999999993"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="113.94921875" x="16.42539062500009" xml:space="preserve" y="6.0515625000000455">ACS Subsystem<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="80.39999999999998" x="910.7200000000004" y="407.20000000000005"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="61.837890625" x="9.281054687499932" xml:space="preserve" y="4.03125">MGM
Assembly<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n4">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="32.400000000000034" width="146.79999999999995" x="1133.6000000000001" y="243.3579999999999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="117.99609375" x="14.401953124999864" xml:space="preserve" y="6.051562499999989">Satellite System<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n5">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="72.40000000000009" x="1021.1200000000003" y="404.99560000000014"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="61.837890625" x="5.2810546875000455" xml:space="preserve" y="4.03125">SUS
Assembly<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n6">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="67.59999999999991" x="1124.92" y="404.99560000000014"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="56.599609375" x="5.5001953124999545" xml:space="preserve" y="4.03125">STR
Manager<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n7">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="67.59999999999991" x="1208.74" y="407.20000000000005"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="56.599609375" x="5.5001953124999545" xml:space="preserve" y="4.03125">MGT
Manager<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n8">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="72.40000000000009" x="1292.56" y="407.20000000000005"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="61.837890625" x="5.2810546875000455" xml:space="preserve" y="4.03125">RW
Assembly<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n9">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="67.59999999999991" x="1381.18" y="407.20000000000005"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="34.732421875" x="16.433789062499955" xml:space="preserve" y="4.03125">ACS
CTRL<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n10">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="946.4000000000004" y="463.3000000000002"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="41.640625" x="1.5396874999999" xml:space="preserve" y="1.0156250000000568">MGM0<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n11">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="946.4000000000004" y="491.32480000000066"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="41.640625" x="1.5396874999999" xml:space="preserve" y="1.015625">MGM1<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n12">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="946.4000000000004" y="519.3496000000011"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="41.640625" x="1.5396874999999" xml:space="preserve" y="1.015625">MGM2<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n13">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1055.2600000000007" y="463.3000000000002"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="35.65234375" x="4.5338281249999" xml:space="preserve" y="1.015625">SUS0<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n14">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1055.2600000000007" y="491.32480000000066"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="15.443359375" x="14.6383203124999" xml:space="preserve" y="1.015625">...<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n15">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1055.2600000000007" y="519.3496000000011"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="43.287109375" x="0.7164453124999" xml:space="preserve" y="1.015625">SUS12<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n16">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="50.0" x="1129.9800000000005" y="482.79120000000023"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="45.0390625" x="2.48046875" xml:space="preserve" y="4.03125">STR
Device<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n17">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="50.0" x="1216.4400000000005" y="482.7912000000002"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="31.9375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="45.0390625" x="2.48046875" xml:space="preserve" y="4.031249999999943">MGT
Device<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n18">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1341.6661887999999" y="454.7884800000007"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="31.837890625" x="6.4410546874999" xml:space="preserve" y="1.015625">RW0<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n19">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1341.6661887999999" y="482.3769600000013"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="31.837890625" x="6.4410546874999" xml:space="preserve" y="1.015625">RW1<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n20">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1341.6661887999999" y="512.3769600000013"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="31.837890625" x="6.4410546874999" xml:space="preserve" y="1.015625">RW2<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n21">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="20.0" width="44.719999999999914" x="1341.6661887999999" y="542.3769600000013"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="31.837890625" x="6.4410546874999" xml:space="preserve" y="1.015625">RW3<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n22">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="32.400000000000034" width="59.75999999999999" x="1305.2" y="313.94999999999993"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="17.3505859375" x="21.20470703125011" xml:space="preserve" y="6.051562499999989">...<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<edge id="e0" source="n4" target="n2">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-5.199999999999818" sy="0.0" tx="0.0" ty="0.0"/>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e1" source="n2" target="n3">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-1.1700000000000728" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1200.63" y="379.7"/>
<y:Point x="950.9200000000003" y="379.7"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e2" source="n2" target="n5">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-1.1700000000000728" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1200.63" y="379.7"/>
<y:Point x="1057.3200000000004" y="379.7"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e3" source="n2" target="n6">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-1.1700000000000728" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1200.63" y="379.7"/>
<y:Point x="1158.72" y="379.7"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e4" source="n2" target="n7">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-1.1700000000000728" sy="0.0" tx="-11.29999999999859" ty="0.0">
<y:Point x="1200.63" y="379.7"/>
<y:Point x="1231.2400000000014" y="379.7"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e5" source="n2" target="n8">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-1.1700000000000728" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1200.63" y="379.7"/>
<y:Point x="1328.76" y="379.7"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e6" source="n2" target="n9">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-1.1700000000000728" sy="0.0" tx="0.0" ty="0.0">
<y:Point x="1200.63" y="379.7"/>
<y:Point x="1414.98" y="379.7"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e7" source="n3" target="n10">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-28.009798234586242" sy="0.0" tx="13.11999999999989" ty="0.0">
<y:Point x="922.9102017654141" y="473.3000000000002"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e8" source="n3" target="n11">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-28.009798234586242" sy="17.706497359551577" tx="0.0" ty="0.0">
<y:Point x="922.9102017654141" y="501.32480000000066"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e9" source="n3" target="n12">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-28.009798234586242" sy="12.053248679775777" tx="0.0" ty="0.0">
<y:Point x="922.9102017654141" y="529.3496000000011"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e10" source="n5" target="n13">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-24.18711646917177" sy="11.599696230174459" tx="-20.855199999999513" ty="-1.9751999999995178">
<y:Point x="1033.1328835308286" y="471.32480000000066"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e11" source="n5" target="n14">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-24.18711646917177" sy="0.0" tx="-20.855199999999513" ty="-3.024800000000596">
<y:Point x="1033.1328835308286" y="498.30000000000007"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e12" source="n6" target="n16">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="0.0" sy="0.0" tx="3.7399999999995543" ty="0.0"/>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e13" source="n7" target="n17">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="0.0" sy="0.0" tx="1.0999999999994543" ty="0.0"/>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e14" source="n8" target="n18">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-23.248152938343992" sy="14.026599270174415" tx="-13.026188800000227" ty="0.33167708069714763">
<y:Point x="1305.511847061656" y="465.1201570806978"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e15" source="n8" target="n19">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-23.248152938343992" sy="0.0" tx="-13.026188800000227" ty="0.0">
<y:Point x="1305.511847061656" y="492.3769600000013"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e16" source="n8" target="n20">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-23.248152938343992" sy="0.0" tx="-13.026188800000227" ty="0.0">
<y:Point x="1305.511847061656" y="522.3769600000013"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e17" source="n8" target="n21">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-23.248152938343992" sy="0.0" tx="-13.026188800000227" ty="-1.1368683772161603E-13">
<y:Point x="1305.511847061656" y="552.3769600000012"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e18" source="n4" target="n22">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-5.199999999999818" sy="16.200000000000045" tx="0.0" ty="0.0">
<y:Point x="1201.8000000000002" y="290.43031999999994"/>
<y:Point x="1335.08" y="290.43031999999994"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
<edge id="e19" source="n5" target="n15">
<data key="d9"/>
<data key="d10">
<y:PolyLineEdge>
<y:Path sx="-24.18711646917177" sy="0.0" tx="-20.855199999999513" ty="0.0">
<y:Point x="1033.1328835308286" y="529.3496000000011"/>
</y:Path>
<y:LineStyle color="#000000" type="line" width="1.0"/>
<y:Arrows source="none" target="standard"/>
<y:BendStyle smoothed="false"/>
</y:PolyLineEdge>
</data>
</edge>
</graph>
<data key="d7">
<y:Resources/>
</data>
</graphml>
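The two tables embedded in the diagram above define the ACS mode tree: for each subsystem mode (OFF, SAFE, IDLE) a target mode is listed for every child (MGMs, SUSs, STR, MGT, RWs, ACS controller), and the IDLE sequence commands the children in two steps, devices first and the controller afterwards. The Rust sketch below shows one possible way such a sequence table could be modelled in code; the names and structures are illustrative and do not reflect the actual satrs mode tree API.

// Illustrative model of the ACS IDLE sequence table shown above.
// All identifiers are hypothetical.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Mode {
    Off,
    Safe,
    Normal,
    Idle,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AcsChild {
    Mgms,
    Suss,
    Str,
    Mgt,
    Rws,
    AcsCtrl,
}

/// One step of a mode sequence: the children commanded in this step and the
/// mode they should reach before the next step starts.
struct SequenceStep {
    targets: Vec<(AcsChild, Mode)>,
}

/// Sequence for commanding the ACS subsystem into IDLE, mirroring the table
/// above: first all sensors and actuators, then the ACS controller.
fn acs_idle_sequence() -> Vec<SequenceStep> {
    vec![
        SequenceStep {
            targets: vec![
                (AcsChild::Mgms, Mode::Normal),
                (AcsChild::Suss, Mode::Normal),
                (AcsChild::Str, Mode::Normal),
                (AcsChild::Mgt, Mode::Normal),
                (AcsChild::Rws, Mode::Normal),
            ],
        },
        SequenceStep {
            // Value taken from step 2 of the table above.
            targets: vec![(AcsChild::AcsCtrl, Mode::Safe)],
        },
    ]
}

fn main() {
    for (i, step) in acs_idle_sequence().iter().enumerate() {
        println!("step {}: {:?}", i + 1, step.targets);
    }
}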

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -0,0 +1,285 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:java="http://www.yworks.com/xml/yfiles-common/1.0/java" xmlns:sys="http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/markup/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
<!--Created by yEd 3.23.2-->
<key attr.name="Description" attr.type="string" for="graph" id="d0"/>
<key for="port" id="d1" yfiles.type="portgraphics"/>
<key for="port" id="d2" yfiles.type="portgeometry"/>
<key for="port" id="d3" yfiles.type="portuserdata"/>
<key attr.name="url" attr.type="string" for="node" id="d4"/>
<key attr.name="description" attr.type="string" for="node" id="d5"/>
<key for="node" id="d6" yfiles.type="nodegraphics"/>
<key for="graphml" id="d7" yfiles.type="resources"/>
<key attr.name="url" attr.type="string" for="edge" id="d8"/>
<key attr.name="description" attr.type="string" for="edge" id="d9"/>
<key for="edge" id="d10" yfiles.type="edgegraphics"/>
<graph edgedefault="directed" id="G">
<data key="d0" xml:space="preserve"/>
<node id="n0">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="261.7582999999998" width="631.1152000000001" x="810.8848" y="142.00000000000003"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="16" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="22.625" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="296.09375" x="26.51035059931519" xml:space="preserve" y="9.42072533356739">satrs-example Component Structure<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="-0.5" labelRatioY="-0.5" nodeRatioX="-0.4579944349315068" nodeRatioY="-0.46400983146067415" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1" yfiles.foldertype="group">
<data key="d4" xml:space="preserve"/>
<data key="d6">
<y:ProxyAutoBoundsNode>
<y:Realizers active="0">
<y:GroupNode>
<y:Geometry height="217.68500000000003" width="166.9147999999998" x="819.9999999999999" y="180.434265625"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" type="line" width="1.0"/>
<y:NodeLabel alignment="left" autoSizePolicy="node_width" borderDistance="5.0" fontFamily="Dialog" fontSize="16" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="41.25" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="166.9147999999998" x="13.181757989188668" xml:space="preserve" y="6.728692946816665">Application
Components<y:LabelModel><y:SmartNodeLabelModel distance="5.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.4210270270270303" labelRatioY="-0.5" nodeRatioX="0.5" nodeRatioY="-0.46908977216245173" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="39" topF="38.90500000000003"/>
</y:GroupNode>
<y:GroupNode>
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
<y:Fill color="#F5F5F5" transparent="false"/>
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" xml:space="preserve" y="0.0">Folder 3</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
</y:Realizers>
</y:ProxyAutoBoundsNode>
</data>
<graph edgedefault="directed" id="n1:">
<node id="n1::n0">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="136.9147999999998" x="834.9999999999999" y="234.33926562500002"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="113.94921875" x="11.482790624999893" xml:space="preserve" y="4.851562500000028">ACS Subsystem<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1::n1">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="136.9147999999998" x="834.9999999999999" y="270.5687968750001"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="111.884765625" x="12.515017187499893" xml:space="preserve" y="4.8515625">EPS Subsystem<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1::n2">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="136.9147999999998" x="834.9999999999999" y="306.84403125000006"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="112.923828125" x="11.995485937499893" xml:space="preserve" y="4.8515625">TCS Subsystem<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1::n3">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="136.9147999999998" x="834.9999999999999" y="343.119265625"/>
<y:Fill color="#FFCC00" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="36.59375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="81.259765625" x="27.827517187499893" xml:space="preserve" y="1.703125">Payload
Subsystem<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
</graph>
</node>
<node id="n2" yfiles.foldertype="group">
<data key="d4" xml:space="preserve"/>
<data key="d6">
<y:ProxyAutoBoundsNode>
<y:Realizers active="0">
<y:GroupNode>
<y:Geometry height="261.7582999999998" width="498.25116669136776" x="971.9147999999997" y="151.36096562500023"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle hasColor="false" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="content" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="false" width="4.0" x="247.12558334568382" y="0.0"/>
<y:Shape type="roundrectangle"/>
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="7" leftF="7.00529999999992" right="3" rightF="3.313607121059931" top="14" topF="14.073299999999762"/>
</y:GroupNode>
<y:GroupNode>
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
<y:Fill color="#F5F5F5" transparent="false"/>
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" xml:space="preserve" y="0.0">Folder 4</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
</y:Realizers>
</y:ProxyAutoBoundsNode>
</data>
<graph edgedefault="directed" id="n2:">
<node id="n2::n0" yfiles.foldertype="group">
<data key="d4" xml:space="preserve"/>
<data key="d6">
<y:ProxyAutoBoundsNode>
<y:Realizers active="0">
<y:GroupNode>
<y:Geometry height="217.68500000000003" width="441.0158999999999" x="993.9200999999996" y="180.434265625"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" type="line" width="1.0"/>
<y:NodeLabel alignment="left" autoSizePolicy="node_width" borderDistance="5.0" fontFamily="Dialog" fontSize="16" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="22.625" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="441.0158999999999" x="16.916359570308032" xml:space="preserve" y="5.969557999968089">Generic Components<y:LabelModel><y:SmartNodeLabelModel distance="5.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.46164229096885634" labelRatioY="-0.5" nodeRatioX="0.5" nodeRatioY="-0.4725770815629552" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
<y:BorderInsets bottom="22" bottomF="22.006468749999954" left="0" leftF="0.0" right="0" rightF="0.0" top="28" topF="27.68500000000006"/>
</y:GroupNode>
<y:GroupNode>
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
<y:Fill color="#F5F5F5" transparent="false"/>
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" xml:space="preserve" y="0.0">Folder 2</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
</y:Realizers>
</y:ProxyAutoBoundsNode>
</data>
<graph edgedefault="directed" id="n2::n0:">
<node id="n2::n0::n0">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="125.0" x="1151.9280499999995" y="281.84403125000006"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="58.837890625" x="33.0810546875" xml:space="preserve" y="4.8515625">TM Sink<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n1">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="125.0" x="1151.9280499999995" y="330.5687968750001"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="105.7119140625" x="9.64404296875" xml:space="preserve" y="4.8515625">TCP/IP Servers<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n2">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="125.0" x="1294.9359999999995" y="281.84403125000006"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="75.1689453125" x="24.91552734375" xml:space="preserve" y="4.8515625">TC Source<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n3">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="125.0" x="1008.9200999999996" y="223.11926562500005"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="109.9228515625" x="7.53857421875" xml:space="preserve" y="4.8515625">Event Manager<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n4">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="125.0" x="1008.9200999999996" y="267.1160312500001"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="117.7021484375" x="3.64892578125" xml:space="preserve" y="4.8515625">PUS Distribution<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n5">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="40.0" width="125.0" x="1151.9280499999995" y="223.11926562500005"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="dashed" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="36.59375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="84.1650390625" x="20.41748046875" xml:space="preserve" y="1.7031249999999716">Shared
TMTC Pools<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n6">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="50.0" width="125.0" x="1008.9200999999996" y="311.1127968750001"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="36.59375" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="78.12890625" x="23.435546875" xml:space="preserve" y="6.703125">Satellite
Mode Tree<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n0::n7">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="30.0" width="125.0" x="1294.9359999999995" y="223.11926562500005"/>
<y:Fill color="#CCFFFF" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="14" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="20.296875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="74.7861328125" x="25.10693359375" xml:space="preserve" y="4.8515625"> PUS Stack<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
</graph>
</node>
</graph>
</node>
<node id="n3">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="57.265600000000006" width="631.1152" x="810.8847999999999" y="411.39428125"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="16" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="41.25" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="261.8125" x="166.89412267941418" xml:space="preserve" y="3.144146301369915">satrs-minisim
Simulator based on asynchronix<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="-0.028136269449041573" nodeRatioY="-0.08493150684931505" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n4">
<data key="d6">
<y:ShapeNode>
<y:Geometry height="50.0" width="631.1152000000002" x="810.8847999999998" y="476.2958625000002"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="16" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="41.25" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="374.8359375" x="110.3824039294143" xml:space="preserve" y="0.12842465753431043">pytmtc
Command-line interface based TMTC handling<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="-0.028136269449041573" nodeRatioY="-0.08493150684931505" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
</graph>
<data key="d7">
<y:Resources/>
</data>
</graphml>

File diff suppressed because it is too large.


@ -0,0 +1,348 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:java="http://www.yworks.com/xml/yfiles-common/1.0/java" xmlns:sys="http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/markup/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
<!--Created by yEd 3.23.2-->
<key attr.name="Description" attr.type="string" for="graph" id="d0"/>
<key for="port" id="d1" yfiles.type="portgraphics"/>
<key for="port" id="d2" yfiles.type="portgeometry"/>
<key for="port" id="d3" yfiles.type="portuserdata"/>
<key attr.name="url" attr.type="string" for="node" id="d4"/>
<key attr.name="description" attr.type="string" for="node" id="d5"/>
<key for="node" id="d6" yfiles.type="nodegraphics"/>
<key for="graphml" id="d7" yfiles.type="resources"/>
<key attr.name="url" attr.type="string" for="edge" id="d8"/>
<key attr.name="description" attr.type="string" for="edge" id="d9"/>
<key for="edge" id="d10" yfiles.type="edgegraphics"/>
<graph edgedefault="directed" id="G">
<data key="d0"/>
<node id="n0">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="174.17919999999998" width="273.1071999999999" x="1000.3040000000003" y="433.61760000000004"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="67.380859375" x="12.06892812499973" xml:space="preserve" y="7.247609374999911">Static Pool<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="-0.5" labelRatioY="-0.5" nodeRatioX="-0.45580882479480683" nodeRatioY="-0.45838992615076934" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n1">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="120.0" x="1141.8400000000001" y="536.1087687499992"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" lineColor="#000000" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="86.142578125" x="16.9287109375" xml:space="preserve" y="21.015625">1 x 128 bytes<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2" yfiles.foldertype="group">
<data key="d4" xml:space="preserve"/>
<data key="d5"/>
<data key="d6">
<y:ProxyAutoBoundsNode>
<y:Realizers active="0">
<y:GroupNode>
<y:Geometry height="90.0" width="164.73919999999998" x="1113.1776" y="451.01919999999996"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle hasColor="false" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="false" width="164.73919999999998" x="0.0" xml:space="preserve" y="0.0">Group 1</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="15" leftF="14.739199999999983" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
<y:GroupNode>
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
<y:Fill color="#F5F5F5" transparent="false"/>
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" xml:space="preserve" y="0.0">Folder 1</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
</y:Realizers>
</y:ProxyAutoBoundsNode>
</data>
<graph edgedefault="directed" id="n2:">
<node id="n2::n0">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="60.0" x="1142.9168" y="466.01919999999996"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="28.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n2::n1">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="120.0" x="1142.9168" y="466.01919999999996"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" backgroundColor="#FFFFFF" fontFamily="Dialog" fontSize="12" fontStyle="plain" height="17.96875" horizontalTextPosition="center" iconTextGap="4" lineColor="#000000" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="78.5078125" x="20.746093749999773" xml:space="preserve" y="4.862813159989173">2 x 64 bytes<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="-0.5" nodeRatioX="-1.1657341758564144E-15" nodeRatioY="-0.41895311400018" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
</graph>
</node>
<node id="n3" yfiles.foldertype="group">
<data key="d4" xml:space="preserve"/>
<data key="d5"/>
<data key="d6">
<y:ProxyAutoBoundsNode>
<y:Realizers active="0">
<y:GroupNode>
<y:Geometry height="89.99999999999994" width="150.0" x="997.9168" y="451.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle hasColor="false" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="false" width="150.0" x="0.0" xml:space="preserve" y="0.0">Group 2</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
<y:GroupNode>
<y:Geometry height="50.0" width="50.0" x="997.9168" y="451.01919999999996"/>
<y:Fill color="#F5F5F5" transparent="false"/>
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" xml:space="preserve" y="0.0">Folder 2</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
</y:Realizers>
</y:ProxyAutoBoundsNode>
</data>
<graph edgedefault="directed" id="n3:">
<node id="n3::n0">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="20.0" x="1092.9168" y="466.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="8.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3::n1">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="20.0" x="1072.9168" y="466.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="8.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3::n2">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="20.0" x="1032.9168" y="466.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="8.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3::n3">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="20.0" x="1052.9168" y="466.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="8.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3::n4">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="20.0" x="1012.9168" y="466.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="8.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n3::n5">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="120.0" x="1012.9168" y="466.0192"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" backgroundColor="#FFFFFF" fontFamily="Dialog" fontSize="12" fontStyle="plain" height="17.96875" horizontalTextPosition="center" iconTextGap="4" lineColor="#000000" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="78.5078125" x="14.814773932043863" xml:space="preserve" y="4.392671444094276">6 x 16 bytes<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="-0.5" nodeRatioX="-0.04942766514963404" nodeRatioY="-0.426788809265095" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
</graph>
</node>
<node id="n4" yfiles.foldertype="group">
<data key="d4" xml:space="preserve"/>
<data key="d5"/>
<data key="d6">
<y:ProxyAutoBoundsNode>
<y:Realizers active="0">
<y:GroupNode>
<y:Geometry height="90.0" width="150.0" x="997.9168" y="521.1759843749999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle hasColor="false" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="false" width="150.0" x="0.0" xml:space="preserve" y="0.0">Group 3</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
<y:GroupNode>
<y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
<y:Fill color="#F5F5F5" transparent="false"/>
<y:BorderStyle color="#000000" type="dashed" width="1.0"/>
<y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" xml:space="preserve" y="0.0">Folder 3</y:NodeLabel>
<y:Shape type="roundrectangle"/>
<y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
<y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
<y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
</y:GroupNode>
</y:Realizers>
</y:ProxyAutoBoundsNode>
</data>
<graph edgedefault="directed" id="n4:">
<node id="n4::n0">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="30.0" x="1012.9168" y="536.1759843749999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="13.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n4::n1">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="30.0" x="1042.9168" y="536.1759843749999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="13.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n4::n2">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="30.0" x="1072.9168" y="536.1759843749999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" hasText="false" height="4.0" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="4.0" x="13.0" y="28.0">
<y:LabelModel>
<y:SmartNodeLabelModel distance="4.0"/>
</y:LabelModel>
<y:ModelParameter>
<y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
</y:ModelParameter>
</y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
<node id="n4::n3">
<data key="d5"/>
<data key="d6">
<y:ShapeNode>
<y:Geometry height="60.0" width="120.0" x="1012.9168" y="536.1759843749999"/>
<y:Fill hasColor="false" transparent="false"/>
<y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
<y:NodeLabel alignment="center" autoSizePolicy="content" backgroundColor="#FFFFFF" fontFamily="Dialog" fontSize="12" fontStyle="plain" height="17.96875" horizontalTextPosition="center" iconTextGap="4" lineColor="#000000" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="78.5078125" x="20.74609375" xml:space="preserve" y="4.392671444094276">4 x 32 bytes<y:LabelModel><y:SmartNodeLabelModel distance="4.0"/></y:LabelModel><y:ModelParameter><y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="-0.5" nodeRatioX="0.0" nodeRatioY="-0.426788809265095" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/></y:ModelParameter></y:NodeLabel>
<y:Shape type="rectangle"/>
</y:ShapeNode>
</data>
</node>
</graph>
</node>
</graph>
<data key="d7">
<y:Resources/>
</data>
</graphml>


@ -0,0 +1,256 @@
(Binary PDF content not reproduced: a single-page diagram exported from yEd via yExport 1.5, media box 311 x 209 pt.)

misc/satrs-logo-v2.png (new binary file, 49 KiB, content not shown)


@ -5,7 +5,14 @@ High-level documentation of the [sat-rs project](https://absatsw.irs.uni-stuttga
## Building
If you have not done so, install `mdbook` using `cargo install mdbook --locked`.
If you have not done so, install the pre-requisites first:
```sh
cargo install mdbook --locked
cargo install mdbook-linkcheck --locked
```
After that, you can build the book with:
```sh
mdbook build


@ -15,7 +15,7 @@ action commanding could look like.
2. Target ID and Action String based. The target ID is the same as in the first proposal, but
the unique action is identified by a string.
The framework provides an `ActionRequest` abstraction to model both of these cases.
The library provides an `ActionRequest` abstraction to model both of these cases.
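As a rough sketch of what such an abstraction could look like (the type and field names below are hypothetical, not the actual `satrs` definition), both proposals can be modeled with an enum for the action identifier next to a numeric target ID:

```rust
// Hypothetical sketch only; see the ActionRequest API documentation for the real type.
type TargetId = u32; // assumption: targets are addressed by a numeric ID

#[derive(Debug)]
pub enum ActionIdentifier {
    /// Proposal 1: the unique action is identified by a numeric ID.
    Id(u32),
    /// Proposal 2: the unique action is identified by an action string.
    Name(String),
}

#[derive(Debug)]
pub struct ActionRequestSketch {
    pub target: TargetId,
    pub action: ActionIdentifier,
}

fn main() {
    let by_id = ActionRequestSketch { target: 0x2, action: ActionIdentifier::Id(5) };
    let by_name = ActionRequestSketch {
        target: 0x2,
        action: ActionIdentifier::Name("switch-heater".into()),
    };
    println!("{by_id:?}\n{by_name:?}");
}
```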
## Commanding with ECSS PUS 8


@ -17,14 +17,14 @@ it is still centered around small packets. `sat-rs` provides support for these E
standards and also attempts to fill the gap to the internet protocol by providing the following
components.
1. [UDP TMTC Server](https://docs.rs/satrs-core/0.1.0-alpha.0/satrs_core/hal/host/udp_server/index.html).
1. [UDP TMTC Server](https://docs.rs/satrs/latest/satrs/hal/std/udp_server/index.html).
UDP is already packet based which makes it an excellent fit for exchanging space packets.
2. [TCP TMTC Server Components](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/hal/std/tcp_server/index.html).
TCP is a stream based protocol, so the framework provides building blocks to parse telemetry
2. [TCP TMTC Server Components](https://docs.rs/satrs/latest/satrs/hal/std/tcp_server/index.html).
TCP is a stream based protocol, so the library provides building blocks to parse telemetry
from an arbitrary bytestream. Two concrete implementations are provided:
- [TCP spacepackets server](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/hal/std/tcp_server/struct.TcpSpacepacketsServer.html)
- [TCP spacepackets server](https://docs.rs/satrs/latest/satrs/hal/std/tcp_server/struct.TcpSpacepacketsServer.html)
to parse tightly packed CCSDS Spacepackets.
- [TCP COBS server](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/hal/std/tcp_server/struct.TcpTmtcInCobsServer.html)
- [TCP COBS server](https://docs.rs/satrs/latest/satrs/hal/std/tcp_server/struct.TcpTmtcInCobsServer.html)
to parse generic frames wrapped with the
[COBS protocol](https://en.wikipedia.org/wiki/Consistent_Overhead_Byte_Stuffing).
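To illustrate what COBS framing does on the wire (a generic sketch of the algorithm, independent of the sat-rs server implementation), the following self-contained encoder removes all zero bytes from a packet so that a single zero byte can act as an unambiguous frame delimiter on a TCP byte stream:

```rust
/// Minimal COBS encoder sketch (not the sat-rs implementation): the output
/// contains no zero bytes, so a trailing zero can delimit frames on a stream.
fn cobs_encode(data: &[u8]) -> Vec<u8> {
    let mut out = Vec::with_capacity(data.len() + data.len() / 254 + 2);
    let mut code_idx = 0; // index of the current block's code byte
    let mut code: u8 = 1; // block length counter (including the code byte)
    out.push(0); // placeholder for the first code byte
    for &byte in data {
        if byte == 0 {
            out[code_idx] = code; // finish the current block
            code_idx = out.len();
            out.push(0);
            code = 1;
        } else {
            out.push(byte);
            code += 1;
            if code == 0xFF {
                // Maximum block length reached, start a new block.
                out[code_idx] = code;
                code_idx = out.len();
                out.push(0);
                code = 1;
            }
        }
    }
    out[code_idx] = code;
    out
}

fn main() {
    // A packet containing a zero byte which must not appear on the wire.
    let packet = [0x11, 0x22, 0x00, 0x33];
    let mut framed = cobs_encode(&packet);
    framed.push(0); // frame delimiter
    assert_eq!(framed, [0x03, 0x11, 0x22, 0x02, 0x33, 0x00]);
}
```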
@ -39,8 +39,12 @@ task might be to store all arriving telemetry persistently. This is especially i
space systems which do not have permanent contact like low-earth-orbit (LEO) satellites.
The most important task of a TC source is to deliver the telecommands to the correct recipients.
For modern component oriented software using message passing, this usually includes staged
demultiplexing components to determine where a command needs to be sent.
For component oriented software using message passing, this usually includes staged demultiplexing
components to determine where a command needs to be sent.
Using a generic concept of a TC source and a TM sink as part of the software design simplifies
the TMTC infrastructure and increases its flexibility: newly added TM generators and TC receivers
only have to forward their generated or received packets to those handler objects.
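A minimal sketch of this message passing pattern using standard library channels (purely illustrative, not the sat-rs API): every TM generator only needs a clone of the TM sink sender, and the TC source forwards received telecommands to the sender of the intended recipient.

```rust
use std::sync::mpsc;

// Placeholder packet type standing in for a real space packet.
type Packet = Vec<u8>;

fn main() {
    // TM sink: all TM generators forward their packets to this single channel.
    let (tm_sink_tx, tm_sink_rx) = mpsc::channel::<Packet>();
    // Recipient channel for one hypothetical subsystem handler.
    let (acs_tx, acs_rx) = mpsc::channel::<Packet>();

    // A newly added TM generator only needs a clone of the sink sender.
    let tm_generator = tm_sink_tx.clone();
    tm_generator.send(vec![0x08, 0x01, 0xc0, 0x00]).expect("TM sink gone");

    // The TC source demultiplexes a received TC to the correct recipient,
    // here simply the single subsystem handler.
    let received_tc = vec![0x18, 0x02, 0xc0, 0x00];
    acs_tx.send(received_tc).expect("recipient gone");

    // Downstream, the TM sink could store or downlink the packet while the
    // recipient executes the command.
    assert!(tm_sink_rx.recv().is_ok());
    assert!(acs_rx.recv().is_ok());
}
```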
# Low-level protocols and the bridge to the communication subsystem


@ -3,31 +3,64 @@
Software for space systems oftentimes has different requirements than the software for host
systems or servers. Currently, most space systems are considered embedded systems.
For these systems, the computation power and the available heap are the most important resources
which are constrained. This might make completely heap based memory management schemes which
For these systems, the computation power and the available heap are important resources
which are also constrained. This might make completely heap based memory management schemes which
are oftentimes used on host and server based systems unfeasible. Still, completely forbidding
heap allocations might make software development unnecessarily difficult, especially in a
time where the OBSW might be running on Linux based systems with hundreds of MBs of RAM.
A useful pattern used commonly in space systems is to limit heap allocations to program
A useful pattern commonly used in space systems is to limit heap allocations to program
initialization time and avoid frequent run-time allocations. This prevents issues like
running out of memory (something even Rust can not protect against) or heap fragmentation.
running out of memory (something even Rust can not protect against) or heap fragmentation on systems
without an MMU.
# Using pre-allocated pool structures
A huge candidate for heap allocations is the TMTC handling. TCs, TMs and IPC data are all
A candidate for heap allocations is the TMTC handling. TCs, TMs and IPC data are all
candidates where the data size might vary greatly. The regular solution for host systems
might be to send around this data as a `Vec<u8>` until it is dropped. `sat-rs` provides
another solution to avoid run-time allocations by offering pre-allocated static
pools.
pools. These pools are split into subpools where each subpool can have different page sizes.
For example, a very small telecommand (TC) pool might look like this:
These pools are split into subpools where each subpool can have different page sizes.
For example, a very small TC pool might look like this:
![Example Pool](images/pools/static-pools.png)
TODO: Add image
The core of the pool abstractions is the
[PoolProvider trait](https://docs.rs/satrs/latest/satrs/pool/trait.PoolProvider.html).
This trait specifies the general API a pool structure should have without making assumptions
about how the data is stored.
This trait is implemented by a static memory pool.
The code to generate this static pool would look like this:
<!-- Would be nice to test this code sample, but need to wait
for https://github.com/rust-lang/mdBook/issues/706 to be merged.. -->
```rust, ignore
use satrs::pool::{StaticMemoryPool, StaticPoolConfig};
let tc_pool = StaticMemoryPool::new(StaticPoolConfig::new(vec![
    (6, 16),
    (4, 32),
    (2, 64),
    (1, 128)
]));
```
It should be noted that the buckets only show the maximum size of data being stored inside them.
The store will keep a separate structure to track the actual size of the data being stored.
A TC entry inside this pool has a store address which can then be sent around without having
to dynamically allocate memory. The same principle can also be applied to the TM and IPC data.
to dynamically allocate memory. The same principle can also be applied to the telemetry (TM) and
inter-process communication (IPC) data.
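To make the subpool principle more concrete, the following self-contained sketch stores packets in
pre-allocated buckets and hands out a small address instead of an owned buffer. The `SimplePool`
and `StoreAddr` types are made up for illustration and are not the satrs implementation; see the
API links below for the real interface.

```rust
/// Illustrative store address: which subpool and which bucket inside it.
#[derive(Debug, Clone, Copy)]
struct StoreAddr {
    subpool: usize,
    bucket: usize,
}

/// One subpool: a fixed number of buckets of a fixed maximum size,
/// plus the actual length of the data stored in each occupied bucket.
struct Subpool {
    bucket_size: usize,
    data: Vec<u8>,
    used_len: Vec<Option<usize>>,
}

struct SimplePool {
    subpools: Vec<Subpool>,
}

impl SimplePool {
    /// All memory is allocated once here, at initialization time.
    fn new(cfg: &[(usize, usize)]) -> Self {
        let subpools = cfg
            .iter()
            .map(|&(num_buckets, bucket_size)| Subpool {
                bucket_size,
                data: vec![0; num_buckets * bucket_size],
                used_len: vec![None; num_buckets],
            })
            .collect();
        Self { subpools }
    }

    /// Store a packet in the first free bucket of the smallest subpool which fits it.
    fn add(&mut self, packet: &[u8]) -> Option<StoreAddr> {
        for (si, sp) in self.subpools.iter_mut().enumerate() {
            if packet.len() > sp.bucket_size {
                continue;
            }
            if let Some(bi) = sp.used_len.iter().position(|l| l.is_none()) {
                let start = bi * sp.bucket_size;
                sp.data[start..start + packet.len()].copy_from_slice(packet);
                sp.used_len[bi] = Some(packet.len());
                return Some(StoreAddr { subpool: si, bucket: bi });
            }
        }
        None
    }

    fn read(&self, addr: StoreAddr) -> Option<&[u8]> {
        let sp = &self.subpools[addr.subpool];
        let len = sp.used_len[addr.bucket]?;
        let start = addr.bucket * sp.bucket_size;
        Some(&sp.data[start..start + len])
    }
}

fn main() {
    // Same shape as the configuration shown above: (number of buckets, bucket size).
    let mut pool = SimplePool::new(&[(6, 16), (4, 32), (2, 64), (1, 128)]);
    let addr = pool.add(&[1, 2, 3, 4]).expect("pool full");
    // Only the small address is passed around, for example through a message queue.
    assert_eq!(pool.read(addr), Some(&[1, 2, 3, 4][..]));
}
```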
You can read
- [`StaticPoolConfig` API](https://docs.rs/satrs/latest/satrs/pool/struct.StaticPoolConfig.html)
- [`StaticMemoryPool` API](https://docs.rs/satrs/latest/satrs/pool/struct.StaticMemoryPool.html)
for more details.
In the future, optimized pool structures which use standard containers or are
[`Sync`](https://doc.rust-lang.org/std/marker/trait.Sync.html) by default might be added as well.
# Using special crates to prevent smaller allocations
@ -35,7 +68,7 @@ Another common way to use the heap on host systems is using containers like `Str
to work with data where the size is not known beforehand. The most common solution for embedded
systems is to determine the maximum expected size and then use a pre-allocated `u8` buffer and a
size variable. Alternatively, you can use the following crates for more convenience or a smart
behaviour which at the very least reduce heap allocations:
behaviour which at the very least reduces heap allocations:
1. [`smallvec`](https://docs.rs/smallvec/latest/smallvec/).
2. [`arrayvec`](https://docs.rs/arrayvec/latest/arrayvec/index.html) which also contains an


@ -1,13 +1,14 @@
# Framework Design
# Library Design
Satellites and space systems in general are complex systems with a wide range of requirements for
both the hardware and the software. Consequently, the general design of the framework is centered
both the hardware and the software. Consequently, the general design of the library is centered
around many light-weight components which try to impose as few restrictions as possible on how to
solve certain problems.
solve certain problems. This is also the reason why sat-rs is explicitly called a library
instead of a framework.
There are still a lot of common patterns and architectures across these systems where guidance
on how to solve a problem and a common structure would still be extremely useful to avoid pitfalls
which were already solved and to avoid boilerplate code. This framework tries to provide this
which were already solved and to avoid boilerplate code. This library tries to provide this
structure and guidance in the following way:
1. Providing this book which explains the architecture and design patterns with respect to common
@ -18,7 +19,7 @@ structure and guidance the following way:
3. Providing a good test suite. This includes both unit tests and integration tests. The integration
tests can also serve as smaller usage examples than the large `satrs-example` application.
This framework has special support for standards used in the space industry. This especially
This library has special support for standards used in the space industry. This especially
includes standards provided by the Consultative Committee for Space Data Systems (CCSDS) and the
European Cooperation for Space Standardization (ECSS). It does not enforce using any of those standards,
but it is always recommended to use some sort of standard for interoperability.
@ -30,10 +31,10 @@ Flying Laptop Project by the University of Stuttgart with Airbus Defence and Spa
It has flight heritage through the two missions [FLP](https://www.irs.uni-stuttgart.de/en/research/satellitetechnology-and-instruments/smallsatelliteprogram/flying-laptop/)
and [EIVE](https://www.irs.uni-stuttgart.de/en/research/satellitetechnology-and-instruments/smallsatelliteprogram/EIVE/).
Therefore, a lot of the design concepts were ported more or less unchanged to the `sat-rs`
framework.
library.
FLP is a medium-size small satellite with a higher budget and longer development time than EIVE,
which allowed building a highly reliable system, while EIVE is a smaller 6U+ CubeSat which had a
shorter development cycle and was built using cheaper COTS components. This framework also tries
shorter development cycle and was built using cheaper COTS components. This library also tries
to accumulate the knowledge of developing the OBSW and operating the satellite for both these
different systems and provide a solution for a wider range of small satellite systems.


@ -1,16 +1,24 @@
# Events
Events can be an extremely important mechanism used for remote systems to monitor unexpected
or expected anomalies and events occurring on these systems. They are oftentimes tied to
Events are an important mechanism used for remote systems to monitor unexpected
or expected anomalies and events occurring on these systems.
One common use case for events on remote systems is to offer a light-weight publish-subscribe
and IPC mechanism for software and hardware events which are also packaged as telemetry
(TM) or can trigger a system response. They can also be tied to
Fault Detection, Isolation and Recovery (FDIR) operations, which need to happen autonomously.
Events can also be used as a convenient Inter-Process Communication (IPC) mechanism, which is
also observable for the Ground segment. The PUS Service 5 standardizes how the ground interface
for events might look like, but does not specify how other software components might react
to those events. There is the PUS Service 19, which might be used for that purpose, but the
event components recommended by this framework do not really need this service.
The PUS Service 5 standardizes how the ground interface for events might look, but does not
specify how other software components might react to those events. There is the PUS Service 19,
which might be used for that purpose, but the event components recommended by this framework do not
rely on the presence of this service.
The following image shows how the flow of events could look in a system where components
can generate events, and where other system components might be interested in those events:
![Event flow](images/events/event_man_arch.png)
For the concrete implementation of your own event management and/or event routing system, you
can have a look at the event management documentation inside the
[API documentation](https://docs.rs/satrs/latest/satrs/event_man/index.html) where you can also
find references to all examples.
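As a rough illustration of such a flow using only standard-library channels: components publish
events to a central channel, and a manager forwards them to all subscribed receivers. The `Event`
struct and the routing loop below are made up for this sketch and do not use the sat-rs event
manager API documented above.

```rust
use std::sync::mpsc;

// Hypothetical event type for illustration purposes only.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Event {
    group_id: u16,
    unique_id: u16,
}

fn main() {
    // Components send their events to this central channel.
    let (event_tx, event_rx) = mpsc::channel::<Event>();

    // Two example subscribers: a TM generator and an FDIR handler.
    let (tm_tx, tm_rx) = mpsc::channel::<Event>();
    let (fdir_tx, fdir_rx) = mpsc::channel::<Event>();
    let subscribers = vec![tm_tx, fdir_tx];

    // A component, for example a device handler, generates an event.
    event_tx.send(Event { group_id: 2, unique_id: 5 }).unwrap();

    // The event "manager" routes every received event to all subscribers.
    // A real implementation would filter based on subscriptions.
    while let Ok(event) = event_rx.try_recv() {
        for sub in &subscribers {
            sub.send(event).unwrap();
        }
    }

    assert_eq!(tm_rx.try_recv().unwrap(), Event { group_id: 2, unique_id: 5 });
    assert_eq!(fdir_rx.try_recv().unwrap(), Event { group_id: 2, unique_id: 5 });
}
```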


@ -1,6 +1,6 @@
# sat-rs Example Application
The `sat-rs` framework includes a monolithic example application which can be found inside
The `sat-rs` library includes a monolithic example application which can be found inside
the [`satrs-example`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-example)
subdirectory of the repository. The primary purpose of this example application is to show how
the various components of the sat-rs framework could be used as part of a larger on-board
@ -23,11 +23,11 @@ Some additional explanation is provided for the various components.
The example includes a UDP and TCP server to receive telecommands and poll telemetry from. This
might be an optional component for an OBSW which is only used during the development phase on
ground. The UDP server is strongly based on the
[UDP TC server](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/hal/std/udp_server/struct.UdpTcServer.html).
[UDP TC server](https://docs.rs/satrs/latest/satrs/hal/std/udp_server/struct.UdpTcServer.html).
This server component is wrapped by a TMTC server which sends all telemetry to the last connected
client.
The TCP server is based on the [TCP Spacepacket Server](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/hal/std/tcp_server/struct.TcpSpacepacketsServer.html)
The TCP server is based on the [TCP Spacepacket Server](https://docs.rs/satrs/latest/satrs/hal/std/tcp_server/struct.TcpSpacepacketsServer.html)
class. It parses space packets by using the CCSDS space packet ID as the packet
start delimiter. All available telemetry will be sent back to a client after having read all
telecommands from the client.
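The delimiting approach can be sketched as follows. This is a simplified illustration of CCSDS
space packet framing, not the actual server implementation, and the accepted packet ID is an
arbitrary example value.

```rust
/// Extract complete CCSDS space packets from a receive buffer, using a known
/// packet ID (the first two header bytes) as the start delimiter.
fn split_packets(buf: &[u8], valid_packet_id: u16) -> Vec<&[u8]> {
    let mut packets = Vec::new();
    let mut idx = 0;
    while idx + 6 <= buf.len() {
        let packet_id = u16::from_be_bytes([buf[idx], buf[idx + 1]]);
        if packet_id != valid_packet_id {
            // Not a known packet start, advance by one byte and keep searching.
            idx += 1;
            continue;
        }
        // The length field contains the size of the packet data field minus one,
        // so the total packet size is the field value plus 7 (6 header bytes + 1).
        let len_field = u16::from_be_bytes([buf[idx + 4], buf[idx + 5]]) as usize;
        let total_len = len_field + 7;
        if idx + total_len > buf.len() {
            break; // Incomplete packet, wait for more data.
        }
        packets.push(&buf[idx..idx + total_len]);
        idx += total_len;
    }
    packets
}

fn main() {
    // Example packet ID corresponding to a TC packet with a made-up APID.
    let packet_id = 0x1873;
    // One minimal packet with a single byte of packet data.
    let stream = [0x18, 0x73, 0xC0, 0x00, 0x00, 0x00, 0xFF];
    let packets = split_packets(&stream, packet_id);
    assert_eq!(packets.len(), 1);
    assert_eq!(packets[0].len(), 7);
}
```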
@ -51,13 +51,13 @@ services. This currently includes the following services:
- Service 1 for telecommand verification. Verification is handled locally: Each
component which generates verification telemetry in some shape or form receives a
[reporter](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/pus/verification/struct.VerificationReporterWithSender.html)
[reporter](https://docs.rs/satrs/latest/satrs/pus/verification/struct.VerificationReporterWithSender.html)
object which can be used to send PUS 1 verification telemetry to the TM funnel.
- Service 3 for housekeeping telemetry handling.
- Service 5 for management and downlink of on-board events.
- Service 8 for handling on-board actions.
- Service 11 for scheduling telecommands to be released at a specific time. This component
uses the [PUS scheduler class](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/pus/scheduler/alloc_mod/struct.PusScheduler.html)
uses the [PUS scheduler class](https://docs.rs/satrs/latest/satrs/pus/scheduler/alloc_mod/struct.PusScheduler.html)
which performs the core logic of scheduling telecommands. All telecommands released by the
scheduler are sent to the central TC source using a message.
- Service 17 for test purposes like pings.
@ -65,10 +65,10 @@ services. This currently includes the following services:
### Event Management Component
An event manager based on the sat-rs
[event manager component](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/event_man/index.html)
[event manager component](https://docs.rs/satrs/latest/satrs/event_man/index.html)
is provided to handle the event IPC and FDIR mechanism. The event messages are converted to PUS 5
telemetry by the
[PUS event dispatcher](https://docs.rs/satrs-core/0.1.0-alpha.1/satrs_core/pus/event_man/alloc_mod/struct.PusEventDispatcher.html).
[PUS event dispatcher](https://docs.rs/satrs/latest/satrs/pus/event_man/alloc_mod/struct.PusEventDispatcher.html).
You can read the [events](./events.md) chapter for more in-depth information about event management.

Binary image file not shown (38 KiB).


@ -1,21 +1,24 @@
The sat-rs book
======
This book is the primary information resource for the [sat-rs framework](https://egit.irs.uni-stuttgart.de/rust/sat-rs)
This book is the primary information resource for the [sat-rs library](https://egit.irs.uni-stuttgart.de/rust/sat-rs)
in addition to the regular API documentation. It contains the following resources:
1. Architecture information and considerations which would exceed the scope of the regular API documentation.
2. General information on how to build On-Board Software and how `sat-rs` can help to fulfill
2. General information on how to build on-board software and how `sat-rs` can help to fulfill
the unique requirements of writing software for remote systems.
3. A Getting-Started workshop where a small on-board software application is built from scratch using
sat-rs components.
# Introduction
The primary goal of the sat-rs framework is to provide re-usable components
The primary goal of the sat-rs library is to provide re-usable components
to write on-board software for remote systems like rovers or satellites. It is specifically written
for the special requirements of these systems.
It should be noted that sat-rs is early-stage software. Important features are missing. New releases
with breaking changes are released regularly, with all changes documented inside respective
changelog files. You should only use this library if you are willing to work in this
environment.
A lot of the architecture and general design considerations are based on the
[FSFW](https://egit.irs.uni-stuttgart.de/fsfw/fsfw) C++ framework which has flight heritage
through the 2 missions [FLP](https://www.irs.uni-stuttgart.de/en/research/satellitetechnology-and-instruments/smallsatelliteprogram/flying-laptop/)
@ -26,4 +29,21 @@ and [EIVE](https://www.irs.uni-stuttgart.de/en/research/satellitetechnology-and-
The [`satrs-example`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-example)
provides various practical usage examples of the `sat-rs` framework. If you are more interested in
the practical application of `sat-rs` inside an application, it is recommended to have a look at
the example application.
the example application. The [`satrs-minisim`](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-minisim)
application complements the example application and can be used to simulate some physical devices
for the `satrs-example` device handlers.
# Flight Heritage
There is an active and continuous effort to get early flight heritage for the sat-rs library.
Currently, this library has the following flight heritage:
- Submission as an [OPS-SAT experiment](https://www.esa.int/Enabling_Support/Operations/OPS-SAT)
which has also
[flown on the satellite](https://blogs.esa.int/rocketscience/2024/05/21/ops-sat-reentry-tomorrow-final-experiments-continue/).
The application is strongly based on the sat-rs example application. You can find the repository
of the experiment [here](https://egit.irs.uni-stuttgart.de/rust/ops-sat-rs).
- Development and use of a sat-rs-based [demonstration on-board software](https://egit.irs.uni-stuttgart.de/rust/eurosim-obsw)
alongside a Flight System Simulator in the context of a
[Bachelor's Thesis](https://www.researchgate.net/publication/380785984_Design_and_Development_of_a_Hardware-in-the-Loop_EuroSim_Demonstrator)
at [Airbus Netherlands](https://www.airbusdefenceandspacenetherlands.nl/).


@ -1,11 +1,11 @@
# Modes
Modes are an extremely useful concept for complex system in general. They also allow simplified
system reasoning for both system operators and OBSW developers. They model the behaviour of a
component and also provide observability of a system. A few examples of how to model
different components of a space system with modes will be given.
Modes are an extremely useful concept to model complex systems. They allow simplified
system reasoning for both system operators and OBSW developers. They also provide a way to alter
the behaviour of a component and provide observability of a system. A few examples of how to
model different components of a space system with modes will be given.
## Modelling a physical device with modes
## Physical device component with modes
The following simple mode scheme with the three modes
@ -13,7 +13,8 @@ The following simple mode scheme with the following three mode
- `ON`
- `NORMAL`
can be applied to a large number of simpler devices of a remote system, for example sensors.
can be applied to a large number of simpler device controllers of a remote system, for example
sensors.
1. `OFF` means that a device is physically switched off, and the corresponding software component
does not poll the device regularly.
@ -31,7 +32,7 @@ for the majority of devices:
2. `NORMAL` or `ON` to `OFF`: Any important shutdown configuration or handling must be performed
before powering off the device.
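A minimal sketch of such a three-mode device handler could look like this. The `DeviceHandler`
type and its methods are hypothetical and do not reflect the sat-rs mode API; the commented-out
calls only mark where the transition handling described above would go.

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Mode {
    Off,
    On,
    Normal,
}

/// Hypothetical device handler which only polls its device in NORMAL mode.
struct DeviceHandler {
    mode: Mode,
}

impl DeviceHandler {
    fn commanded_mode_change(&mut self, new_mode: Mode) {
        match (self.mode, new_mode) {
            // OFF to ON or NORMAL: power on and configure the device first.
            (Mode::Off, Mode::On) | (Mode::Off, Mode::Normal) => {
                // switch_power_on(); configure_device();
            }
            // NORMAL or ON to OFF: perform shutdown handling before power-off.
            (Mode::On, Mode::Off) | (Mode::Normal, Mode::Off) => {
                // shutdown_device(); switch_power_off();
            }
            _ => {}
        }
        self.mode = new_mode;
    }

    fn periodic_operation(&mut self) {
        if self.mode == Mode::Normal {
            // Only poll the device and generate telemetry in NORMAL mode.
        }
    }
}

fn main() {
    let mut handler = DeviceHandler { mode: Mode::Off };
    handler.commanded_mode_change(Mode::Normal);
    handler.periodic_operation();
    assert_eq!(handler.mode, Mode::Normal);
}
```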
## Modelling a controller with modes
## Controller components with modes
Controller components do not model physical devices, but a mode scheme is still the best
way to model most of these components.


@ -1,9 +0,0 @@
Change Log
=======
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
# [unreleased]


@ -1,125 +0,0 @@
[package]
name = "satrs-core"
version = "0.1.0-alpha.2"
edition = "2021"
rust-version = "1.61"
authors = ["Robin Mueller <muellerr@irs.uni-stuttgart.de>"]
description = "Core components of the sat-rs framework to build software for remote systems"
homepage = "https://egit.irs.uni-stuttgart.de/rust/sat-rs"
repository = "https://egit.irs.uni-stuttgart.de/rust/sat-rs"
license = "Apache-2.0"
keywords = ["no-std", "space", "aerospace"]
categories = ["aerospace", "aerospace::space-protocols", "no-std", "hardware-support", "embedded"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
delegate = ">0.7, <=0.10"
paste = "1"
[dependencies.smallvec]
version = "1"
[dependencies.num_enum]
version = ">0.5, <=0.7"
default-features = false
[dependencies.crc]
version = "3"
[dependencies.dyn-clone]
version = "1"
optional = true
[dependencies.hashbrown]
version = "0.14"
optional = true
[dependencies.heapless]
version = "0.7"
optional = true
[dependencies.num-traits]
version = "0.2"
default-features = false
[dependencies.downcast-rs]
version = "1.2"
default-features = false
optional = true
[dependencies.bus]
version = "2.2"
optional = true
[dependencies.crossbeam-channel]
version= "0.5"
default-features = false
optional = true
[dependencies.thiserror]
version = "1"
optional = true
[dependencies.serde]
version = "1"
default-features = false
optional = true
[dependencies.socket2]
version = "0.5.4"
features = ["all"]
optional = true
[dependencies.spacepackets]
version = "0.9.0"
default-features = false
# git = "https://egit.irs.uni-stuttgart.de/rust/spacepackets.git"
# rev = "297cfad22637d3b07a1b27abe56d9a607b5b82a7"
# branch = "main"
[dependencies.cobs]
git = "https://github.com/robamu/cobs.rs.git"
version = "0.2.3"
branch = "all_features"
default-features = false
[dev-dependencies]
serde = "1"
zerocopy = "0.7"
once_cell = "1.13"
serde_json = "1"
rand = "0.8"
tempfile = "3"
[dev-dependencies.postcard]
version = "1"
[features]
default = ["std"]
std = [
"downcast-rs/std",
"alloc",
"bus",
"postcard/use-std",
"crossbeam-channel/std",
"serde/std",
"spacepackets/std",
"num_enum/std",
"thiserror",
"socket2"
]
alloc = [
"serde/alloc",
"spacepackets/alloc",
"hashbrown",
"dyn-clone",
"downcast-rs"
]
serde = ["dep:serde", "spacepackets/serde"]
crossbeam = ["crossbeam-channel"]
heapless = ["dep:heapless"]
doc-images = []
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "doc_cfg", "--generate-link-to-definition"]


@ -1,9 +0,0 @@
[![Crates.io](https://img.shields.io/crates/v/satrs-core)](https://crates.io/crates/satrs-core)
[![docs.rs](https://img.shields.io/docsrs/satrs-core)](https://docs.rs/satrs-core)
satrs-core
======
This crate contains the core components of the sat-rs framework.
You can find more information on [homepage](https://egit.irs.uni-stuttgart.de/rust/sat-rs).

File diff suppressed because it is too large.


@ -1,770 +0,0 @@
use alloc::string::{String, ToString};
use core::fmt::Display;
use crc::{Crc, CRC_32_CKSUM};
use spacepackets::cfdp::ChecksumType;
use spacepackets::ByteConversionError;
#[cfg(feature = "std")]
use std::error::Error;
use std::path::Path;
#[cfg(feature = "std")]
pub use stdmod::*;
pub const CRC_32: Crc<u32> = Crc::<u32>::new(&CRC_32_CKSUM);
#[derive(Debug, Clone)]
pub enum FilestoreError {
FileDoesNotExist,
FileAlreadyExists,
DirDoesNotExist,
Permission,
IsNotFile,
IsNotDirectory,
ByteConversion(ByteConversionError),
Io {
raw_errno: Option<i32>,
string: String,
},
ChecksumTypeNotImplemented(ChecksumType),
}
impl From<ByteConversionError> for FilestoreError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversion(value)
}
}
impl Display for FilestoreError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
FilestoreError::FileDoesNotExist => {
write!(f, "file does not exist")
}
FilestoreError::FileAlreadyExists => {
write!(f, "file already exists")
}
FilestoreError::DirDoesNotExist => {
write!(f, "directory does not exist")
}
FilestoreError::Permission => {
write!(f, "permission error")
}
FilestoreError::IsNotFile => {
write!(f, "is not a file")
}
FilestoreError::IsNotDirectory => {
write!(f, "is not a directory")
}
FilestoreError::ByteConversion(e) => {
write!(f, "filestore error: {e}")
}
FilestoreError::Io { raw_errno, string } => {
write!(
f,
"filestore generic IO error with raw errno {:?}: {}",
raw_errno, string
)
}
FilestoreError::ChecksumTypeNotImplemented(checksum_type) => {
write!(f, "checksum {:?} not implemented", checksum_type)
}
}
}
}
impl Error for FilestoreError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
FilestoreError::ByteConversion(e) => Some(e),
_ => None,
}
}
}
#[cfg(feature = "std")]
impl From<std::io::Error> for FilestoreError {
fn from(value: std::io::Error) -> Self {
Self::Io {
raw_errno: value.raw_os_error(),
string: value.to_string(),
}
}
}
pub trait VirtualFilestore {
fn create_file(&self, file_path: &str) -> Result<(), FilestoreError>;
fn remove_file(&self, file_path: &str) -> Result<(), FilestoreError>;
/// Truncating a file means deleting all its data so the resulting file is empty.
/// This can be more efficient than removing and re-creating a file.
fn truncate_file(&self, file_path: &str) -> Result<(), FilestoreError>;
fn remove_dir(&self, dir_path: &str, all: bool) -> Result<(), FilestoreError>;
fn create_dir(&self, dir_path: &str) -> Result<(), FilestoreError>;
fn read_data(
&self,
file_path: &str,
offset: u64,
read_len: u64,
buf: &mut [u8],
) -> Result<(), FilestoreError>;
fn write_data(&self, file: &str, offset: u64, buf: &[u8]) -> Result<(), FilestoreError>;
fn filename_from_full_path(path: &str) -> Option<&str>
where
Self: Sized,
{
// Convert the path string to a Path
let path = Path::new(path);
// Extract the file name using the file_name() method
path.file_name().and_then(|name| name.to_str())
}
fn is_file(&self, path: &str) -> bool;
fn is_dir(&self, path: &str) -> bool {
!self.is_file(path)
}
fn exists(&self, path: &str) -> bool;
/// This special function is the CFDP specific abstraction to verify the checksum of a file.
/// This allows keeping OS-specific details, like reading the whole file in the most efficient
/// manner, inside the file system abstraction.
///
/// The passed verification buffer argument will be used by the specific implementation as
/// a buffer to read the file into. It is recommended to use common buffer sizes like
/// 4096 or 8192 bytes.
fn checksum_verify(
&self,
file_path: &str,
checksum_type: ChecksumType,
expected_checksum: u32,
verification_buf: &mut [u8],
) -> Result<bool, FilestoreError>;
}
#[cfg(feature = "std")]
pub mod stdmod {
use super::*;
use std::{
fs::{self, File, OpenOptions},
io::{BufReader, Read, Seek, SeekFrom, Write},
path::Path,
};
#[derive(Default)]
pub struct NativeFilestore {}
impl VirtualFilestore for NativeFilestore {
fn create_file(&self, file_path: &str) -> Result<(), FilestoreError> {
if self.exists(file_path) {
return Err(FilestoreError::FileAlreadyExists);
}
File::create(file_path)?;
Ok(())
}
fn remove_file(&self, file_path: &str) -> Result<(), FilestoreError> {
if !self.exists(file_path) {
return Err(FilestoreError::FileDoesNotExist);
}
if !self.is_file(file_path) {
return Err(FilestoreError::IsNotFile);
}
fs::remove_file(file_path)?;
Ok(())
}
fn truncate_file(&self, file_path: &str) -> Result<(), FilestoreError> {
if !self.exists(file_path) {
return Err(FilestoreError::FileDoesNotExist);
}
if !self.is_file(file_path) {
return Err(FilestoreError::IsNotFile);
}
OpenOptions::new()
.write(true)
.truncate(true)
.open(file_path)?;
Ok(())
}
fn create_dir(&self, dir_path: &str) -> Result<(), FilestoreError> {
fs::create_dir(dir_path).map_err(|e| FilestoreError::Io {
raw_errno: e.raw_os_error(),
string: e.to_string(),
})?;
Ok(())
}
fn remove_dir(&self, dir_path: &str, all: bool) -> Result<(), FilestoreError> {
if !self.exists(dir_path) {
return Err(FilestoreError::DirDoesNotExist);
}
if !self.is_dir(dir_path) {
return Err(FilestoreError::IsNotDirectory);
}
if !all {
fs::remove_dir(dir_path)?;
return Ok(());
}
fs::remove_dir_all(dir_path)?;
Ok(())
}
fn read_data(
&self,
file_name: &str,
offset: u64,
read_len: u64,
buf: &mut [u8],
) -> Result<(), FilestoreError> {
if buf.len() < read_len as usize {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: read_len as usize,
}
.into());
}
if !self.exists(file_name) {
return Err(FilestoreError::FileDoesNotExist);
}
if !self.is_file(file_name) {
return Err(FilestoreError::IsNotFile);
}
let mut file = File::open(file_name)?;
file.seek(SeekFrom::Start(offset))?;
file.read_exact(&mut buf[0..read_len as usize])?;
Ok(())
}
fn write_data(&self, file: &str, offset: u64, buf: &[u8]) -> Result<(), FilestoreError> {
if !self.exists(file) {
return Err(FilestoreError::FileDoesNotExist);
}
if !self.is_file(file) {
return Err(FilestoreError::IsNotFile);
}
let mut file = OpenOptions::new().write(true).open(file)?;
file.seek(SeekFrom::Start(offset))?;
file.write_all(buf)?;
Ok(())
}
fn is_file(&self, path: &str) -> bool {
let path = Path::new(path);
path.is_file()
}
fn exists(&self, path: &str) -> bool {
let path = Path::new(path);
if !path.exists() {
return false;
}
true
}
fn checksum_verify(
&self,
file_path: &str,
checksum_type: ChecksumType,
expected_checksum: u32,
verification_buf: &mut [u8],
) -> Result<bool, FilestoreError> {
match checksum_type {
ChecksumType::Modular => {
if self.calc_modular_checksum(file_path)? == expected_checksum {
return Ok(true);
}
Ok(false)
}
ChecksumType::Crc32 => {
let mut digest = CRC_32.digest();
let file_to_check = File::open(file_path)?;
let mut buf_reader = BufReader::new(file_to_check);
loop {
let bytes_read = buf_reader.read(verification_buf)?;
if bytes_read == 0 {
break;
}
digest.update(&verification_buf[0..bytes_read]);
}
if digest.finalize() == expected_checksum {
return Ok(true);
}
Ok(false)
}
ChecksumType::NullChecksum => Ok(true),
_ => Err(FilestoreError::ChecksumTypeNotImplemented(checksum_type)),
}
}
}
impl NativeFilestore {
pub fn calc_modular_checksum(&self, file_path: &str) -> Result<u32, FilestoreError> {
let mut checksum: u32 = 0;
let file = File::open(file_path)?;
let mut buf_reader = BufReader::new(file);
let mut buffer = [0; 4];
loop {
let bytes_read = buf_reader.read(&mut buffer)?;
if bytes_read == 0 {
break;
}
// Perform padding directly in the buffer
(bytes_read..4).for_each(|i| {
buffer[i] = 0;
});
checksum = checksum.wrapping_add(u32::from_be_bytes(buffer));
}
Ok(checksum)
}
}
}
#[cfg(test)]
mod tests {
use std::{fs, path::Path, println};
use super::*;
use alloc::format;
use tempfile::tempdir;
const EXAMPLE_DATA_CFDP: [u8; 15] = [
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
];
const NATIVE_FS: NativeFilestore = NativeFilestore {};
#[test]
fn test_basic_native_filestore_create() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
let result =
NATIVE_FS.create_file(file_path.to_str().expect("getting str for file failed"));
assert!(result.is_ok());
let path = Path::new(&file_path);
assert!(path.exists());
assert!(NATIVE_FS.exists(file_path.to_str().unwrap()));
assert!(NATIVE_FS.is_file(file_path.to_str().unwrap()));
}
#[test]
fn test_basic_native_fs_file_exists() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
assert!(!NATIVE_FS.exists(file_path.to_str().unwrap()));
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
assert!(NATIVE_FS.exists(file_path.to_str().unwrap()));
assert!(NATIVE_FS.is_file(file_path.to_str().unwrap()));
}
#[test]
fn test_basic_native_fs_dir_exists() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let dir_path = tmpdir.path().join("testdir");
assert!(!NATIVE_FS.exists(dir_path.to_str().unwrap()));
NATIVE_FS
.create_dir(dir_path.to_str().expect("getting str for file failed"))
.unwrap();
assert!(NATIVE_FS.exists(dir_path.to_str().unwrap()));
assert!(NATIVE_FS.is_dir(dir_path.as_path().to_str().unwrap()));
}
#[test]
fn test_basic_native_fs_remove_file() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.expect("creating file failed");
assert!(NATIVE_FS.exists(file_path.to_str().unwrap()));
NATIVE_FS
.remove_file(file_path.to_str().unwrap())
.expect("removing file failed");
assert!(!NATIVE_FS.exists(file_path.to_str().unwrap()));
}
#[test]
fn test_basic_native_fs_write() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
assert!(!NATIVE_FS.exists(file_path.to_str().unwrap()));
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
assert!(NATIVE_FS.exists(file_path.to_str().unwrap()));
assert!(NATIVE_FS.is_file(file_path.to_str().unwrap()));
println!("{}", file_path.to_str().unwrap());
let write_data = "hello world\n";
NATIVE_FS
.write_data(file_path.to_str().unwrap(), 0, write_data.as_bytes())
.expect("writing to file failed");
let read_back = fs::read_to_string(file_path).expect("reading back data failed");
assert_eq!(read_back, write_data);
}
#[test]
fn test_basic_native_fs_read() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
assert!(!NATIVE_FS.exists(file_path.to_str().unwrap()));
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
assert!(NATIVE_FS.exists(file_path.to_str().unwrap()));
assert!(NATIVE_FS.is_file(file_path.to_str().unwrap()));
println!("{}", file_path.to_str().unwrap());
let write_data = "hello world\n";
NATIVE_FS
.write_data(file_path.to_str().unwrap(), 0, write_data.as_bytes())
.expect("writing to file failed");
let read_back = fs::read_to_string(file_path).expect("reading back data failed");
assert_eq!(read_back, write_data);
}
#[test]
fn test_truncate_file() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.expect("creating file failed");
fs::write(file_path.clone(), [1, 2, 3, 4]).unwrap();
assert_eq!(fs::read(file_path.clone()).unwrap(), [1, 2, 3, 4]);
NATIVE_FS
.truncate_file(file_path.to_str().unwrap())
.unwrap();
assert_eq!(fs::read(file_path.clone()).unwrap(), []);
}
#[test]
fn test_remove_dir() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let dir_path = tmpdir.path().join("testdir");
assert!(!NATIVE_FS.exists(dir_path.to_str().unwrap()));
NATIVE_FS
.create_dir(dir_path.to_str().expect("getting str for file failed"))
.unwrap();
assert!(NATIVE_FS.exists(dir_path.to_str().unwrap()));
NATIVE_FS
.remove_dir(dir_path.to_str().unwrap(), false)
.unwrap();
assert!(!NATIVE_FS.exists(dir_path.to_str().unwrap()));
}
#[test]
fn test_read_file() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.expect("creating file failed");
fs::write(file_path.clone(), [1, 2, 3, 4]).unwrap();
let read_buf: &mut [u8] = &mut [0; 4];
NATIVE_FS
.read_data(file_path.to_str().unwrap(), 0, 4, read_buf)
.unwrap();
assert_eq!([1, 2, 3, 4], read_buf);
NATIVE_FS
.write_data(file_path.to_str().unwrap(), 4, &[5, 6, 7, 8])
.expect("writing to file failed");
NATIVE_FS
.read_data(file_path.to_str().unwrap(), 2, 4, read_buf)
.unwrap();
assert_eq!([3, 4, 5, 6], read_buf);
}
#[test]
fn test_remove_which_does_not_exist() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
let result = NATIVE_FS.read_data(file_path.to_str().unwrap(), 0, 4, &mut [0; 4]);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::FileDoesNotExist = error {
assert_eq!(error.to_string(), "file does not exist");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_file_already_exists() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
let result =
NATIVE_FS.create_file(file_path.to_str().expect("getting str for file failed"));
assert!(result.is_ok());
let result =
NATIVE_FS.create_file(file_path.to_str().expect("getting str for file failed"));
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::FileAlreadyExists = error {
assert_eq!(error.to_string(), "file already exists");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_remove_file_with_dir_api() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
let result = NATIVE_FS.remove_dir(file_path.to_str().unwrap(), true);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::IsNotDirectory = error {
assert_eq!(error.to_string(), "is not a directory");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_remove_dir_remove_all() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let dir_path = tmpdir.path().join("test");
NATIVE_FS
.create_dir(dir_path.to_str().expect("getting str for file failed"))
.unwrap();
let file_path = dir_path.as_path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
let result = NATIVE_FS.remove_dir(dir_path.to_str().unwrap(), true);
assert!(result.is_ok());
assert!(!NATIVE_FS.exists(dir_path.to_str().unwrap()));
}
#[test]
fn test_remove_dir_with_file_api() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test");
NATIVE_FS
.create_dir(file_path.to_str().expect("getting str for file failed"))
.unwrap();
let result = NATIVE_FS.remove_file(file_path.to_str().unwrap());
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::IsNotFile = error {
assert_eq!(error.to_string(), "is not a file");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_remove_dir_which_does_not_exist() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test");
let result = NATIVE_FS.remove_dir(file_path.to_str().unwrap(), true);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::DirDoesNotExist = error {
assert_eq!(error.to_string(), "directory does not exist");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_remove_file_which_does_not_exist() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
let result = NATIVE_FS.remove_file(file_path.to_str().unwrap());
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::FileDoesNotExist = error {
assert_eq!(error.to_string(), "file does not exist");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_truncate_file_which_does_not_exist() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
let result = NATIVE_FS.truncate_file(file_path.to_str().unwrap());
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::FileDoesNotExist = error {
assert_eq!(error.to_string(), "file does not exist");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_truncate_file_on_directory() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test");
NATIVE_FS.create_dir(file_path.to_str().unwrap()).unwrap();
let result = NATIVE_FS.truncate_file(file_path.to_str().unwrap());
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::IsNotFile = error {
assert_eq!(error.to_string(), "is not a file");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_byte_conversion_error_when_reading() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
let result = NATIVE_FS.read_data(file_path.to_str().unwrap(), 0, 2, &mut []);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::ByteConversion(byte_conv_error) = error {
if let ByteConversionError::ToSliceTooSmall { found, expected } = byte_conv_error {
assert_eq!(found, 0);
assert_eq!(expected, 2);
} else {
panic!("unexpected error");
}
assert_eq!(
error.to_string(),
format!("filestore error: {}", byte_conv_error)
);
} else {
panic!("unexpected error");
}
}
#[test]
fn test_read_file_on_dir() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let dir_path = tmpdir.path().join("test");
NATIVE_FS
.create_dir(dir_path.to_str().expect("getting str for file failed"))
.unwrap();
let result = NATIVE_FS.read_data(dir_path.to_str().unwrap(), 0, 4, &mut [0; 4]);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::IsNotFile = error {
assert_eq!(error.to_string(), "is not a file");
} else {
panic!("unexpected error");
}
}
#[test]
fn test_write_file_non_existing() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
let result = NATIVE_FS.write_data(file_path.to_str().unwrap(), 0, &[]);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::FileDoesNotExist = error {
} else {
panic!("unexpected error");
}
}
#[test]
fn test_write_file_on_dir() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test");
NATIVE_FS.create_dir(file_path.to_str().unwrap()).unwrap();
let result = NATIVE_FS.write_data(file_path.to_str().unwrap(), 0, &[]);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::IsNotFile = error {
} else {
panic!("unexpected error");
}
}
#[test]
fn test_filename_extraction() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("test.txt");
NATIVE_FS
.create_file(file_path.to_str().expect("getting str for file failed"))
.unwrap();
NativeFilestore::filename_from_full_path(file_path.to_str().unwrap());
}
#[test]
fn test_modular_checksum() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("mod-crc.bin");
fs::write(file_path.as_path(), EXAMPLE_DATA_CFDP).expect("writing test file failed");
// Kind of re-writing the modular checksum impl here which we are trying to test, but the
// numbers/correctness were verified manually using calculators, so this is okay.
let mut checksum: u32 = 0;
let mut buffer: [u8; 4] = [0; 4];
for i in 0..3 {
buffer = EXAMPLE_DATA_CFDP[i * 4..(i + 1) * 4].try_into().unwrap();
checksum = checksum.wrapping_add(u32::from_be_bytes(buffer));
}
buffer[0..3].copy_from_slice(&EXAMPLE_DATA_CFDP[12..15]);
buffer[3] = 0;
checksum = checksum.wrapping_add(u32::from_be_bytes(buffer));
let mut verif_buf: [u8; 32] = [0; 32];
let result = NATIVE_FS.checksum_verify(
file_path.to_str().unwrap(),
ChecksumType::Modular,
checksum,
&mut verif_buf,
);
assert!(result.is_ok());
}
#[test]
fn test_null_checksum_impl() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("mod-crc.bin");
// The file to check does not even need to exist, and the verification buffer can be
// empty: the null checksum always yields the same result.
let result = NATIVE_FS.checksum_verify(
file_path.to_str().unwrap(),
ChecksumType::NullChecksum,
0,
&mut [],
);
assert!(result.is_ok());
assert!(result.unwrap());
}
#[test]
fn test_checksum_not_implemented() {
let tmpdir = tempdir().expect("creating tmpdir failed");
let file_path = tmpdir.path().join("mod-crc.bin");
// The file to check does not even need to exist, and the verification buffer can be
// empty: the unsupported checksum type is rejected before the file is accessed.
let result = NATIVE_FS.checksum_verify(
file_path.to_str().unwrap(),
ChecksumType::Crc32Proximity1,
0,
&mut [],
);
assert!(result.is_err());
let error = result.unwrap_err();
if let FilestoreError::ChecksumTypeNotImplemented(cksum_type) = error {
assert_eq!(
error.to_string(),
format!("checksum {:?} not implemented", cksum_type)
);
} else {
panic!("unexpected error");
}
}
}


@ -1,671 +0,0 @@
//! This module contains the implementation of the CFDP high level classes as specified in the
//! CCSDS 727.0-B-5.
use core::{cell::RefCell, fmt::Debug, hash::Hash};
use crc::{Crc, CRC_32_CKSUM};
use hashbrown::HashMap;
use spacepackets::{
cfdp::{
pdu::{FileDirectiveType, PduError, PduHeader},
ChecksumType, ConditionCode, FaultHandlerCode, PduType, TransmissionMode,
},
util::UnsignedByteField,
};
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
pub mod dest;
#[cfg(feature = "alloc")]
pub mod filestore;
#[cfg(feature = "std")]
pub mod source;
pub mod user;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum EntityType {
Sending,
Receiving,
}
pub enum TimerContext {
CheckLimit {
local_id: UnsignedByteField,
remote_id: UnsignedByteField,
entity_type: EntityType,
},
NakActivity {
expiry_time_seconds: f32,
},
PositiveAck {
expiry_time_seconds: f32,
},
}
/// Generic abstraction for a check timer which is used by 3 mechanisms of the CFDP protocol.
///
/// ## 1. Check limit handling
///
/// The first mechanism is the check limit handling for unacknowledged transfers as specified
/// in 4.6.3.2 and 4.6.3.3 of the CFDP standard.
/// For this mechanism, the timer has different functionality depending on whether
/// the using entity is the sending entity or the receiving entity for the unacknowledged
/// transmission mode.
///
/// For the sending entity, this timer determines the expiry period for declaring a check limit
/// fault after sending an EOF PDU with requested closure. This allows a timeout of the transfer.
/// Also see 4.6.3.2 of the CFDP standard.
///
/// For the receiving entity, this timer determines the expiry period for incrementing a check
/// counter after an EOF PDU is received for an incomplete file transfer. This allows out-of-order
/// reception of file data PDUs and EOF PDUs. Also see 4.6.3.3 of the CFDP standard.
///
/// ## 2. NAK activity limit
///
/// The timer will be used to perform the NAK activity check as specified in 4.6.4.7 of the CFDP
/// standard. The expiration period will be provided by the NAK timer expiration limit of the
/// remote entity configuration.
///
/// ## 3. Positive ACK procedures
///
/// The timer will be used to perform the Positive Acknowledgement Procedures as specified in
/// 4.7.1 of the CFDP standard. The expiration period will be provided by the Positive ACK timer
/// interval of the remote entity configuration.
pub trait CheckTimer: Debug {
fn has_expired(&self) -> bool;
fn reset(&mut self);
}
/// A generic trait which allows CFDP entities to create check timers which are required to
/// implement special procedures in unacknowledged transmission mode, as specified in 4.6.3.2
/// and 4.6.3.3. The [CheckTimer] documentation provides more information about the purpose of the
/// check timer in the context of CFDP.
///
/// This trait also allows the creation of different check timers depending on context and purpose
/// of the timer, the runtime environment (e.g. standard clock timer vs. timer using an RTC) or
/// other factors.
#[cfg(feature = "alloc")]
pub trait CheckTimerCreator {
fn get_check_timer_provider(&self, timer_context: TimerContext) -> Box<dyn CheckTimer>;
}
/// Simple implementation of the [CheckTimerCreator] trait assuming a standard runtime.
/// It also assumes that a second accuracy of the check timer period is sufficient.
#[cfg(feature = "std")]
#[derive(Debug)]
pub struct StdCheckTimer {
expiry_time_seconds: u64,
start_time: std::time::Instant,
}
#[cfg(feature = "std")]
impl StdCheckTimer {
pub fn new(expiry_time_seconds: u64) -> Self {
Self {
expiry_time_seconds,
start_time: std::time::Instant::now(),
}
}
}
#[cfg(feature = "std")]
impl CheckTimer for StdCheckTimer {
fn has_expired(&self) -> bool {
let elapsed_time = self.start_time.elapsed();
if elapsed_time.as_secs() > self.expiry_time_seconds {
return true;
}
false
}
fn reset(&mut self) {
self.start_time = std::time::Instant::now();
}
}
/// This structure models the remote entity configuration information as specified in chapter 8.3
/// of the CFDP standard.
/// Some of the fields which were not considered necessary for the Rust implementation
/// were omitted. Some other fields which are not contained inside the standard but are considered
/// necessary for the Rust implementation are included.
///
/// ## Notes on Positive Acknowledgment Procedures
///
/// The `positive_ack_timer_interval_seconds` and `positive_ack_timer_expiration_limit` will
/// be used for positive acknowledgement procedures as specified in CFDP chapter 4.7. The sending
/// entity will start the timer for any PDUs where an acknowledgment is required (e.g. EOF PDU).
/// Once the expected ACK response has not been received for that interval, a counter will be
/// incremented and the timer will be reset. Once the counter exceeds the
/// `positive_ack_timer_expiration_limit`, a Positive ACK Limit Reached fault will be declared.
///
/// ## Notes on Deferred Lost Segment Procedures
///
/// This procedure will be active if an EOF (No Error) PDU is received in acknowledged mode. After
/// issuing the NAK sequence which has the whole file scope, a timer will be started. The timer is
/// reset when missing segments or missing metadata is received. The timer will be deactivated if
/// all missing data is received. If the timer expires, a new NAK sequence will be issued and a
/// counter will be incremented, which can lead to a NAK Limit Reached fault being declared.
///
/// ## Fields
///
/// * `entity_id` - The ID of the remote entity.
/// * `max_packet_len` - This determines the maximum length of all PDUs generated for that remote
/// entity, in addition to the `max_file_segment_len` attribute which also determines the size of
/// file data PDUs.
/// * `max_file_segment_len` - The maximum file segment length which determines the maximum size
/// of file data PDUs in addition to the `max_packet_len` attribute. If this field is set
/// to None, the maximum file segment length will be derived from the maximum packet length.
/// If this has some value which is smaller than the segment value derived from
/// `max_packet_len`, this value will be picked.
/// * `closure_requested_by_default` - If the closure requested field is not supplied as part of
/// the Put Request, it will be determined from this field in the remote configuration.
/// * `crc_on_transmission_by_default` - If the CRC option is not supplied as part of the Put
/// Request, it will be determined from this field in the remote configuration.
/// * `default_transmission_mode` - If the transmission mode is not supplied as part of the
/// Put Request, it will be determined from this field in the remote configuration.
/// * `disposition_on_cancellation` - Determines whether an incomplete received file is discarded on
/// transaction cancellation. Defaults to False.
/// * `default_crc_type` - Default checksum type used to calculate for all file transmissions to
/// this remote entity.
/// * `check_limit` - This timer determines the expiry period for incrementing a check counter
/// after an EOF PDU is received for an incomplete file transfer. This allows out-of-order
/// reception of file data PDUs and EOF PDUs. Also see 4.6.3.3 of the CFDP standard. Defaults to
/// 2, so the check limit timer may expire twice.
/// * `positive_ack_timer_interval_seconds` - See the notes on the Positive Acknowledgment
/// Procedures inside the class documentation. Expected as floating point seconds. Defaults to
/// 10 seconds.
/// * `positive_ack_timer_expiration_limit` - See the notes on the Positive Acknowledgment
/// Procedures inside the class documentation. Defaults to 2, so the timer may expire twice.
/// * `immediate_nak_mode` - Specifies whether a NAK sequence should be issued immediately when a
/// file data gap or lost metadata is detected in the acknowledged mode. Defaults to True.
/// * `nak_timer_interval_seconds` - See the notes on the Deferred Lost Segment Procedure inside
/// the class documentation. Expected as floating point seconds. Defaults to 10 seconds.
/// * `nak_timer_expiration_limit` - See the notes on the Deferred Lost Segment Procedure inside
/// the class documentation. Defaults to 2, so the timer may expire two times.
#[derive(Debug, Copy, Clone)]
pub struct RemoteEntityConfig {
pub entity_id: UnsignedByteField,
pub max_packet_len: usize,
pub max_file_segment_len: usize,
pub closure_requested_by_default: bool,
pub crc_on_transmission_by_default: bool,
pub default_transmission_mode: TransmissionMode,
pub default_crc_type: ChecksumType,
pub positive_ack_timer_interval_seconds: f32,
pub positive_ack_timer_expiration_limit: u32,
pub check_limit: u32,
pub disposition_on_cancellation: bool,
pub immediate_nak_mode: bool,
pub nak_timer_interval_seconds: f32,
pub nak_timer_expiration_limit: u32,
}
impl RemoteEntityConfig {
pub fn new_with_default_values(
entity_id: UnsignedByteField,
max_file_segment_len: usize,
max_packet_len: usize,
closure_requested_by_default: bool,
crc_on_transmission_by_default: bool,
default_transmission_mode: TransmissionMode,
default_crc_type: ChecksumType,
) -> Self {
Self {
entity_id,
max_file_segment_len,
max_packet_len,
closure_requested_by_default,
crc_on_transmission_by_default,
default_transmission_mode,
default_crc_type,
check_limit: 2,
positive_ack_timer_interval_seconds: 10.0,
positive_ack_timer_expiration_limit: 2,
disposition_on_cancellation: false,
immediate_nak_mode: true,
nak_timer_interval_seconds: 10.0,
nak_timer_expiration_limit: 2,
}
}
}
pub trait RemoteEntityConfigProvider {
/// Retrieve the remote entity configuration for the given remote ID.
fn get_remote_config(&self, remote_id: u64) -> Option<&RemoteEntityConfig>;
fn get_remote_config_mut(&mut self, remote_id: u64) -> Option<&mut RemoteEntityConfig>;
/// Add a new remote configuration. Return [true] if the configuration was
/// inserted successfully, and [false] if a configuration already exists.
fn add_config(&mut self, cfg: &RemoteEntityConfig) -> bool;
/// Remove a configuration. Returns [true] if the configuration was removed successfully,
/// and [false] if no configuration exists for the given remote ID.
fn remove_config(&mut self, remote_id: u64) -> bool;
}
#[cfg(feature = "std")]
#[derive(Default)]
pub struct StdRemoteEntityConfigProvider {
remote_cfg_table: HashMap<u64, RemoteEntityConfig>,
}
#[cfg(feature = "std")]
impl RemoteEntityConfigProvider for StdRemoteEntityConfigProvider {
fn get_remote_config(&self, remote_id: u64) -> Option<&RemoteEntityConfig> {
self.remote_cfg_table.get(&remote_id)
}
fn get_remote_config_mut(&mut self, remote_id: u64) -> Option<&mut RemoteEntityConfig> {
self.remote_cfg_table.get_mut(&remote_id)
}
fn add_config(&mut self, cfg: &RemoteEntityConfig) -> bool {
// `insert` returns the previous entry if one existed, so an empty return value
// means the configuration was newly added, matching the documented contract.
self.remote_cfg_table
.insert(cfg.entity_id.value(), *cfg)
.is_none()
}
fn remove_config(&mut self, remote_id: u64) -> bool {
self.remote_cfg_table.remove(&remote_id).is_some()
}
}
/// This trait introduces some callbacks which will be called when a particular CFDP fault
/// handler is called.
///
/// It is passed into the CFDP handlers as part of the [DefaultFaultHandler] and the local entity
/// configuration and provides a way to specify custom user error handlers. This allows
/// implementing some CFDP features like fault handler logging, which would not be possible
/// generically otherwise.
///
/// For each error reported by the [DefaultFaultHandler], the appropriate fault handler callback
/// will be called depending on the [FaultHandlerCode].
pub trait UserFaultHandler {
fn notice_of_suspension_cb(
&mut self,
transaction_id: TransactionId,
cond: ConditionCode,
progress: u64,
);
fn notice_of_cancellation_cb(
&mut self,
transaction_id: TransactionId,
cond: ConditionCode,
progress: u64,
);
fn abandoned_cb(&mut self, transaction_id: TransactionId, cond: ConditionCode, progress: u64);
fn ignore_cb(&mut self, transaction_id: TransactionId, cond: ConditionCode, progress: u64);
}
/// This structure is used to implement the fault handling as specified in chapter 4.8 of the CFDP
/// standard.
///
/// It does so by mapping each applicable [spacepackets::cfdp::ConditionCode] to a fault handler
/// which is denoted by the four [spacepackets::cfdp::FaultHandlerCode]s. This code is used
/// to select the error handling inside the CFDP handler itself in addition to dispatching to a
/// user-provided callback function provided by the [UserFaultHandler].
///
/// Some notes on the provided default settings:
///
/// - Checksum failures will be ignored by default. This is because for unacknowledged transfers,
/// cancelling the transfer immediately would interfere with the check limit mechanism specified
/// in chapter 4.6.3.3.
/// - Unsupported checksum types will also be ignored by default. Even if the checksum type is
/// not supported the file transfer might still have worked properly.
///
/// For all other faults, the default fault handling operation will be to cancel the transaction.
/// These defaults can be overridden by using the [Self::set_fault_handler] method.
/// Please note that in any case, fault handler overrides can be specified by the sending CFDP
/// entity.
pub struct DefaultFaultHandler {
handler_array: [FaultHandlerCode; 10],
// Could also change the user fault handler trait to have non-mutable methods, but that limits
// flexibility on the user side.
user_fault_handler: RefCell<Box<dyn UserFaultHandler + Send>>,
}
impl DefaultFaultHandler {
fn condition_code_to_array_index(conditon_code: ConditionCode) -> Option<usize> {
Some(match conditon_code {
ConditionCode::PositiveAckLimitReached => 0,
ConditionCode::KeepAliveLimitReached => 1,
ConditionCode::InvalidTransmissionMode => 2,
ConditionCode::FilestoreRejection => 3,
ConditionCode::FileChecksumFailure => 4,
ConditionCode::FileSizeError => 5,
ConditionCode::NakLimitReached => 6,
ConditionCode::InactivityDetected => 7,
ConditionCode::CheckLimitReached => 8,
ConditionCode::UnsupportedChecksumType => 9,
_ => return None,
})
}
pub fn set_fault_handler(
&mut self,
condition_code: ConditionCode,
fault_handler: FaultHandlerCode,
) {
let array_idx = Self::condition_code_to_array_index(condition_code);
if array_idx.is_none() {
return;
}
self.handler_array[array_idx.unwrap()] = fault_handler;
}
pub fn new(user_fault_handler: Box<dyn UserFaultHandler + Send>) -> Self {
let mut init_array = [FaultHandlerCode::NoticeOfCancellation; 10];
init_array
[Self::condition_code_to_array_index(ConditionCode::FileChecksumFailure).unwrap()] =
FaultHandlerCode::IgnoreError;
init_array[Self::condition_code_to_array_index(ConditionCode::UnsupportedChecksumType)
.unwrap()] = FaultHandlerCode::IgnoreError;
Self {
handler_array: init_array,
user_fault_handler: RefCell::new(user_fault_handler),
}
}
pub fn get_fault_handler(&self, condition_code: ConditionCode) -> FaultHandlerCode {
let array_idx = Self::condition_code_to_array_index(condition_code);
if array_idx.is_none() {
return FaultHandlerCode::IgnoreError;
}
self.handler_array[array_idx.unwrap()]
}
pub fn report_fault(
&self,
transaction_id: TransactionId,
condition: ConditionCode,
progress: u64,
) -> FaultHandlerCode {
let array_idx = Self::condition_code_to_array_index(condition);
if array_idx.is_none() {
return FaultHandlerCode::IgnoreError;
}
let fh_code = self.handler_array[array_idx.unwrap()];
let mut handler_mut = self.user_fault_handler.borrow_mut();
match fh_code {
FaultHandlerCode::NoticeOfCancellation => {
handler_mut.notice_of_cancellation_cb(transaction_id, condition, progress);
}
FaultHandlerCode::NoticeOfSuspension => {
handler_mut.notice_of_suspension_cb(transaction_id, condition, progress);
}
FaultHandlerCode::IgnoreError => {
handler_mut.ignore_cb(transaction_id, condition, progress);
}
FaultHandlerCode::AbandonTransaction => {
handler_mut.abandoned_cb(transaction_id, condition, progress);
}
}
fh_code
}
}
pub struct IndicationConfig {
pub eof_sent: bool,
pub eof_recv: bool,
pub file_segment_recv: bool,
pub transaction_finished: bool,
pub suspended: bool,
pub resumed: bool,
}
impl Default for IndicationConfig {
fn default() -> Self {
Self {
eof_sent: true,
eof_recv: true,
file_segment_recv: true,
transaction_finished: true,
suspended: true,
resumed: true,
}
}
}
pub struct LocalEntityConfig {
pub id: UnsignedByteField,
pub indication_cfg: IndicationConfig,
pub default_fault_handler: DefaultFaultHandler,
}
/// The ID of a CFDP transaction consists of the source entity ID and the sequence number of that
/// transfer, which is also determined by the CFDP source entity.
#[derive(Debug, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct TransactionId {
source_id: UnsignedByteField,
seq_num: UnsignedByteField,
}
impl TransactionId {
pub fn new(source_id: UnsignedByteField, seq_num: UnsignedByteField) -> Self {
Self { source_id, seq_num }
}
pub fn source_id(&self) -> &UnsignedByteField {
&self.source_id
}
pub fn seq_num(&self) -> &UnsignedByteField {
&self.seq_num
}
}
impl Hash for TransactionId {
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
self.source_id.value().hash(state);
self.seq_num.value().hash(state);
}
}
impl PartialEq for TransactionId {
fn eq(&self, other: &Self) -> bool {
self.source_id.value() == other.source_id.value()
&& self.seq_num.value() == other.seq_num.value()
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum TransactionStep {
Idle = 0,
TransactionStart = 1,
ReceivingFileDataPdus = 2,
ReceivingFileDataPdusWithCheckLimitHandling = 3,
SendingAckPdu = 4,
TransferCompletion = 5,
SendingFinishedPdu = 6,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum State {
Idle = 0,
Busy = 1,
Suspended = 2,
}
pub const CRC_32: Crc<u32> = Crc::<u32>::new(&CRC_32_CKSUM);
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum PacketTarget {
SourceEntity,
DestEntity,
}
/// This is a helper struct which contains base information about a particular PDU packet.
/// This is also necessary information for CFDP packet routing. For example, some packet types
/// like file data PDUs can only be used by CFDP source entities.
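///
/// # Example
///
/// A minimal routing sketch mirroring the unit tests below; the `satrs_core::cfdp` import path
/// is assumed here.
///
/// ```
/// use satrs_core::cfdp::{PacketInfo, PacketTarget};
/// use spacepackets::cfdp::lv::Lv;
/// use spacepackets::cfdp::pdu::metadata::{MetadataGenericParams, MetadataPduCreator};
/// use spacepackets::cfdp::pdu::{CommonPduConfig, FileDirectiveType, PduHeader, WritablePduPacket};
/// use spacepackets::cfdp::PduType;
///
/// // Build a metadata PDU and serialize it into a raw buffer.
/// let pdu_header = PduHeader::new_no_file_data(CommonPduConfig::default(), 0);
/// let metadata_pdu = MetadataPduCreator::new_no_opts(
///     pdu_header,
///     MetadataGenericParams::default(),
///     Lv::new_from_str("hello.txt").unwrap(),
///     Lv::new_from_str("hello-dest.txt").unwrap(),
/// );
/// let mut buf: [u8; 128] = [0; 128];
/// metadata_pdu.write_to_bytes(&mut buf).unwrap();
///
/// // The packet info tells us that this PDU needs to be routed to a destination entity.
/// let packet_info = PacketInfo::new(&buf).unwrap();
/// assert_eq!(packet_info.pdu_type(), PduType::FileDirective);
/// assert_eq!(packet_info.pdu_directive(), Some(FileDirectiveType::MetadataPdu));
/// assert_eq!(packet_info.target(), PacketTarget::DestEntity);
/// ```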
pub struct PacketInfo<'raw_packet> {
pdu_type: PduType,
pdu_directive: Option<FileDirectiveType>,
target: PacketTarget,
raw_packet: &'raw_packet [u8],
}
impl<'raw> PacketInfo<'raw> {
pub fn new(raw_packet: &'raw [u8]) -> Result<Self, PduError> {
let (pdu_header, header_len) = PduHeader::from_bytes(raw_packet)?;
if pdu_header.pdu_type() == PduType::FileData {
return Ok(Self {
pdu_type: pdu_header.pdu_type(),
pdu_directive: None,
target: PacketTarget::DestEntity,
raw_packet,
});
}
if pdu_header.pdu_datafield_len() < 1 {
return Err(PduError::FormatError);
}
// Route depending on PDU type and directive type if applicable. Retrieve directive type
// from the raw stream for better performance (with sanity and directive code check).
// The routing is based on section 4.5 of the CFDP standard which specifies the PDU forwarding
// procedure.
let directive = FileDirectiveType::try_from(raw_packet[header_len]).map_err(|_| {
PduError::InvalidDirectiveType {
found: raw_packet[header_len],
expected: None,
}
})?;
let packet_target = match directive {
// Section c) of 4.5.3: These PDUs should always be targeted towards the file sender a.k.a.
// the source handler
FileDirectiveType::NakPdu
| FileDirectiveType::FinishedPdu
| FileDirectiveType::KeepAlivePdu => PacketTarget::SourceEntity,
// Section b) of 4.5.3: These PDUs should always be targeted towards the file receiver a.k.a.
// the destination handler
FileDirectiveType::MetadataPdu
| FileDirectiveType::EofPdu
| FileDirectiveType::PromptPdu => PacketTarget::DestEntity,
// Section a): The recipient depends on the type of PDU that is being acknowledged. We can simply
// extract the PDU type from the raw stream. If it is an EOF PDU, this packet is passed to
// the source handler, for a Finished PDU, it is passed to the destination handler.
FileDirectiveType::AckPdu => {
let acked_directive = FileDirectiveType::try_from(raw_packet[header_len + 1])
.map_err(|_| PduError::InvalidDirectiveType {
found: raw_packet[header_len + 1],
expected: None,
})?;
if acked_directive == FileDirectiveType::EofPdu {
PacketTarget::SourceEntity
} else if acked_directive == FileDirectiveType::FinishedPdu {
PacketTarget::DestEntity
} else {
// TODO: Maybe a better error? This might be confusing..
return Err(PduError::InvalidDirectiveType {
found: raw_packet[header_len + 1],
expected: None,
});
}
}
};
Ok(Self {
pdu_type: pdu_header.pdu_type(),
pdu_directive: Some(directive),
target: packet_target,
raw_packet,
})
}
pub fn pdu_type(&self) -> PduType {
self.pdu_type
}
pub fn pdu_directive(&self) -> Option<FileDirectiveType> {
self.pdu_directive
}
pub fn target(&self) -> PacketTarget {
self.target
}
pub fn raw_packet(&self) -> &[u8] {
self.raw_packet
}
}
#[cfg(test)]
mod tests {
use spacepackets::cfdp::{
lv::Lv,
pdu::{
eof::EofPdu,
file_data::FileDataPdu,
metadata::{MetadataGenericParams, MetadataPduCreator},
CommonPduConfig, FileDirectiveType, PduHeader, WritablePduPacket,
},
PduType,
};
use crate::cfdp::PacketTarget;
use super::PacketInfo;
fn generic_pdu_header() -> PduHeader {
let pdu_conf = CommonPduConfig::default();
PduHeader::new_no_file_data(pdu_conf, 0)
}
#[test]
fn test_metadata_pdu_info() {
let mut buf: [u8; 128] = [0; 128];
let pdu_header = generic_pdu_header();
let metadata_params = MetadataGenericParams::default();
let src_file_name = "hello.txt";
let dest_file_name = "hello-dest.txt";
let src_lv = Lv::new_from_str(src_file_name).unwrap();
let dest_lv = Lv::new_from_str(dest_file_name).unwrap();
let metadata_pdu =
MetadataPduCreator::new_no_opts(pdu_header, metadata_params, src_lv, dest_lv);
metadata_pdu
.write_to_bytes(&mut buf)
.expect("writing metadata PDU failed");
let packet_info = PacketInfo::new(&buf).expect("creating packet info failed");
assert_eq!(packet_info.pdu_type(), PduType::FileDirective);
assert!(packet_info.pdu_directive().is_some());
assert_eq!(
packet_info.pdu_directive().unwrap(),
FileDirectiveType::MetadataPdu
);
assert_eq!(packet_info.target(), PacketTarget::DestEntity);
}
#[test]
fn test_filedata_pdu_info() {
let mut buf: [u8; 128] = [0; 128];
let pdu_header = generic_pdu_header();
let file_data_pdu = FileDataPdu::new_no_seg_metadata(pdu_header, 0, &[]);
file_data_pdu
.write_to_bytes(&mut buf)
.expect("writing file data PDU failed");
let packet_info = PacketInfo::new(&buf).expect("creating packet info failed");
assert_eq!(packet_info.pdu_type(), PduType::FileData);
assert!(packet_info.pdu_directive().is_none());
assert_eq!(packet_info.target(), PacketTarget::DestEntity);
}
#[test]
fn test_eof_pdu_info() {
let mut buf: [u8; 128] = [0; 128];
let pdu_header = generic_pdu_header();
let eof_pdu = EofPdu::new_no_error(pdu_header, 0, 0);
eof_pdu
.write_to_bytes(&mut buf)
.expect("writing file data PDU failed");
let packet_info = PacketInfo::new(&buf).expect("creating packet info failed");
assert_eq!(packet_info.pdu_type(), PduType::FileDirective);
assert!(packet_info.pdu_directive().is_some());
assert_eq!(
packet_info.pdu_directive().unwrap(),
FileDirectiveType::EofPdu
);
}
}

View File

@ -1,15 +0,0 @@
#![allow(dead_code)]
use spacepackets::util::UnsignedByteField;
pub struct SourceHandler {
id: UnsignedByteField,
}
impl SourceHandler {
pub fn new(id: impl Into<UnsignedByteField>) -> Self {
Self { id: id.into() }
}
}
#[cfg(test)]
mod tests {}

View File

@ -1,96 +0,0 @@
use spacepackets::{
cfdp::{
pdu::{
file_data::SegmentMetadata,
finished::{DeliveryCode, FileStatus},
},
tlv::{msg_to_user::MsgToUserTlv, WritableTlv},
ConditionCode,
},
util::UnsignedByteField,
};
use super::TransactionId;
#[derive(Debug, Copy, Clone)]
pub struct TransactionFinishedParams {
pub id: TransactionId,
pub condition_code: ConditionCode,
pub delivery_code: DeliveryCode,
pub file_status: FileStatus,
}
#[derive(Debug)]
pub struct MetadataReceivedParams<'src_file, 'dest_file, 'msgs_to_user> {
pub id: TransactionId,
pub source_id: UnsignedByteField,
pub file_size: u64,
pub src_file_name: &'src_file str,
pub dest_file_name: &'dest_file str,
pub msgs_to_user: &'msgs_to_user [MsgToUserTlv<'msgs_to_user>],
}
#[cfg(feature = "alloc")]
#[derive(Debug)]
pub struct OwnedMetadataRecvdParams {
pub id: TransactionId,
pub source_id: UnsignedByteField,
pub file_size: u64,
pub src_file_name: alloc::string::String,
pub dest_file_name: alloc::string::String,
pub msgs_to_user: alloc::vec::Vec<alloc::vec::Vec<u8>>,
}
#[cfg(feature = "alloc")]
impl From<MetadataReceivedParams<'_, '_, '_>> for OwnedMetadataRecvdParams {
fn from(value: MetadataReceivedParams) -> Self {
Self::from(&value)
}
}
#[cfg(feature = "alloc")]
impl From<&MetadataReceivedParams<'_, '_, '_>> for OwnedMetadataRecvdParams {
fn from(value: &MetadataReceivedParams) -> Self {
Self {
id: value.id,
source_id: value.source_id,
file_size: value.file_size,
src_file_name: value.src_file_name.into(),
dest_file_name: value.dest_file_name.into(),
msgs_to_user: value.msgs_to_user.iter().map(|tlv| tlv.to_vec()).collect(),
}
}
}
#[derive(Debug)]
pub struct FileSegmentRecvdParams<'seg_meta> {
pub id: TransactionId,
pub offset: u64,
pub length: usize,
pub segment_metadata: Option<&'seg_meta SegmentMetadata<'seg_meta>>,
}
pub trait CfdpUser {
fn transaction_indication(&mut self, id: &TransactionId);
fn eof_sent_indication(&mut self, id: &TransactionId);
fn transaction_finished_indication(&mut self, finished_params: &TransactionFinishedParams);
fn metadata_recvd_indication(&mut self, md_recvd_params: &MetadataReceivedParams);
fn file_segment_recvd_indication(&mut self, segment_recvd_params: &FileSegmentRecvdParams);
// TODO: The standard does not strictly specify how the report information looks..
fn report_indication(&mut self, id: &TransactionId);
fn suspended_indication(&mut self, id: &TransactionId, condition_code: ConditionCode);
fn resumed_indication(&mut self, id: &TransactionId, progress: u64);
fn fault_indication(
&mut self,
id: &TransactionId,
condition_code: ConditionCode,
progress: u64,
);
fn abandoned_indication(
&mut self,
id: &TransactionId,
condition_code: ConditionCode,
progress: u64,
);
fn eof_recvd_indication(&mut self, id: &TransactionId);
}

View File

@ -1,281 +0,0 @@
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
#[cfg(feature = "alloc")]
use hashbrown::HashSet;
use spacepackets::PacketId;
use crate::tmtc::ReceivesTcCore;
pub trait PacketIdLookup {
fn validate(&self, packet_id: u16) -> bool;
}
#[cfg(feature = "alloc")]
impl PacketIdLookup for Vec<u16> {
fn validate(&self, packet_id: u16) -> bool {
self.contains(&packet_id)
}
}
#[cfg(feature = "alloc")]
impl PacketIdLookup for HashSet<u16> {
fn validate(&self, packet_id: u16) -> bool {
self.contains(&packet_id)
}
}
impl PacketIdLookup for [u16] {
fn validate(&self, packet_id: u16) -> bool {
self.binary_search(&packet_id).is_ok()
}
}
impl PacketIdLookup for &[u16] {
fn validate(&self, packet_id: u16) -> bool {
self.binary_search(&packet_id).is_ok()
}
}
#[cfg(feature = "alloc")]
impl PacketIdLookup for Vec<PacketId> {
fn validate(&self, packet_id: u16) -> bool {
self.contains(&PacketId::from(packet_id))
}
}
#[cfg(feature = "alloc")]
impl PacketIdLookup for HashSet<PacketId> {
fn validate(&self, packet_id: u16) -> bool {
self.contains(&PacketId::from(packet_id))
}
}
impl PacketIdLookup for [PacketId] {
fn validate(&self, packet_id: u16) -> bool {
self.binary_search(&PacketId::from(packet_id)).is_ok()
}
}
impl PacketIdLookup for &[PacketId] {
fn validate(&self, packet_id: u16) -> bool {
self.binary_search(&PacketId::from(packet_id)).is_ok()
}
}
/// This function parses a given buffer for tightly packed CCSDS space packets. It uses the
/// [PacketId] field of the CCSDS packets to detect the start of a CCSDS space packet and then
/// uses the length field of the packet to extract CCSDS packets.
///
/// This function is also able to deal with broken tail packets at the end as long as the parser
/// can read the full 7 bytes which constitute a space packet header plus one byte of minimal data.
/// If broken tail packets are detected, they are moved to the front of the buffer, and the write
/// index for future write operations will be written to the `next_write_idx` argument.
///
/// The parser will write all packets which were decoded successfully to the given `tc_receiver`
/// and return the number of packets found. If the [ReceivesTcCore::pass_tc] calls fails, the
/// error will be returned.
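///
/// # Example
///
/// A minimal sketch, assuming the import path of this function and that [ReceivesTcCore] only
/// requires the `pass_tc` method used by this parser. The `TcCollector` helper is hypothetical.
///
/// ```
/// use satrs_core::encoding::ccsds::parse_buffer_for_ccsds_space_packets; // assumed path
/// use satrs_core::tmtc::ReceivesTcCore;
/// use spacepackets::ecss::{tc::PusTcCreator, WritablePusPacket};
/// use spacepackets::{PacketId, SpHeader};
///
/// // Hypothetical receiver which simply stores every passed telecommand.
/// #[derive(Default)]
/// struct TcCollector {
///     tcs: Vec<Vec<u8>>,
/// }
///
/// impl ReceivesTcCore for TcCollector {
///     type Error = ();
///     fn pass_tc(&mut self, tc_raw: &[u8]) -> Result<(), Self::Error> {
///         self.tcs.push(tc_raw.to_vec());
///         Ok(())
///     }
/// }
///
/// // Write a single PUS ping TC into the buffer.
/// let mut sph = SpHeader::tc_unseg(0x02, 0, 0).unwrap();
/// let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
/// let mut buf: [u8; 32] = [0; 32];
/// let packet_len = ping_tc.write_to_bytes(&mut buf).unwrap();
///
/// let valid_ids = [PacketId::const_tc(true, 0x02)];
/// let mut collector = TcCollector::default();
/// let mut next_write_idx = 0;
/// let num_packets = parse_buffer_for_ccsds_space_packets(
///     &mut buf,
///     valid_ids.as_slice(),
///     &mut collector,
///     &mut next_write_idx,
/// )
/// .unwrap();
/// assert_eq!(num_packets, 1);
/// assert_eq!(collector.tcs[0], buf[..packet_len]);
/// ```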
pub fn parse_buffer_for_ccsds_space_packets<E>(
buf: &mut [u8],
packet_id_lookup: &(impl PacketIdLookup + ?Sized),
tc_receiver: &mut (impl ReceivesTcCore<Error = E> + ?Sized),
next_write_idx: &mut usize,
) -> Result<u32, E> {
*next_write_idx = 0;
let mut packets_found = 0;
let mut current_idx = 0;
let buf_len = buf.len();
loop {
if current_idx + 7 >= buf.len() {
break;
}
let packet_id = u16::from_be_bytes(buf[current_idx..current_idx + 2].try_into().unwrap());
if packet_id_lookup.validate(packet_id) {
let length_field =
u16::from_be_bytes(buf[current_idx + 4..current_idx + 6].try_into().unwrap());
let packet_size = length_field + 7;
if (current_idx + packet_size as usize) <= buf_len {
tc_receiver.pass_tc(&buf[current_idx..current_idx + packet_size as usize])?;
packets_found += 1;
} else {
// Move packet to start of buffer if applicable.
if current_idx > 0 {
buf.copy_within(current_idx.., 0);
*next_write_idx = buf.len() - current_idx;
}
}
current_idx += packet_size as usize;
continue;
}
current_idx += 1;
}
Ok(packets_found)
}
#[cfg(test)]
mod tests {
use spacepackets::{
ecss::{tc::PusTcCreator, WritablePusPacket},
PacketId, SpHeader,
};
use crate::encoding::tests::TcCacher;
use super::parse_buffer_for_ccsds_space_packets;
const TEST_APID_0: u16 = 0x02;
const TEST_APID_1: u16 = 0x10;
const TEST_PACKET_ID_0: PacketId = PacketId::const_tc(true, TEST_APID_0);
const TEST_PACKET_ID_1: PacketId = PacketId::const_tc(true, TEST_APID_1);
#[test]
fn test_basic() {
let mut sph = SpHeader::tc_unseg(TEST_APID_0, 0, 0).unwrap();
let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let mut buffer: [u8; 32] = [0; 32];
let packet_len = ping_tc
.write_to_bytes(&mut buffer)
.expect("writing packet failed");
let valid_packet_ids = [TEST_PACKET_ID_0];
let mut tc_cacher = TcCacher::default();
let mut next_write_idx = 0;
let parse_result = parse_buffer_for_ccsds_space_packets(
&mut buffer,
valid_packet_ids.as_slice(),
&mut tc_cacher,
&mut next_write_idx,
);
assert!(parse_result.is_ok());
let parsed_packets = parse_result.unwrap();
assert_eq!(parsed_packets, 1);
assert_eq!(tc_cacher.tc_queue.len(), 1);
assert_eq!(
tc_cacher.tc_queue.pop_front().unwrap(),
buffer[..packet_len]
);
}
#[test]
fn test_multi_packet() {
let mut sph = SpHeader::tc_unseg(TEST_APID_0, 0, 0).unwrap();
let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let action_tc = PusTcCreator::new_simple(&mut sph, 8, 0, None, true);
let mut buffer: [u8; 32] = [0; 32];
let packet_len_ping = ping_tc
.write_to_bytes(&mut buffer)
.expect("writing packet failed");
let packet_len_action = action_tc
.write_to_bytes(&mut buffer[packet_len_ping..])
.expect("writing packet failed");
let valid_packet_ids = [TEST_PACKET_ID_0];
let mut tc_cacher = TcCacher::default();
let mut next_write_idx = 0;
let parse_result = parse_buffer_for_ccsds_space_packets(
&mut buffer,
valid_packet_ids.as_slice(),
&mut tc_cacher,
&mut next_write_idx,
);
assert!(parse_result.is_ok());
let parsed_packets = parse_result.unwrap();
assert_eq!(parsed_packets, 2);
assert_eq!(tc_cacher.tc_queue.len(), 2);
assert_eq!(
tc_cacher.tc_queue.pop_front().unwrap(),
buffer[..packet_len_ping]
);
assert_eq!(
tc_cacher.tc_queue.pop_front().unwrap(),
buffer[packet_len_ping..packet_len_ping + packet_len_action]
);
}
#[test]
fn test_multi_apid() {
let mut sph = SpHeader::tc_unseg(TEST_APID_0, 0, 0).unwrap();
let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
sph = SpHeader::tc_unseg(TEST_APID_1, 0, 0).unwrap();
let action_tc = PusTcCreator::new_simple(&mut sph, 8, 0, None, true);
let mut buffer: [u8; 32] = [0; 32];
let packet_len_ping = ping_tc
.write_to_bytes(&mut buffer)
.expect("writing packet failed");
let packet_len_action = action_tc
.write_to_bytes(&mut buffer[packet_len_ping..])
.expect("writing packet failed");
let valid_packet_ids = [TEST_PACKET_ID_0, TEST_PACKET_ID_1];
let mut tc_cacher = TcCacher::default();
let mut next_write_idx = 0;
let parse_result = parse_buffer_for_ccsds_space_packets(
&mut buffer,
valid_packet_ids.as_slice(),
&mut tc_cacher,
&mut next_write_idx,
);
assert!(parse_result.is_ok());
let parsed_packets = parse_result.unwrap();
assert_eq!(parsed_packets, 2);
assert_eq!(tc_cacher.tc_queue.len(), 2);
assert_eq!(
tc_cacher.tc_queue.pop_front().unwrap(),
buffer[..packet_len_ping]
);
assert_eq!(
tc_cacher.tc_queue.pop_front().unwrap(),
buffer[packet_len_ping..packet_len_ping + packet_len_action]
);
}
#[test]
fn test_split_packet_multi() {
let mut sph = SpHeader::tc_unseg(TEST_APID_0, 0, 0).unwrap();
let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
sph = SpHeader::tc_unseg(TEST_APID_1, 0, 0).unwrap();
let action_tc = PusTcCreator::new_simple(&mut sph, 8, 0, None, true);
let mut buffer: [u8; 32] = [0; 32];
let packet_len_ping = ping_tc
.write_to_bytes(&mut buffer)
.expect("writing packet failed");
let packet_len_action = action_tc
.write_to_bytes(&mut buffer[packet_len_ping..])
.expect("writing packet failed");
let valid_packet_ids = [TEST_PACKET_ID_0, TEST_PACKET_ID_1];
let mut tc_cacher = TcCacher::default();
let mut next_write_idx = 0;
let parse_result = parse_buffer_for_ccsds_space_packets(
&mut buffer[..packet_len_ping + packet_len_action - 4],
valid_packet_ids.as_slice(),
&mut tc_cacher,
&mut next_write_idx,
);
assert!(parse_result.is_ok());
let parsed_packets = parse_result.unwrap();
assert_eq!(parsed_packets, 1);
assert_eq!(tc_cacher.tc_queue.len(), 1);
// The broken packet was moved to the start, so the next write index should be after the
// last segment missing 4 bytes.
assert_eq!(next_write_idx, packet_len_action - 4);
}
#[test]
fn test_one_split_packet() {
let mut sph = SpHeader::tc_unseg(TEST_APID_0, 0, 0).unwrap();
let ping_tc = PusTcCreator::new_simple(&mut sph, 17, 1, None, true);
let mut buffer: [u8; 32] = [0; 32];
let packet_len_ping = ping_tc
.write_to_bytes(&mut buffer)
.expect("writing packet failed");
let valid_packet_ids = [TEST_PACKET_ID_0, TEST_PACKET_ID_1];
let mut tc_cacher = TcCacher::default();
let mut next_write_idx = 0;
let parse_result = parse_buffer_for_ccsds_space_packets(
&mut buffer[..packet_len_ping - 4],
valid_packet_ids.as_slice(),
&mut tc_cacher,
&mut next_write_idx,
);
assert_eq!(next_write_idx, 0);
assert!(parse_result.is_ok());
let parsed_packets = parse_result.unwrap();
assert_eq!(parsed_packets, 0);
assert_eq!(tc_cacher.tc_queue.len(), 0);
}
}

View File

@ -1,710 +0,0 @@
//! Event management and forwarding
//!
//! This module provides components to perform event routing. The most important component for this
//! task is the [EventManager]. It receives all events and then routes them to event subscribers
//! where appropriate. One common use case for satellite systems is to offer a light-weight
//! publish-subscribe and IPC mechanism for software and hardware events, which are also
//! packaged as telemetry (TM) or can trigger a system response.
//!
//! It is recommended to read the
//! [sat-rs book chapter](https://absatsw.irs.uni-stuttgart.de/projects/sat-rs/book/events.html)
//! about events first.
//!
//! The event manager has a listener table abstracted by the [ListenerTable], which maps
//! listener groups identified by [ListenerKey]s to [sender IDs][ChannelId].
//! It also contains a sender table abstracted by the [SenderTable] which maps these sender IDs
//! to concrete [SendEventProvider]s. A simple approach would be to use one send event provider
//! for each OBSW thread and then subscribe for all interesting events for a particular thread
//! using the send event provider ID.
//!
//! This can be done with the [EventManager] like this:
//!
//! 1. Provide a concrete [EventReceiver] implementation. This abstraction allows using different
//! message queue backends. A straightforward implementation where dynamic memory allocation is
//! not a big concern could use [std::sync::mpsc::channel] to do this and is provided in
//! the form of the [MpscEventReceiver].
//! 2. To set up event creators, create channel pairs using some message queue implementation.
//! Each event creator gets a (cloned) sender component which allows it to send events to the
//! manager.
//! 3. The event manager receives the receiver component as part of an [EventReceiver]
//! implementation so all events are routed to the manager.
//! 4. Create the [send event providers][SendEventProvider]s which allow routing events to
//! subscribers. You can now use their [sender IDs][SendEventProvider::id] to subscribe for
//! event groups, for example by using the [EventManager::subscribe_single] method.
//! 5. Add the send provider as well using the [EventManager::add_sender] call so the event
//! manager can route listener groups to the send provider.
//!
//! Some components like a PUS Event Service or PUS Event Action Service might need to receive all
//! events to package them as telemetry or start actions where applicable.
//! Other components might only be interested in certain events. For example, a thermal system
//! handler might only be interested in temperature events generated by a thermal sensor component.
//!
//! # Examples
//!
//! You can check [integration test](https://egit.irs.uni-stuttgart.de/rust/sat-rs/src/branch/main/satrs-core/tests/pus_events.rs)
//! for a concrete example using multi-threading where events are routed to
//! different threads.
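//!
//! A minimal single-threaded sketch of these steps, assuming the `std` feature and the mpsc
//! helpers provided by this module (the channel ID values are arbitrary):
//!
//! ```
//! use std::sync::mpsc::{channel, SendError};
//! use satrs_core::event_man::{
//!     EventManager, EventU32WithAuxData, MpscEventReceiver, MpscEventU32SendProvider,
//!     SendEventProvider,
//! };
//! use satrs_core::events::{EventU32, Severity};
//!
//! // Steps 2 and 3: event creators send into this channel, the manager receives from it.
//! let (event_tx, manager_rx) = channel();
//! let mut event_man: EventManager<SendError<EventU32WithAuxData>> =
//!     EventManager::new(Box::new(MpscEventReceiver::new(manager_rx)));
//!
//! // Steps 4 and 5: create a send provider for one subscriber and register it.
//! let (sub_tx, sub_rx) = channel();
//! let subscriber = MpscEventU32SendProvider::new(0, sub_tx);
//! let some_event = EventU32::new(Severity::INFO, 0, 0).unwrap();
//! event_man.subscribe_single(&some_event, subscriber.id());
//! event_man.add_sender(subscriber);
//!
//! // Route one event to the subscriber.
//! event_tx.send((some_event, None)).unwrap();
//! event_man.try_event_handling().expect("event routing failed");
//! assert_eq!(sub_rx.try_recv().unwrap().0, some_event);
//! ```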
use crate::events::{EventU16, EventU32, GenericEvent, LargestEventRaw, LargestGroupIdRaw};
use crate::params::{Params, ParamsHeapless};
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use alloc::vec;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use core::slice::Iter;
#[cfg(feature = "alloc")]
use hashbrown::HashMap;
use crate::ChannelId;
#[cfg(feature = "std")]
pub use stdmod::*;
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
pub enum ListenerKey {
Single(LargestEventRaw),
Group(LargestGroupIdRaw),
All,
}
pub type EventWithHeaplessAuxData<Event> = (Event, Option<ParamsHeapless>);
pub type EventU32WithHeaplessAuxData = EventWithHeaplessAuxData<EventU32>;
pub type EventU16WithHeaplessAuxData = EventWithHeaplessAuxData<EventU16>;
pub type EventWithAuxData<Event> = (Event, Option<Params>);
pub type EventU32WithAuxData = EventWithAuxData<EventU32>;
pub type EventU16WithAuxData = EventWithAuxData<EventU16>;
pub trait SendEventProvider<Provider: GenericEvent, AuxDataProvider = Params> {
type Error;
fn id(&self) -> ChannelId;
fn send_no_data(&self, event: Provider) -> Result<(), Self::Error> {
self.send(event, None)
}
fn send(&self, event: Provider, aux_data: Option<AuxDataProvider>) -> Result<(), Self::Error>;
}
/// Generic abstraction for an event receiver.
pub trait EventReceiver<Event: GenericEvent, AuxDataProvider = Params> {
/// This function has to be provided by any event receiver. A receive call may or may not return
/// an event.
///
/// Any auxiliary data belonging to the event is returned as the second element of the returned
/// tuple, or [None] if the event carries no such data.
fn receive(&self) -> Option<(Event, Option<AuxDataProvider>)>;
}
pub trait ListenerTable {
fn get_listeners(&self) -> Vec<ListenerKey>;
fn contains_listener(&self, key: &ListenerKey) -> bool;
fn get_listener_ids(&self, key: &ListenerKey) -> Option<Iter<ChannelId>>;
fn add_listener(&mut self, key: ListenerKey, sender_id: ChannelId) -> bool;
fn remove_duplicates(&mut self, key: &ListenerKey);
}
pub trait SenderTable<SendProviderError, Event: GenericEvent = EventU32, AuxDataProvider = Params> {
fn contains_send_event_provider(&self, id: &ChannelId) -> bool;
fn get_send_event_provider(
&self,
id: &ChannelId,
) -> Option<&dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>>;
fn add_send_event_provider(
&mut self,
send_provider: Box<
dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>,
>,
) -> bool;
}
/// Generic event manager implementation.
///
/// # Generics
///
/// * `SendProviderError`: [SendEventProvider] error type
/// * `Event`: Concrete event provider, currently either [EventU32] or [EventU16]
/// * `AuxDataProvider`: Concrete auxiliary data provider, currently either [Params] or
/// [ParamsHeapless]
pub struct EventManager<SendProviderError, Event: GenericEvent = EventU32, AuxDataProvider = Params>
{
listener_table: Box<dyn ListenerTable>,
sender_table: Box<dyn SenderTable<SendProviderError, Event, AuxDataProvider>>,
event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>,
}
/// Safety: It is safe to implement [Send] because all fields in the [EventManager] are [Send]
/// as well
#[cfg(feature = "std")]
unsafe impl<E, Event: GenericEvent + Send, AuxDataProvider: Send> Send
for EventManager<E, Event, AuxDataProvider>
{
}
#[cfg(feature = "std")]
pub type EventManagerWithMpscQueue<Event, AuxDataProvider> = EventManager<
std::sync::mpsc::SendError<(Event, Option<AuxDataProvider>)>,
Event,
AuxDataProvider,
>;
#[derive(Debug)]
pub enum EventRoutingResult<Event: GenericEvent, AuxDataProvider> {
/// No event was received
Empty,
/// An event was received and routed.
/// The first tuple entry will contain the number of recipients.
Handled(u32, Event, Option<AuxDataProvider>),
}
#[derive(Debug)]
pub enum EventRoutingError<E> {
SendError(E),
NoSendersForKey(ListenerKey),
NoSenderForId(ChannelId),
}
#[derive(Debug)]
pub struct EventRoutingErrorsWithResult<Event: GenericEvent, AuxDataProvider, E> {
pub result: EventRoutingResult<Event, AuxDataProvider>,
pub errors: [Option<EventRoutingError<E>>; 3],
}
impl<E, Event: GenericEvent + Copy> EventManager<E, Event> {
pub fn remove_duplicates(&mut self, key: &ListenerKey) {
self.listener_table.remove_duplicates(key)
}
/// Subscribe for a unique event.
pub fn subscribe_single(&mut self, event: &Event, sender_id: ChannelId) {
self.update_listeners(ListenerKey::Single(event.raw_as_largest_type()), sender_id);
}
/// Subscribe for an event group.
pub fn subscribe_group(&mut self, group_id: LargestGroupIdRaw, sender_id: ChannelId) {
self.update_listeners(ListenerKey::Group(group_id), sender_id);
}
/// Subscribe for all events received by the manager.
///
/// For example, this can be useful for a handler component which sends every event as
/// a telemetry packet.
pub fn subscribe_all(&mut self, sender_id: ChannelId) {
self.update_listeners(ListenerKey::All, sender_id);
}
}
impl<E: 'static, Event: GenericEvent + Copy + 'static, AuxDataProvider: Clone + 'static>
EventManager<E, Event, AuxDataProvider>
{
/// Create an event manager where the sender table will be the [DefaultSenderTableProvider]
/// and the listener table will be the [DefaultListenerTableProvider].
pub fn new(event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>) -> Self {
let listener_table: Box<DefaultListenerTableProvider> = Box::default();
let sender_table: Box<DefaultSenderTableProvider<E, Event, AuxDataProvider>> =
Box::default();
Self::new_custom_tables(listener_table, sender_table, event_receiver)
}
}
impl<E, Event: GenericEvent + Copy, AuxDataProvider: Clone>
EventManager<E, Event, AuxDataProvider>
{
pub fn new_custom_tables(
listener_table: Box<dyn ListenerTable>,
sender_table: Box<dyn SenderTable<E, Event, AuxDataProvider>>,
event_receiver: Box<dyn EventReceiver<Event, AuxDataProvider>>,
) -> Self {
EventManager {
listener_table,
sender_table,
event_receiver,
}
}
pub fn add_sender(
&mut self,
send_provider: impl SendEventProvider<Event, AuxDataProvider, Error = E> + 'static,
) {
if !self
.sender_table
.contains_send_event_provider(&send_provider.id())
{
self.sender_table
.add_send_event_provider(Box::new(send_provider));
}
}
fn update_listeners(&mut self, key: ListenerKey, sender_id: ChannelId) {
self.listener_table.add_listener(key, sender_id);
}
/// This function will use the cached event receiver and try to receive one event.
/// If an event was received, it will try to route that event to all subscribed event listeners.
/// If this works without any issues, the [EventRoutingResult] will contain context information
/// about the routed event.
///
/// This function will track up to 3 errors returned as part of the
/// [EventRoutingErrorsWithResult] error struct.
pub fn try_event_handling(
&self,
) -> Result<
EventRoutingResult<Event, AuxDataProvider>,
EventRoutingErrorsWithResult<Event, AuxDataProvider, E>,
> {
let mut err_idx = 0;
let mut err_slice = [None, None, None];
let mut num_recipients = 0;
let mut add_error = |error: EventRoutingError<E>| {
if err_idx < 3 {
err_slice[err_idx] = Some(error);
err_idx += 1;
}
};
let mut send_handler =
|key: &ListenerKey, event: Event, aux_data: &Option<AuxDataProvider>| {
if self.listener_table.contains_listener(key) {
if let Some(ids) = self.listener_table.get_listener_ids(key) {
for id in ids {
if let Some(sender) = self.sender_table.get_send_event_provider(id) {
if let Err(e) = sender.send(event, aux_data.clone()) {
add_error(EventRoutingError::SendError(e));
} else {
num_recipients += 1;
}
} else {
add_error(EventRoutingError::NoSenderForId(*id));
}
}
} else {
add_error(EventRoutingError::NoSendersForKey(*key));
}
}
};
if let Some((event, aux_data)) = self.event_receiver.receive() {
let single_key = ListenerKey::Single(event.raw_as_largest_type());
send_handler(&single_key, event, &aux_data);
let group_key = ListenerKey::Group(event.group_id_as_largest_type());
send_handler(&group_key, event, &aux_data);
send_handler(&ListenerKey::All, event, &aux_data);
if err_idx > 0 {
return Err(EventRoutingErrorsWithResult {
result: EventRoutingResult::Handled(num_recipients, event, aux_data),
errors: err_slice,
});
}
return Ok(EventRoutingResult::Handled(num_recipients, event, aux_data));
}
Ok(EventRoutingResult::Empty)
}
}
#[derive(Default)]
pub struct DefaultListenerTableProvider {
listeners: HashMap<ListenerKey, Vec<ChannelId>>,
}
pub struct DefaultSenderTableProvider<
SendProviderError,
Event: GenericEvent = EventU32,
AuxDataProvider = Params,
> {
senders: HashMap<
ChannelId,
Box<dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>>,
>,
}
impl<SendProviderError, Event: GenericEvent, AuxDataProvider> Default
for DefaultSenderTableProvider<SendProviderError, Event, AuxDataProvider>
{
fn default() -> Self {
Self {
senders: HashMap::new(),
}
}
}
impl ListenerTable for DefaultListenerTableProvider {
fn get_listeners(&self) -> Vec<ListenerKey> {
let mut key_list = Vec::new();
for key in self.listeners.keys() {
key_list.push(*key);
}
key_list
}
fn contains_listener(&self, key: &ListenerKey) -> bool {
self.listeners.contains_key(key)
}
fn get_listener_ids(&self, key: &ListenerKey) -> Option<Iter<ChannelId>> {
self.listeners.get(key).map(|vec| vec.iter())
}
fn add_listener(&mut self, key: ListenerKey, sender_id: ChannelId) -> bool {
if let Some(existing_list) = self.listeners.get_mut(&key) {
existing_list.push(sender_id);
} else {
let new_list = vec![sender_id];
self.listeners.insert(key, new_list);
}
true
}
fn remove_duplicates(&mut self, key: &ListenerKey) {
if let Some(list) = self.listeners.get_mut(key) {
list.sort_unstable();
list.dedup();
}
}
}
impl<SendProviderError, Event: GenericEvent, AuxDataProvider>
SenderTable<SendProviderError, Event, AuxDataProvider>
for DefaultSenderTableProvider<SendProviderError, Event, AuxDataProvider>
{
fn contains_send_event_provider(&self, id: &ChannelId) -> bool {
self.senders.contains_key(id)
}
fn get_send_event_provider(
&self,
id: &ChannelId,
) -> Option<&dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>> {
self.senders
.get(id)
.filter(|sender| sender.id() == *id)
.map(|v| v.as_ref())
}
fn add_send_event_provider(
&mut self,
send_provider: Box<
dyn SendEventProvider<Event, AuxDataProvider, Error = SendProviderError>,
>,
) -> bool {
let id = send_provider.id();
if self.senders.contains_key(&id) {
return false;
}
self.senders.insert(id, send_provider).is_none()
}
}
#[cfg(feature = "std")]
pub mod stdmod {
use super::*;
use crate::event_man::{EventReceiver, EventWithAuxData};
use crate::events::{EventU16, EventU32, GenericEvent};
use crate::params::Params;
use std::sync::mpsc::{Receiver, SendError, Sender};
pub struct MpscEventReceiver<Event: GenericEvent + Send = EventU32> {
mpsc_receiver: Receiver<(Event, Option<Params>)>,
}
impl<Event: GenericEvent + Send> MpscEventReceiver<Event> {
pub fn new(receiver: Receiver<(Event, Option<Params>)>) -> Self {
Self {
mpsc_receiver: receiver,
}
}
}
impl<Event: GenericEvent + Send> EventReceiver<Event> for MpscEventReceiver<Event> {
fn receive(&self) -> Option<EventWithAuxData<Event>> {
if let Ok(event_and_data) = self.mpsc_receiver.try_recv() {
return Some(event_and_data);
}
None
}
}
pub type MpscEventU32Receiver = MpscEventReceiver<EventU32>;
pub type MpscEventU16Receiver = MpscEventReceiver<EventU16>;
#[derive(Clone)]
pub struct MpscEventSendProvider<Event: GenericEvent + Send> {
id: u32,
sender: Sender<(Event, Option<Params>)>,
}
impl<Event: GenericEvent + Send> MpscEventSendProvider<Event> {
pub fn new(id: u32, sender: Sender<(Event, Option<Params>)>) -> Self {
Self { id, sender }
}
}
impl<Event: GenericEvent + Send> SendEventProvider<Event> for MpscEventSendProvider<Event> {
type Error = SendError<(Event, Option<Params>)>;
fn id(&self) -> u32 {
self.id
}
fn send(&self, event: Event, aux_data: Option<Params>) -> Result<(), Self::Error> {
self.sender.send((event, aux_data))
}
}
pub type MpscEventU32SendProvider = MpscEventSendProvider<EventU32>;
pub type MpscEventU16SendProvider = MpscEventSendProvider<EventU16>;
}
#[cfg(test)]
mod tests {
use super::*;
use crate::event_man::EventManager;
use crate::events::{EventU32, GenericEvent, Severity};
use crate::params::ParamsRaw;
use alloc::boxed::Box;
use std::format;
use std::sync::mpsc::{channel, Receiver, SendError, Sender};
#[derive(Clone)]
struct MpscEventSenderQueue {
id: u32,
mpsc_sender: Sender<EventU32WithAuxData>,
}
impl MpscEventSenderQueue {
fn new(id: u32, mpsc_sender: Sender<EventU32WithAuxData>) -> Self {
Self { id, mpsc_sender }
}
}
impl SendEventProvider<EventU32> for MpscEventSenderQueue {
type Error = SendError<EventU32WithAuxData>;
fn id(&self) -> u32 {
self.id
}
fn send(&self, event: EventU32, aux_data: Option<Params>) -> Result<(), Self::Error> {
self.mpsc_sender.send((event, aux_data))
}
}
fn check_next_event(
expected: EventU32,
receiver: &Receiver<EventU32WithAuxData>,
) -> Option<Params> {
if let Ok(event) = receiver.try_recv() {
assert_eq!(event.0, expected);
return event.1;
}
None
}
fn check_handled_event(
res: EventRoutingResult<EventU32, Params>,
expected: EventU32,
expected_num_sent: u32,
) {
assert!(matches!(res, EventRoutingResult::Handled { .. }));
if let EventRoutingResult::Handled(num_recipients, event, _aux_data) = res {
assert_eq!(event, expected);
assert_eq!(num_recipients, expected_num_sent);
}
}
fn generic_event_man() -> (
Sender<EventU32WithAuxData>,
EventManager<SendError<EventU32WithAuxData>>,
) {
let (event_sender, manager_queue) = channel();
let event_man_receiver = MpscEventReceiver::new(manager_queue);
(
event_sender,
EventManager::new(Box::new(event_man_receiver)),
)
}
#[test]
fn test_basic() {
let (event_sender, mut event_man) = generic_event_man();
let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap();
let event_grp_1_0 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (single_event_sender, single_event_receiver) = channel();
let single_event_listener = MpscEventSenderQueue::new(0, single_event_sender);
event_man.subscribe_single(&event_grp_0, single_event_listener.id());
event_man.add_sender(single_event_listener);
let (group_event_sender_0, group_event_receiver_0) = channel();
let group_event_listener = MpscEventSenderQueue {
id: 1,
mpsc_sender: group_event_sender_0,
};
event_man.subscribe_group(event_grp_1_0.group_id(), group_event_listener.id());
event_man.add_sender(group_event_listener);
// Test event with one listener
event_sender
.send((event_grp_0, None))
.expect("Sending single error failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_grp_0, 1);
check_next_event(event_grp_0, &single_event_receiver);
// Test event which is sent to all group listeners
event_sender
.send((event_grp_1_0, None))
.expect("Sending group error failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_grp_1_0, 1);
check_next_event(event_grp_1_0, &group_event_receiver_0);
}
#[test]
fn test_with_basic_aux_data() {
let (event_sender, mut event_man) = generic_event_man();
let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap();
let (single_event_sender, single_event_receiver) = channel();
let single_event_listener = MpscEventSenderQueue::new(0, single_event_sender);
event_man.subscribe_single(&event_grp_0, single_event_listener.id());
event_man.add_sender(single_event_listener);
event_sender
.send((event_grp_0, Some(Params::Heapless((2_u32, 3_u32).into()))))
.expect("Sending group error failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_grp_0, 1);
let aux = check_next_event(event_grp_0, &single_event_receiver);
assert!(aux.is_some());
let aux = aux.unwrap();
if let Params::Heapless(ParamsHeapless::Raw(ParamsRaw::U32Pair(pair))) = aux {
assert_eq!(pair.0, 2);
assert_eq!(pair.1, 3);
} else {
panic!("{}", format!("Unexpected auxiliary value type {:?}", aux));
}
}
/// Test listening for multiple groups
#[test]
fn test_multi_group() {
let (event_sender, mut event_man) = generic_event_man();
let res = event_man.try_event_handling();
assert!(res.is_ok());
let hres = res.unwrap();
assert!(matches!(hres, EventRoutingResult::Empty));
let event_grp_0 = EventU32::new(Severity::INFO, 0, 0).unwrap();
let event_grp_1_0 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (event_grp_0_sender, event_grp_0_receiver) = channel();
let event_grp_0_and_1_listener = MpscEventSenderQueue {
id: 0,
mpsc_sender: event_grp_0_sender,
};
event_man.subscribe_group(event_grp_0.group_id(), event_grp_0_and_1_listener.id());
event_man.subscribe_group(event_grp_1_0.group_id(), event_grp_0_and_1_listener.id());
event_man.add_sender(event_grp_0_and_1_listener);
event_sender
.send((event_grp_0, None))
.expect("Sending Event Group 0 failed");
event_sender
.send((event_grp_1_0, None))
.expect("Sendign Event Group 1 failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_grp_0, 1);
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_grp_1_0, 1);
check_next_event(event_grp_0, &event_grp_0_receiver);
check_next_event(event_grp_1_0, &event_grp_0_receiver);
}
/// Test listening to the same event from multiple listeners. Also test listening
/// to both group and single events from one listener
#[test]
fn test_listening_to_same_event_and_multi_type() {
let (event_sender, mut event_man) = generic_event_man();
let event_0 = EventU32::new(Severity::INFO, 0, 5).unwrap();
let event_1 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (event_0_tx_0, event_0_rx_0) = channel();
let (event_0_tx_1, event_0_rx_1) = channel();
let event_listener_0 = MpscEventSenderQueue {
id: 0,
mpsc_sender: event_0_tx_0,
};
let event_listener_1 = MpscEventSenderQueue {
id: 1,
mpsc_sender: event_0_tx_1,
};
let event_listener_0_sender_id = event_listener_0.id();
event_man.subscribe_single(&event_0, event_listener_0_sender_id);
event_man.add_sender(event_listener_0);
let event_listener_1_sender_id = event_listener_1.id();
event_man.subscribe_single(&event_0, event_listener_1_sender_id);
event_man.add_sender(event_listener_1);
event_sender
.send((event_0, None))
.expect("Triggering Event 0 failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_0, 2);
check_next_event(event_0, &event_0_rx_0);
check_next_event(event_0, &event_0_rx_1);
event_man.subscribe_group(event_1.group_id(), event_listener_0_sender_id);
event_sender
.send((event_0, None))
.expect("Triggering Event 0 failed");
event_sender
.send((event_1, None))
.expect("Triggering Event 1 failed");
// 3 Events messages will be sent now
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_0, 2);
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_1, 1);
// Both the single event and the group event should arrive now
check_next_event(event_0, &event_0_rx_0);
check_next_event(event_1, &event_0_rx_0);
// Do double insertion and then remove duplicates
event_man.subscribe_group(event_1.group_id(), event_listener_0_sender_id);
event_man.remove_duplicates(&ListenerKey::Group(event_1.group_id()));
event_sender
.send((event_1, None))
.expect("Triggering Event 1 failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_1, 1);
}
#[test]
fn test_all_events_listener() {
let (event_sender, manager_queue) = channel();
let event_man_receiver = MpscEventReceiver::new(manager_queue);
let mut event_man: EventManager<SendError<EventU32WithAuxData>> =
EventManager::new(Box::new(event_man_receiver));
let event_0 = EventU32::new(Severity::INFO, 0, 5).unwrap();
let event_1 = EventU32::new(Severity::HIGH, 1, 0).unwrap();
let (event_0_tx_0, all_events_rx) = channel();
let all_events_listener = MpscEventSenderQueue {
id: 0,
mpsc_sender: event_0_tx_0,
};
event_man.subscribe_all(all_events_listener.id());
event_man.add_sender(all_events_listener);
event_sender
.send((event_0, None))
.expect("Triggering event 0 failed");
event_sender
.send((event_1, None))
.expect("Triggering event 1 failed");
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_0, 1);
let res = event_man.try_event_handling();
assert!(res.is_ok());
check_handled_event(res.unwrap(), event_1, 1);
check_next_event(event_0, &all_events_rx);
check_next_event(event_1, &all_events_rx);
}
}

View File

@ -1,16 +0,0 @@
pub type CollectionIntervalFactor = u32;
pub type UniqueId = u32;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum HkRequest {
OneShot(UniqueId),
Enable(UniqueId),
Disable(UniqueId),
ModifyCollectionInterval(UniqueId, CollectionIntervalFactor),
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct TargetedHkRequest {
target: u32,
hk_request: HkRequest,
}

View File

@ -1,50 +0,0 @@
//! # Core components of the sat-rs framework
//!
//! You can find more information about the sat-rs framework on the
//! [homepage](https://egit.irs.uni-stuttgart.de/rust/sat-rs).
//!
//! ## Overview
//!
//! The core modules of this crate include
//!
//! - The [event manager][event_man] module which provides a publish
//! and subscribe mechanism to route events.
//! - The [pus] module which provides special support for projects using
//! the [ECSS PUS C standard](https://ecss.nl/standard/ecss-e-st-70-41c-space-engineering-telemetry-and-telecommand-packet-utilization-15-april-2016/).
#![no_std]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
extern crate downcast_rs;
#[cfg(any(feature = "std", test))]
extern crate std;
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub mod cfdp;
pub mod encoding;
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub mod event_man;
pub mod events;
#[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
pub mod executable;
pub mod hal;
pub mod hk;
pub mod mode;
pub mod objects;
pub mod params;
pub mod pool;
pub mod power;
pub mod pus;
pub mod request;
pub mod res_code;
pub mod seq_count;
pub mod tmtc;
pub use spacepackets;
// Generic channel ID type.
pub type ChannelId = u32;

View File

@ -1,94 +0,0 @@
use crate::tmtc::TargetId;
use core::mem::size_of;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use spacepackets::ByteConversionError;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ModeAndSubmode {
mode: u32,
submode: u16,
}
impl ModeAndSubmode {
pub const fn new_mode_only(mode: u32) -> Self {
Self { mode, submode: 0 }
}
pub const fn new(mode: u32, submode: u16) -> Self {
Self { mode, submode }
}
pub fn raw_len() -> usize {
size_of::<u32>() + size_of::<u16>()
}
pub fn from_be_bytes(buf: &[u8]) -> Result<Self, ByteConversionError> {
if buf.len() < 6 {
return Err(ByteConversionError::FromSliceTooSmall {
expected: 6,
found: buf.len(),
});
}
Ok(Self {
mode: u32::from_be_bytes(buf[0..4].try_into().unwrap()),
submode: u16::from_be_bytes(buf[4..6].try_into().unwrap()),
})
}
pub fn mode(&self) -> u32 {
self.mode
}
pub fn submode(&self) -> u16 {
self.submode
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct ModeCommand {
pub address: TargetId,
pub mode_submode: ModeAndSubmode,
}
impl ModeCommand {
pub const fn new(address: TargetId, mode_submode: ModeAndSubmode) -> Self {
Self {
address,
mode_submode,
}
}
pub fn address(&self) -> TargetId {
self.address
}
pub fn mode_submode(&self) -> ModeAndSubmode {
self.mode_submode
}
pub fn mode(&self) -> u32 {
self.mode_submode.mode
}
pub fn submode(&self) -> u16 {
self.mode_submode.submode
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum ModeRequest {
SetMode(ModeAndSubmode),
ReadMode,
AnnounceMode,
AnnounceModeRecursive,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct TargetedModeRequest {
target_id: TargetId,
mode_request: ModeRequest,
}

View File

@ -1,307 +0,0 @@
//! # Module providing addressable object support and a manager for them
//!
//! Each addressable object can be identified using an [object ID][ObjectId].
//! The [system object][ManagedSystemObject] trait also allows storing these objects into the
//! [object manager][ObjectManager]. They can then be retrieved and cast back to a known type
//! using the object ID.
//!
//! # Examples
//!
//! ```rust
//! use std::any::Any;
//! use std::error::Error;
//! use satrs_core::objects::{ManagedSystemObject, ObjectId, ObjectManager, SystemObject};
//!
//! struct ExampleSysObj {
//! id: ObjectId,
//! dummy: u32,
//! was_initialized: bool,
//! }
//!
//! impl ExampleSysObj {
//! fn new(id: ObjectId, dummy: u32) -> ExampleSysObj {
//! ExampleSysObj {
//! id,
//! dummy,
//! was_initialized: false,
//! }
//! }
//! }
//!
//! impl SystemObject for ExampleSysObj {
//! type Error = ();
//! fn get_object_id(&self) -> &ObjectId {
//! &self.id
//! }
//!
//! fn initialize(&mut self) -> Result<(), Self::Error> {
//! self.was_initialized = true;
//! Ok(())
//! }
//! }
//!
//! impl ManagedSystemObject for ExampleSysObj {}
//!
//! let mut obj_manager = ObjectManager::default();
//! let obj_id = ObjectId { id: 0, name: "Example 0"};
//! let example_obj = ExampleSysObj::new(obj_id, 42);
//! obj_manager.insert(Box::new(example_obj));
//! let obj_back_casted: Option<&ExampleSysObj> = obj_manager.get_ref(&obj_id);
//! let example_obj = obj_back_casted.unwrap();
//! assert_eq!(example_obj.id, obj_id);
//! assert_eq!(example_obj.dummy, 42);
//! ```
use crate::tmtc::TargetId;
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
#[cfg(feature = "alloc")]
use downcast_rs::Downcast;
#[cfg(feature = "alloc")]
use hashbrown::HashMap;
#[cfg(feature = "std")]
use std::error::Error;
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
pub struct ObjectId {
pub id: TargetId,
pub name: &'static str,
}
#[cfg(feature = "alloc")]
pub mod alloc_mod {
use super::*;
/// Each object which is stored inside the [object manager][ObjectManager] needs to implement
/// this trait.
pub trait SystemObject: Downcast {
type Error;
fn get_object_id(&self) -> &ObjectId;
fn initialize(&mut self) -> Result<(), Self::Error>;
}
downcast_rs::impl_downcast!(SystemObject assoc Error);
pub trait ManagedSystemObject: SystemObject + Send {}
downcast_rs::impl_downcast!(ManagedSystemObject assoc Error);
/// Helper module to manage multiple [ManagedSystemObjects][ManagedSystemObject] by mapping them
/// using an [object ID][ObjectId]
#[cfg(feature = "alloc")]
pub struct ObjectManager<E> {
obj_map: HashMap<ObjectId, Box<dyn ManagedSystemObject<Error = E>>>,
}
#[cfg(feature = "alloc")]
impl<E: 'static> Default for ObjectManager<E> {
fn default() -> Self {
Self::new()
}
}
#[cfg(feature = "alloc")]
impl<E: 'static> ObjectManager<E> {
pub fn new() -> Self {
ObjectManager {
obj_map: HashMap::new(),
}
}
pub fn insert(&mut self, sys_obj: Box<dyn ManagedSystemObject<Error = E>>) -> bool {
let obj_id = sys_obj.get_object_id();
if self.obj_map.contains_key(obj_id) {
return false;
}
self.obj_map.insert(*obj_id, sys_obj).is_none()
}
/// Initializes all System Objects in the hash map and returns the number of successful
/// initializations
pub fn initialize(&mut self) -> Result<u32, Box<dyn Error>> {
let mut init_success = 0;
for val in self.obj_map.values_mut() {
if val.initialize().is_ok() {
init_success += 1
}
}
Ok(init_success)
}
/// Retrieve a reference to an object stored inside the manager. The type to retrieve needs to
/// be explicitly passed as a generic parameter or specified on the left hand side of the
/// expression.
pub fn get_ref<T: ManagedSystemObject<Error = E>>(&self, key: &ObjectId) -> Option<&T> {
self.obj_map.get(key).and_then(|o| o.downcast_ref::<T>())
}
/// Retrieve a mutable reference to an object stored inside the manager. The type to retrieve
/// needs to be explicitly passed as a generic parameter or specified on the left hand side
/// of the expression.
pub fn get_mut<T: ManagedSystemObject<Error = E>>(
&mut self,
key: &ObjectId,
) -> Option<&mut T> {
self.obj_map
.get_mut(key)
.and_then(|o| o.downcast_mut::<T>())
}
}
}
#[cfg(test)]
mod tests {
use crate::objects::{ManagedSystemObject, ObjectId, ObjectManager, SystemObject};
use std::boxed::Box;
use std::string::String;
use std::sync::{Arc, Mutex};
use std::thread;
struct ExampleSysObj {
id: ObjectId,
dummy: u32,
was_initialized: bool,
}
impl ExampleSysObj {
fn new(id: ObjectId, dummy: u32) -> ExampleSysObj {
ExampleSysObj {
id,
dummy,
was_initialized: false,
}
}
}
impl SystemObject for ExampleSysObj {
type Error = ();
fn get_object_id(&self) -> &ObjectId {
&self.id
}
fn initialize(&mut self) -> Result<(), Self::Error> {
self.was_initialized = true;
Ok(())
}
}
impl ManagedSystemObject for ExampleSysObj {}
struct OtherExampleObject {
id: ObjectId,
string: String,
was_initialized: bool,
}
impl SystemObject for OtherExampleObject {
type Error = ();
fn get_object_id(&self) -> &ObjectId {
&self.id
}
fn initialize(&mut self) -> Result<(), Self::Error> {
self.was_initialized = true;
Ok(())
}
}
impl ManagedSystemObject for OtherExampleObject {}
#[test]
fn test_obj_manager_simple() {
let mut obj_manager = ObjectManager::default();
let expl_obj_id = ObjectId {
id: 0,
name: "Example 0",
};
let example_obj = ExampleSysObj::new(expl_obj_id, 42);
assert!(obj_manager.insert(Box::new(example_obj)));
let res = obj_manager.initialize();
assert!(res.is_ok());
assert_eq!(res.unwrap(), 1);
let obj_back_casted: Option<&ExampleSysObj> = obj_manager.get_ref(&expl_obj_id);
assert!(obj_back_casted.is_some());
let expl_obj_back_casted = obj_back_casted.unwrap();
assert_eq!(expl_obj_back_casted.dummy, 42);
assert!(expl_obj_back_casted.was_initialized);
let second_obj_id = ObjectId {
id: 12,
name: "Example 1",
};
let second_example_obj = OtherExampleObject {
id: second_obj_id,
string: String::from("Hello Test"),
was_initialized: false,
};
assert!(obj_manager.insert(Box::new(second_example_obj)));
let res = obj_manager.initialize();
assert!(res.is_ok());
assert_eq!(res.unwrap(), 2);
let obj_back_casted: Option<&OtherExampleObject> = obj_manager.get_ref(&second_obj_id);
assert!(obj_back_casted.is_some());
let expl_obj_back_casted = obj_back_casted.unwrap();
assert_eq!(expl_obj_back_casted.string, String::from("Hello Test"));
assert!(expl_obj_back_casted.was_initialized);
let existing_obj_id = ObjectId {
id: 12,
name: "Example 1",
};
let invalid_obj = OtherExampleObject {
id: existing_obj_id,
string: String::from("Hello Test"),
was_initialized: false,
};
assert!(!obj_manager.insert(Box::new(invalid_obj)));
}
#[test]
fn object_man_threaded() {
let obj_manager = Arc::new(Mutex::new(ObjectManager::new()));
let expl_obj_id = ObjectId {
id: 0,
name: "Example 0",
};
let example_obj = ExampleSysObj::new(expl_obj_id, 42);
let second_obj_id = ObjectId {
id: 12,
name: "Example 1",
};
let second_example_obj = OtherExampleObject {
id: second_obj_id,
string: String::from("Hello Test"),
was_initialized: false,
};
let mut obj_man_handle = obj_manager.lock().expect("Mutex lock failed");
assert!(obj_man_handle.insert(Box::new(example_obj)));
assert!(obj_man_handle.insert(Box::new(second_example_obj)));
let res = obj_man_handle.initialize();
std::mem::drop(obj_man_handle);
assert!(res.is_ok());
assert_eq!(res.unwrap(), 2);
let obj_man_0 = obj_manager.clone();
let jh0 = thread::spawn(move || {
let locked_man = obj_man_0.lock().expect("Mutex lock failed");
let obj_back_casted: Option<&ExampleSysObj> = locked_man.get_ref(&expl_obj_id);
assert!(obj_back_casted.is_some());
let expl_obj_back_casted = obj_back_casted.unwrap();
assert_eq!(expl_obj_back_casted.dummy, 42);
assert!(expl_obj_back_casted.was_initialized);
std::mem::drop(locked_man)
});
let jh1 = thread::spawn(move || {
let locked_man = obj_manager.lock().expect("Mutex lock failed");
let obj_back_casted: Option<&OtherExampleObject> = locked_man.get_ref(&second_obj_id);
assert!(obj_back_casted.is_some());
let expl_obj_back_casted = obj_back_casted.unwrap();
assert_eq!(expl_obj_back_casted.string, String::from("Hello Test"));
assert!(expl_obj_back_casted.was_initialized);
std::mem::drop(locked_man)
});
jh0.join().expect("Joining thread 0 failed");
jh1.join().expect("Joining thread 1 failed");
}
}

View File

@ -1,679 +0,0 @@
//! Parameter types and enums.
//!
//! This module contains various helper types.
//!
//! # Primitive Parameter Wrappers and Enumeration
//!
//! This module includes wrappers for primitive Rust types using the newtype pattern.
//! This was also done for pairs and triplets of these primitive types.
//! The [WritableToBeBytes] trait was implemented for all those types as well, which allows easily
//! converting them into a network-friendly raw byte format. The [ParamsRaw] enumeration groups
//! all newtypes and implements the [WritableToBeBytes] trait itself.
//!
//! ## Example for primitive type wrapper
//!
//! ```
//! use satrs_core::params::{ParamsRaw, ToBeBytes, U32Pair, WritableToBeBytes};
//!
//! let u32_pair = U32Pair(0x1010, 25);
//! assert_eq!(u32_pair.0, 0x1010);
//! assert_eq!(u32_pair.1, 25);
//! // Convert to raw stream
//! let raw_buf = u32_pair.to_be_bytes();
//! assert_eq!(raw_buf, [0, 0, 0x10, 0x10, 0, 0, 0, 25]);
//!
//! // Convert to enum variant
//! let params_raw: ParamsRaw = u32_pair.into();
//! assert_eq!(params_raw, (0x1010_u32, 25_u32).into());
//!
//! // Convert to stream using the enum variant
//! let mut other_raw_buf: [u8; 8] = [0; 8];
//! params_raw.write_to_be_bytes(&mut other_raw_buf).expect("Writing parameter to buffer failed");
//! assert_eq!(other_raw_buf, [0, 0, 0x10, 0x10, 0, 0, 0, 25]);
//!
//! // Create a pair from a raw stream
//! let u32_pair_from_stream: U32Pair = raw_buf.as_slice().try_into().unwrap();
//! assert_eq!(u32_pair_from_stream.0, 0x1010);
//! assert_eq!(u32_pair_from_stream.1, 25);
//! ```
//!
//! # Generic Parameter Enumeration
//!
//! The module also contains generic parameter enumerations.
//! This includes the [ParamsHeapless] enumeration for contained values which do not require heap
//! allocation, and the [Params] enumeration which covers [ParamsHeapless] and some additional
//! types which require [alloc] support but allow for more flexibility.
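//!
//! ## Example for the generic parameter enumerations
//!
//! A small sketch, mirroring how these enumerations are used elsewhere in this crate:
//!
//! ```
//! use satrs_core::params::{Params, ParamsHeapless, ParamsRaw};
//!
//! // Wrap a pair of u32 values into the generic parameter enumeration.
//! let param: Params = Params::Heapless((0x1010_u32, 25_u32).into());
//! if let Params::Heapless(ParamsHeapless::Raw(ParamsRaw::U32Pair(pair))) = param {
//!     assert_eq!(pair.0, 0x1010);
//!     assert_eq!(pair.1, 25);
//! } else {
//!     panic!("unexpected parameter variant");
//! }
//! ```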
#[cfg(feature = "alloc")]
use crate::pool::StoreAddr;
#[cfg(feature = "alloc")]
use alloc::string::{String, ToString};
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use core::fmt::Debug;
use core::mem::size_of;
use paste::paste;
use spacepackets::ecss::{EcssEnumU16, EcssEnumU32, EcssEnumU64, EcssEnumU8};
use spacepackets::util::UnsignedEnum;
use spacepackets::ByteConversionError;
#[cfg(feature = "alloc")]
pub use alloc_mod::*;
pub use spacepackets::util::ToBeBytes;
/// Generic trait which is used for objects which can be converted into a raw network (big) endian
/// byte format.
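///
/// A short sketch using one of the newtypes defined further below in this module:
///
/// ```
/// use satrs_core::params::{U32Pair, WritableToBeBytes};
///
/// let pair = U32Pair(1, 2);
/// let mut buf: [u8; 8] = [0; 8];
/// // Both u32 values are written in network (big) endianness.
/// assert_eq!(pair.write_to_be_bytes(&mut buf).unwrap(), 8);
/// assert_eq!(buf, [0, 0, 0, 1, 0, 0, 0, 2]);
/// ```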
pub trait WritableToBeBytes {
fn raw_len(&self) -> usize;
/// Writes the object to a raw buffer in network endianness (big)
fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError>;
}
macro_rules! param_to_be_bytes_impl {
($Newtype: ident) => {
impl WritableToBeBytes for $Newtype {
#[inline]
fn raw_len(&self) -> usize {
size_of::<<Self as ToBeBytes>::ByteArray>()
}
fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
let raw_len = self.raw_len();
if buf.len() < raw_len {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: raw_len,
});
}
buf[0..raw_len].copy_from_slice(&self.to_be_bytes());
Ok(raw_len)
}
}
};
}
macro_rules! primitive_newtypes_with_eq {
($($ty: ty,)+) => {
$(
paste! {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct [<$ty:upper>](pub $ty);
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct [<$ty:upper Pair>](pub $ty, pub $ty);
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct [<$ty:upper Triplet>](pub $ty, pub $ty, pub $ty);
param_to_be_bytes_impl!([<$ty:upper>]);
param_to_be_bytes_impl!([<$ty:upper Pair>]);
param_to_be_bytes_impl!([<$ty:upper Triplet>]);
impl From<$ty> for [<$ty:upper>] {
fn from(v: $ty) -> Self {
Self(v)
}
}
impl From<($ty, $ty)> for [<$ty:upper Pair>] {
fn from(v: ($ty, $ty)) -> Self {
Self(v.0, v.1)
}
}
impl From<($ty, $ty, $ty)> for [<$ty:upper Triplet>] {
fn from(v: ($ty, $ty, $ty)) -> Self {
Self(v.0, v.1, v.2)
}
}
}
)+
}
}
macro_rules! primitive_newtypes {
($($ty: ty,)+) => {
$(
paste! {
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct [<$ty:upper>](pub $ty);
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct [<$ty:upper Pair>](pub $ty, pub $ty);
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct [<$ty:upper Triplet>](pub $ty, pub $ty, pub $ty);
param_to_be_bytes_impl!([<$ty:upper>]);
param_to_be_bytes_impl!([<$ty:upper Pair>]);
param_to_be_bytes_impl!([<$ty:upper Triplet>]);
impl From<$ty> for [<$ty:upper>] {
fn from(v: $ty) -> Self {
Self(v)
}
}
impl From<($ty, $ty)> for [<$ty:upper Pair>] {
fn from(v: ($ty, $ty)) -> Self {
Self(v.0, v.1)
}
}
impl From<($ty, $ty, $ty)> for [<$ty:upper Triplet>] {
fn from(v: ($ty, $ty, $ty)) -> Self {
Self(v.0, v.1, v.2)
}
}
}
)+
}
}
primitive_newtypes_with_eq!(u8, u16, u32, u64, i8, i16, i32, i64,);
primitive_newtypes!(f32, f64,);
macro_rules! scalar_byte_conversions_impl {
($($ty: ty,)+) => {
$(
paste! {
impl ToBeBytes for [<$ty:upper>] {
type ByteArray = [u8; size_of::<$ty>()];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
self.0.to_be_bytes()
}
}
impl TryFrom<&[u8]> for [<$ty:upper>] {
type Error = ByteConversionError;
fn try_from(v: &[u8]) -> Result<Self, Self::Error> {
if v.len() < size_of::<$ty>() {
return Err(ByteConversionError::FromSliceTooSmall{
expected: size_of::<$ty>(),
found: v.len()
});
}
Ok([<$ty:upper>]($ty::from_be_bytes(v[0..size_of::<$ty>()].try_into().unwrap())))
}
}
}
)+
}
}
macro_rules! pair_byte_conversions_impl {
($($ty: ty,)+) => {
$(
paste! {
impl ToBeBytes for [<$ty:upper Pair>] {
type ByteArray = [u8; size_of::<$ty>() * 2];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
let mut array = [0; size_of::<$ty>() * 2];
array[0..size_of::<$ty>()].copy_from_slice(&self.0.to_be_bytes());
array[
size_of::<$ty>()..2 * size_of::<$ty>()
].copy_from_slice(&self.1.to_be_bytes());
array
}
}
impl TryFrom<&[u8]> for [<$ty:upper Pair>] {
type Error = ByteConversionError;
fn try_from(v: &[u8]) -> Result<Self, Self::Error> {
if v.len() < 2 * size_of::<$ty>() {
return Err(ByteConversionError::FromSliceTooSmall{
expected: 2 * size_of::<$ty>(),
found: v.len()
});
}
Ok([<$ty:upper Pair>](
$ty::from_be_bytes(v[0..size_of::<$ty>()].try_into().unwrap()),
$ty::from_be_bytes(v[size_of::<$ty>()..2 * size_of::<$ty>()].try_into().unwrap())
))
}
}
}
)+
}
}
macro_rules! triplet_to_be_bytes_impl {
($($ty: ty,)+) => {
$(
paste! {
impl ToBeBytes for [<$ty:upper Triplet>] {
type ByteArray = [u8; size_of::<$ty>() * 3];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
let mut array = [0; size_of::<$ty>() * 3];
array[0..size_of::<$ty>()].copy_from_slice(&self.0.to_be_bytes());
array[
size_of::<$ty>()..2 * size_of::<$ty>()
].copy_from_slice(&self.1.to_be_bytes());
array[
2 * size_of::<$ty>()..3 * size_of::<$ty>()
].copy_from_slice(&self.2.to_be_bytes());
array
}
}
impl TryFrom<&[u8]> for [<$ty:upper Triplet>] {
type Error = ByteConversionError;
fn try_from(v: &[u8]) -> Result<Self, Self::Error> {
if v.len() < 3 * size_of::<$ty>() {
return Err(ByteConversionError::FromSliceTooSmall{
expected: 3 * size_of::<$ty>(),
found: v.len()
});
}
Ok([<$ty:upper Triplet>](
$ty::from_be_bytes(v[0..size_of::<$ty>()].try_into().unwrap()),
$ty::from_be_bytes(v[size_of::<$ty>()..2 * size_of::<$ty>()].try_into().unwrap()),
$ty::from_be_bytes(v[2 * size_of::<$ty>()..3 * size_of::<$ty>()].try_into().unwrap())
))
}
}
}
)+
}
}
scalar_byte_conversions_impl!(u8, u16, u32, u64, i8, i16, i32, i64, f32, f64,);
impl ToBeBytes for U8Pair {
type ByteArray = [u8; 2];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
let mut array = [0; 2];
array[0] = self.0;
array[1] = self.1;
array
}
}
impl ToBeBytes for I8Pair {
type ByteArray = [u8; 2];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
let mut array = [0; 2];
array[0] = self.0 as u8;
array[1] = self.1 as u8;
array
}
}
impl ToBeBytes for U8Triplet {
type ByteArray = [u8; 3];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
let mut array = [0; 3];
array[0] = self.0;
array[1] = self.1;
array[2] = self.2;
array
}
}
impl ToBeBytes for I8Triplet {
type ByteArray = [u8; 3];
fn written_len(&self) -> usize {
size_of::<Self::ByteArray>()
}
fn to_be_bytes(&self) -> Self::ByteArray {
let mut array = [0; 3];
array[0] = self.0 as u8;
array[1] = self.1 as u8;
array[2] = self.2 as u8;
array
}
}
pair_byte_conversions_impl!(u16, u32, u64, i16, i32, i64, f32, f64,);
triplet_to_be_bytes_impl!(u16, u32, u64, i16, i32, i64, f32, f64,);
/// Generic enumeration for additional parameters which only consist of primitive data types.
///
/// All contained variants and the enum itself implement the [WritableToBeBytes] trait, which
/// allows them to be easily converted into a network-friendly format.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum ParamsRaw {
U8(U8),
U8Pair(U8Pair),
U8Triplet(U8Triplet),
I8(I8),
I8Pair(I8Pair),
I8Triplet(I8Triplet),
U16(U16),
U16Pair(U16Pair),
U16Triplet(U16Triplet),
I16(I16),
I16Pair(I16Pair),
I16Triplet(I16Triplet),
U32(U32),
U32Pair(U32Pair),
U32Triplet(U32Triplet),
I32(I32),
I32Pair(I32Pair),
I32Triplet(I32Triplet),
F32(F32),
F32Pair(F32Pair),
F32Triplet(F32Triplet),
U64(U64),
I64(I64),
F64(F64),
}
impl WritableToBeBytes for ParamsRaw {
fn raw_len(&self) -> usize {
match self {
ParamsRaw::U8(v) => v.raw_len(),
ParamsRaw::U8Pair(v) => v.raw_len(),
ParamsRaw::U8Triplet(v) => v.raw_len(),
ParamsRaw::I8(v) => v.raw_len(),
ParamsRaw::I8Pair(v) => v.raw_len(),
ParamsRaw::I8Triplet(v) => v.raw_len(),
ParamsRaw::U16(v) => v.raw_len(),
ParamsRaw::U16Pair(v) => v.raw_len(),
ParamsRaw::U16Triplet(v) => v.raw_len(),
ParamsRaw::I16(v) => v.raw_len(),
ParamsRaw::I16Pair(v) => v.raw_len(),
ParamsRaw::I16Triplet(v) => v.raw_len(),
ParamsRaw::U32(v) => v.raw_len(),
ParamsRaw::U32Pair(v) => v.raw_len(),
ParamsRaw::U32Triplet(v) => v.raw_len(),
ParamsRaw::I32(v) => v.raw_len(),
ParamsRaw::I32Pair(v) => v.raw_len(),
ParamsRaw::I32Triplet(v) => v.raw_len(),
ParamsRaw::F32(v) => v.raw_len(),
ParamsRaw::F32Pair(v) => v.raw_len(),
ParamsRaw::F32Triplet(v) => v.raw_len(),
ParamsRaw::U64(v) => v.raw_len(),
ParamsRaw::I64(v) => v.raw_len(),
ParamsRaw::F64(v) => v.raw_len(),
}
}
fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
match self {
ParamsRaw::U8(v) => v.write_to_be_bytes(buf),
ParamsRaw::U8Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::U8Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::I8(v) => v.write_to_be_bytes(buf),
ParamsRaw::I8Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::I8Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::U16(v) => v.write_to_be_bytes(buf),
ParamsRaw::U16Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::U16Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::I16(v) => v.write_to_be_bytes(buf),
ParamsRaw::I16Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::I16Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::U32(v) => v.write_to_be_bytes(buf),
ParamsRaw::U32Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::U32Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::I32(v) => v.write_to_be_bytes(buf),
ParamsRaw::I32Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::I32Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::F32(v) => v.write_to_be_bytes(buf),
ParamsRaw::F32Pair(v) => v.write_to_be_bytes(buf),
ParamsRaw::F32Triplet(v) => v.write_to_be_bytes(buf),
ParamsRaw::U64(v) => v.write_to_be_bytes(buf),
ParamsRaw::I64(v) => v.write_to_be_bytes(buf),
ParamsRaw::F64(v) => v.write_to_be_bytes(buf),
}
}
}
macro_rules! params_raw_from_newtype {
($($newtype: ident,)+) => {
$(
impl From<$newtype> for ParamsRaw {
fn from(v: $newtype) -> Self {
Self::$newtype(v)
}
}
)+
}
}
params_raw_from_newtype!(
U8, U8Pair, U8Triplet, U16, U16Pair, U16Triplet, U32, U32Pair, U32Triplet, I8, I8Pair,
I8Triplet, I16, I16Pair, I16Triplet, I32, I32Pair, I32Triplet, F32, F32Pair, F32Triplet, U64,
I64, F64,
);
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum EcssEnumParams {
U8(EcssEnumU8),
U16(EcssEnumU16),
U32(EcssEnumU32),
U64(EcssEnumU64),
}
macro_rules! writable_as_be_bytes_ecss_enum_impl {
($EnumIdent: ident) => {
impl WritableToBeBytes for $EnumIdent {
fn raw_len(&self) -> usize {
self.size()
}
fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
<Self as UnsignedEnum>::write_to_be_bytes(self, buf).map(|_| self.raw_len())
}
}
};
}
writable_as_be_bytes_ecss_enum_impl!(EcssEnumU8);
writable_as_be_bytes_ecss_enum_impl!(EcssEnumU16);
writable_as_be_bytes_ecss_enum_impl!(EcssEnumU32);
writable_as_be_bytes_ecss_enum_impl!(EcssEnumU64);
impl WritableToBeBytes for EcssEnumParams {
fn raw_len(&self) -> usize {
match self {
EcssEnumParams::U8(e) => e.raw_len(),
EcssEnumParams::U16(e) => e.raw_len(),
EcssEnumParams::U32(e) => e.raw_len(),
EcssEnumParams::U64(e) => e.raw_len(),
}
}
fn write_to_be_bytes(&self, buf: &mut [u8]) -> Result<usize, ByteConversionError> {
match self {
EcssEnumParams::U8(e) => WritableToBeBytes::write_to_be_bytes(e, buf),
EcssEnumParams::U16(e) => WritableToBeBytes::write_to_be_bytes(e, buf),
EcssEnumParams::U32(e) => WritableToBeBytes::write_to_be_bytes(e, buf),
EcssEnumParams::U64(e) => WritableToBeBytes::write_to_be_bytes(e, buf),
}
}
}
/// Generic enumeration for parameters which do not rely on heap allocations.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum ParamsHeapless {
Raw(ParamsRaw),
EcssEnum(EcssEnumParams),
}
macro_rules! from_conversions_for_raw {
($(($raw_ty: ty, $TargetPath: path),)+) => {
$(
impl From<$raw_ty> for ParamsRaw {
fn from(val: $raw_ty) -> Self {
$TargetPath(val.into())
}
}
impl From<$raw_ty> for ParamsHeapless {
fn from(val: $raw_ty) -> Self {
ParamsHeapless::Raw(val.into())
}
}
)+
};
}
from_conversions_for_raw!(
(u8, Self::U8),
((u8, u8), Self::U8Pair),
((u8, u8, u8), Self::U8Triplet),
(i8, Self::I8),
((i8, i8), Self::I8Pair),
((i8, i8, i8), Self::I8Triplet),
(u16, Self::U16),
((u16, u16), Self::U16Pair),
((u16, u16, u16), Self::U16Triplet),
(i16, Self::I16),
((i16, i16), Self::I16Pair),
((i16, i16, i16), Self::I16Triplet),
(u32, Self::U32),
((u32, u32), Self::U32Pair),
((u32, u32, u32), Self::U32Triplet),
(i32, Self::I32),
((i32, i32), Self::I32Pair),
((i32, i32, i32), Self::I32Triplet),
(f32, Self::F32),
((f32, f32), Self::F32Pair),
((f32, f32, f32), Self::F32Triplet),
(u64, Self::U64),
(f64, Self::F64),
);
#[cfg(feature = "alloc")]
mod alloc_mod {
use super::*;
/// Generic enumeration for additional parameters, including parameters which rely on heap
/// allocations.
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Debug, Clone)]
pub enum Params {
Heapless(ParamsHeapless),
Store(StoreAddr),
Vec(Vec<u8>),
String(String),
}
impl From<StoreAddr> for Params {
fn from(x: StoreAddr) -> Self {
Self::Store(x)
}
}
impl From<ParamsHeapless> for Params {
fn from(x: ParamsHeapless) -> Self {
Self::Heapless(x)
}
}
impl From<Vec<u8>> for Params {
fn from(val: Vec<u8>) -> Self {
Self::Vec(val)
}
}
/// Converts a byte slice into the [Params::Vec] variant
impl From<&[u8]> for Params {
fn from(val: &[u8]) -> Self {
Self::Vec(val.to_vec())
}
}
impl From<String> for Params {
fn from(val: String) -> Self {
Self::String(val)
}
}
/// Converts a string slice into the [Params::String] variant
impl From<&str> for Params {
fn from(val: &str) -> Self {
Self::String(val.to_string())
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_basic_u32_pair() {
let u32_pair = U32Pair(4, 8);
assert_eq!(u32_pair.0, 4);
assert_eq!(u32_pair.1, 8);
let raw = u32_pair.to_be_bytes();
let mut u32_conv_back = u32::from_be_bytes(raw[0..4].try_into().unwrap());
assert_eq!(u32_conv_back, 4);
u32_conv_back = u32::from_be_bytes(raw[4..8].try_into().unwrap());
assert_eq!(u32_conv_back, 8);
}
#[test]
fn basic_signed_test_pair() {
let i8_pair = I8Pair(-3, -16);
assert_eq!(i8_pair.0, -3);
assert_eq!(i8_pair.1, -16);
let raw = i8_pair.to_be_bytes();
let mut i8_conv_back = i8::from_be_bytes(raw[0..1].try_into().unwrap());
assert_eq!(i8_conv_back, -3);
i8_conv_back = i8::from_be_bytes(raw[1..2].try_into().unwrap());
assert_eq!(i8_conv_back, -16);
}
#[test]
fn basic_signed_test_triplet() {
let i8_triplet = I8Triplet(-3, -16, -126);
assert_eq!(i8_triplet.0, -3);
assert_eq!(i8_triplet.1, -16);
assert_eq!(i8_triplet.2, -126);
let raw = i8_triplet.to_be_bytes();
let mut i8_conv_back = i8::from_be_bytes(raw[0..1].try_into().unwrap());
assert_eq!(i8_conv_back, -3);
i8_conv_back = i8::from_be_bytes(raw[1..2].try_into().unwrap());
assert_eq!(i8_conv_back, -16);
i8_conv_back = i8::from_be_bytes(raw[2..3].try_into().unwrap());
assert_eq!(i8_conv_back, -126);
}
#[test]
fn conversion_test_string() {
let param: Params = "Test String".into();
if let Params::String(str) = param {
assert_eq!(str, String::from("Test String"));
} else {
panic!("Params type is not String")
}
}
#[test]
fn conversion_from_slice() {
let test_slice: [u8; 5] = [0; 5];
let vec_param: Params = test_slice.as_slice().into();
if let Params::Vec(vec) = vec_param {
assert_eq!(vec, test_slice.to_vec());
} else {
panic!("Params type is not a vector")
}
}
}

View File

@ -1,939 +0,0 @@
//! # Pool implementation providing memory pools for packet storage.
//!
//! # Example for the [StaticMemoryPool]
//!
//! ```
//! use satrs_core::pool::{PoolProvider, StaticMemoryPool, StaticPoolConfig};
//!
//! // 4 buckets of 4 bytes, 2 of 8 bytes and 1 of 16 bytes
//! let pool_cfg = StaticPoolConfig::new(vec![(4, 4), (2, 8), (1, 16)]);
//! let mut local_pool = StaticMemoryPool::new(pool_cfg);
//! let mut read_buf: [u8; 16] = [0; 16];
//! let mut addr;
//! {
//! // Add new data to the pool
//! let mut example_data = [0; 4];
//! example_data[0] = 42;
//! let res = local_pool.add(&example_data);
//! assert!(res.is_ok());
//! addr = res.unwrap();
//! }
//!
//! {
//! // Read the store data back
//! let res = local_pool.read(&addr, &mut read_buf);
//! assert!(res.is_ok());
//! let read_bytes = res.unwrap();
//! assert_eq!(read_bytes, 4);
//! assert_eq!(read_buf[0], 42);
//! // Modify the stored data
//! let res = local_pool.modify(&addr, |buf| {
//! buf[0] = 12;
//! });
//! assert!(res.is_ok());
//! }
//!
//! {
//! // Read the modified data back
//! let res = local_pool.read(&addr, &mut read_buf);
//! assert!(res.is_ok());
//! let read_bytes = res.unwrap();
//! assert_eq!(read_bytes, 4);
//! assert_eq!(read_buf[0], 12);
//! }
//!
//! // Delete the stored data
//! local_pool.delete(addr);
//!
//! // Get a free element in the pool with an appropriate size
//! {
//! let res = local_pool.free_element(12, |buf| {
//! buf[0] = 7;
//! });
//! assert!(res.is_ok());
//! addr = res.unwrap();
//! }
//!
//! // Read back the data
//! {
//! // Read the store data back
//! let res = local_pool.read(&addr, &mut read_buf);
//! assert!(res.is_ok());
//! let read_bytes = res.unwrap();
//! assert_eq!(read_bytes, 12);
//! assert_eq!(read_buf[0], 7);
//! }
//! ```
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub use alloc_mod::*;
use core::fmt::{Display, Formatter};
use delegate::delegate;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use spacepackets::ByteConversionError;
#[cfg(feature = "std")]
use std::error::Error;
type NumBlocks = u16;
pub type StoreAddr = u64;
/// Simple address type used for transactions with the local pool.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct StaticPoolAddr {
pub(crate) pool_idx: u16,
pub(crate) packet_idx: NumBlocks,
}
impl StaticPoolAddr {
pub const INVALID_ADDR: u32 = 0xFFFFFFFF;
pub fn raw(&self) -> u32 {
((self.pool_idx as u32) << 16) | self.packet_idx as u32
}
}
impl From<StaticPoolAddr> for StoreAddr {
fn from(value: StaticPoolAddr) -> Self {
((value.pool_idx as u64) << 16) | value.packet_idx as u64
}
}
impl From<StoreAddr> for StaticPoolAddr {
fn from(value: StoreAddr) -> Self {
Self {
pool_idx: ((value >> 16) & 0xffff) as u16,
packet_idx: (value & 0xffff) as u16,
}
}
}
impl Display for StaticPoolAddr {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
write!(
f,
"StoreAddr(pool index: {}, packet index: {})",
self.pool_idx, self.packet_idx
)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum StoreIdError {
InvalidSubpool(u16),
InvalidPacketIdx(u16),
}
impl Display for StoreIdError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
StoreIdError::InvalidSubpool(pool) => {
write!(f, "invalid subpool, index: {pool}")
}
StoreIdError::InvalidPacketIdx(packet_idx) => {
write!(f, "invalid packet index: {packet_idx}")
}
}
}
}
#[cfg(feature = "std")]
impl Error for StoreIdError {}
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum StoreError {
/// Requested data block is too large
DataTooLarge(usize),
/// The store is full. Contains the index of the full subpool
StoreFull(u16),
/// Store ID is invalid. This also includes partial errors where only the subpool is invalid
InvalidStoreId(StoreIdError, Option<StoreAddr>),
/// Valid subpool and packet index, but no data is stored at the given address
DataDoesNotExist(StoreAddr),
ByteConversionError(spacepackets::ByteConversionError),
LockError,
/// Internal or configuration errors
InternalError(u32),
}
impl Display for StoreError {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
StoreError::DataTooLarge(size) => {
write!(f, "data to store with size {size} is too large")
}
StoreError::StoreFull(subpool) => {
write!(f, "store is full. index of full subpool: {subpool}")
}
StoreError::InvalidStoreId(id_e, addr) => {
write!(f, "invalid store ID: {id_e}, address: {addr:?}")
}
StoreError::DataDoesNotExist(addr) => {
write!(f, "no data exists at address {addr:?}")
}
StoreError::InternalError(e) => {
write!(f, "internal error: {e}")
}
StoreError::ByteConversionError(e) => {
write!(f, "store error: {e}")
}
StoreError::LockError => {
write!(f, "lock error")
}
}
}
}
impl From<ByteConversionError> for StoreError {
fn from(value: ByteConversionError) -> Self {
Self::ByteConversionError(value)
}
}
#[cfg(feature = "std")]
impl Error for StoreError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
if let StoreError::InvalidStoreId(e, _) = self {
return Some(e);
}
None
}
}
/// Generic trait for pool providers where the data can be modified and read in-place. This
/// generally means that a shared pool structure has to be wrapped inside a lock structure.
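///
/// # Example
///
/// A minimal sketch of sharing a [StaticMemoryPool] between users by wrapping it inside a lock,
/// assuming the `std` feature is enabled, which also provides the [SharedStaticMemoryPool] alias:
///
/// ```
/// use std::sync::{Arc, RwLock};
/// use satrs_core::pool::{PoolProvider, SharedStaticMemoryPool, StaticMemoryPool, StaticPoolConfig};
///
/// let pool = StaticMemoryPool::new(StaticPoolConfig::new(vec![(2, 8)]));
/// let shared_pool: SharedStaticMemoryPool = Arc::new(RwLock::new(pool));
/// // Adding data requires locking the pool for writing.
/// let addr = shared_pool
///     .write()
///     .expect("Locking pool failed")
///     .add(&[1, 2, 3, 4])
///     .expect("Adding data failed");
/// // Reading the data back only requires a read lock.
/// let mut buf: [u8; 8] = [0; 8];
/// let read_len = shared_pool
///     .read()
///     .expect("Locking pool failed")
///     .read(&addr, &mut buf)
///     .expect("Reading back data failed");
/// assert_eq!(read_len, 4);
/// assert_eq!(buf[0], 1);
/// ```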
pub trait PoolProvider {
/// Add new data to the pool. The provider should attempt to reserve a memory block with the
/// appropriate size and then copy the given data to the block. Yields a [StoreAddr] which can
/// be used to access the data stored in the pool
fn add(&mut self, data: &[u8]) -> Result<StoreAddr, StoreError>;
/// The provider should attempt to reserve a free memory block with the appropriate size first.
/// It then executes a user-provided closure and passes a mutable reference to that memory
/// block to the closure. This allows the user to write data to the memory block.
/// The function should yield a [StoreAddr] which can be used to access the data stored in the
/// pool.
fn free_element<W: FnMut(&mut [u8])>(
&mut self,
len: usize,
writer: W,
) -> Result<StoreAddr, StoreError>;
/// Modify data added previously using a given [StoreAddr]. The provider should use the store
/// address to determine if a memory block exists for that address. If it does, it should
/// call the user-provided closure and pass a mutable reference to the memory block
/// to the closure. This allows the user to modify the memory block.
fn modify<U: FnMut(&mut [u8])>(
&mut self,
addr: &StoreAddr,
updater: U,
) -> Result<(), StoreError>;
/// The provider should copy the data from the memory block to the user-provided buffer if
/// it exists.
fn read(&self, addr: &StoreAddr, buf: &mut [u8]) -> Result<usize, StoreError>;
/// Delete data inside the pool given a [StoreAddr].
fn delete(&mut self, addr: StoreAddr) -> Result<(), StoreError>;
fn has_element_at(&self, addr: &StoreAddr) -> Result<bool, StoreError>;
/// Retrieve the length of the data at the given store address.
fn len_of_data(&self, addr: &StoreAddr) -> Result<usize, StoreError>;
#[cfg(feature = "alloc")]
fn read_as_vec(&self, addr: &StoreAddr) -> Result<alloc::vec::Vec<u8>, StoreError> {
let mut vec = alloc::vec![0; self.len_of_data(addr)?];
self.read(addr, &mut vec)?;
Ok(vec)
}
}
pub trait PoolProviderWithGuards: PoolProvider {
/// This function behaves like [PoolProvider::read], but consumes the provided address
/// and returns a RAII conformant guard object.
///
/// Unless the guard [PoolRwGuard::release] method is called, the data for the
/// given address will be deleted automatically when the guard is dropped.
/// This can prevent memory leaks. Users can read the data and release the guard
/// if the data in the store is valid for further processing. If the data is faulty, no
/// manual deletion is necessary when returning from a processing function prematurely.
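///
/// # Example
///
/// A minimal sketch using a [StaticMemoryPool], assuming the `alloc` feature is enabled:
///
/// ```
/// use satrs_core::pool::{PoolProvider, PoolProviderWithGuards, StaticMemoryPool, StaticPoolConfig};
///
/// let mut pool = StaticMemoryPool::new(StaticPoolConfig::new(vec![(1, 4)]));
/// let addr = pool.add(&[1, 2, 3, 4]).expect("Adding data failed");
/// let guard = pool.read_with_guard(addr);
/// let mut buf: [u8; 4] = [0; 4];
/// guard.read(&mut buf).expect("Reading back data failed");
/// // Dropping the guard without calling release() deletes the entry automatically.
/// drop(guard);
/// assert!(!pool.has_element_at(&addr).expect("Invalid address"));
/// ```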
fn read_with_guard(&mut self, addr: StoreAddr) -> PoolGuard<Self>;
/// This function behaves like [PoolProvider::modify], but consumes the provided
/// address and returns a RAII conformant guard object.
///
/// Unless the guard [PoolRwGuard::release] method is called, the data for the
/// given address will be deleted automatically when the guard is dropped.
/// This can prevent memory leaks. Users can read (and modify) the data and release the guard
/// if the data in the store is valid for further processing. If the data is faulty, no
/// manual deletion is necessary when returning from a processing function prematurely.
fn modify_with_guard(&mut self, addr: StoreAddr) -> PoolRwGuard<Self>;
}
pub struct PoolGuard<'a, MemProvider: PoolProvider + ?Sized> {
pool: &'a mut MemProvider,
pub addr: StoreAddr,
no_deletion: bool,
deletion_failed_error: Option<StoreError>,
}
/// Helper object which provides scoped read access to a pool entry and deletes the entry
/// automatically when dropped, unless [Self::release] is called first.
impl<'a, MemProvider: PoolProvider> PoolGuard<'a, MemProvider> {
pub fn new(pool: &'a mut MemProvider, addr: StoreAddr) -> Self {
Self {
pool,
addr,
no_deletion: false,
deletion_failed_error: None,
}
}
pub fn read(&self, buf: &mut [u8]) -> Result<usize, StoreError> {
self.pool.read(&self.addr, buf)
}
#[cfg(feature = "alloc")]
pub fn read_as_vec(&self) -> Result<alloc::vec::Vec<u8>, StoreError> {
self.pool.read_as_vec(&self.addr)
}
/// Releasing the pool guard will disable the automatic deletion of the data when the guard
/// is dropped.
pub fn release(&mut self) {
self.no_deletion = true;
}
}
impl<MemProvider: PoolProvider + ?Sized> Drop for PoolGuard<'_, MemProvider> {
fn drop(&mut self) {
if !self.no_deletion {
if let Err(e) = self.pool.delete(self.addr) {
self.deletion_failed_error = Some(e);
}
}
}
}
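/// Helper object which behaves like [PoolGuard], but which additionally allows modifying the
/// pool entry via [Self::update].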
pub struct PoolRwGuard<'a, MemProvider: PoolProvider + ?Sized> {
guard: PoolGuard<'a, MemProvider>,
}
impl<'a, MemProvider: PoolProvider> PoolRwGuard<'a, MemProvider> {
pub fn new(pool: &'a mut MemProvider, addr: StoreAddr) -> Self {
Self {
guard: PoolGuard::new(pool, addr),
}
}
pub fn update<U: FnMut(&mut [u8])>(&mut self, updater: &mut U) -> Result<(), StoreError> {
self.guard.pool.modify(&self.guard.addr, updater)
}
delegate!(
to self.guard {
pub fn read(&self, buf: &mut [u8]) -> Result<usize, StoreError>;
/// Releasing the pool guard will disable the automatic deletion of the data when the guard
/// is dropped.
pub fn release(&mut self);
}
);
}
#[cfg(feature = "alloc")]
mod alloc_mod {
use super::{PoolGuard, PoolProvider, PoolProviderWithGuards, PoolRwGuard, StaticPoolAddr};
use crate::pool::{NumBlocks, StoreAddr, StoreError, StoreIdError};
use alloc::vec;
use alloc::vec::Vec;
use spacepackets::ByteConversionError;
#[cfg(feature = "std")]
use std::sync::{Arc, RwLock};
#[cfg(feature = "std")]
pub type SharedStaticMemoryPool = Arc<RwLock<StaticMemoryPool>>;
type PoolSize = usize;
const STORE_FREE: PoolSize = PoolSize::MAX;
pub const POOL_MAX_SIZE: PoolSize = STORE_FREE - 1;
/// Configuration structure of the [static memory pool][StaticMemoryPool]
///
/// # Parameters
///
/// * `cfg`: Vector of tuples, each of which represents a subpool. The first entry in the tuple
/// specifies the number of memory blocks in the subpool, the second entry the size of the blocks.
#[derive(Clone)]
pub struct StaticPoolConfig {
cfg: Vec<(NumBlocks, usize)>,
}
impl StaticPoolConfig {
pub fn new(cfg: Vec<(NumBlocks, usize)>) -> Self {
StaticPoolConfig { cfg }
}
pub fn cfg(&self) -> &Vec<(NumBlocks, usize)> {
&self.cfg
}
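/// Sanitizes the configuration by removing subpools which have no blocks or whose block size
/// is not smaller than [POOL_MAX_SIZE], and by sorting the remaining subpools by ascending
/// block size. Returns the number of remaining subpools.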
pub fn sanitize(&mut self) -> usize {
self.cfg
.retain(|&(bucket_num, size)| bucket_num > 0 && size < POOL_MAX_SIZE);
self.cfg
.sort_unstable_by(|(_, sz0), (_, sz1)| sz0.partial_cmp(sz1).unwrap());
self.cfg.len()
}
}
/// Pool implementation providing sub-pools with fixed size memory blocks.
///
/// This is a simple memory pool implementation which pre-allocates all sub-pools using a given pool
/// configuration. After the pre-allocation, no dynamic memory allocation will be performed
/// during run-time. This makes the implementation suitable for real-time applications and
/// embedded environments. The pool implementation will also track the size of the data stored
/// inside it.
///
/// Transactions with the [pool][StaticMemoryPool] are done using a generic
/// [address][StoreAddr] type.
/// Adding any data to the pool will yield a store address. Modification and read operations are
/// done using a reference to a store address. Deletion will consume the store address.
pub struct StaticMemoryPool {
pool_cfg: StaticPoolConfig,
pool: Vec<Vec<u8>>,
sizes_lists: Vec<Vec<PoolSize>>,
}
impl StaticMemoryPool {
/// Create a new local pool from the [given configuration][StaticPoolConfig]. This function
/// will sanitize the given configuration as well.
pub fn new(mut cfg: StaticPoolConfig) -> StaticMemoryPool {
let subpools_num = cfg.sanitize();
let mut local_pool = StaticMemoryPool {
pool_cfg: cfg,
pool: Vec::with_capacity(subpools_num),
sizes_lists: Vec::with_capacity(subpools_num),
};
for &(num_elems, elem_size) in local_pool.pool_cfg.cfg.iter() {
let next_pool_len = elem_size * num_elems as usize;
local_pool.pool.push(vec![0; next_pool_len]);
let next_sizes_list_len = num_elems as usize;
local_pool
.sizes_lists
.push(vec![STORE_FREE; next_sizes_list_len]);
}
local_pool
}
fn addr_check(&self, addr: &StaticPoolAddr) -> Result<usize, StoreError> {
self.validate_addr(addr)?;
let pool_idx = addr.pool_idx as usize;
let size_list = self.sizes_lists.get(pool_idx).unwrap();
let curr_size = size_list[addr.packet_idx as usize];
if curr_size == STORE_FREE {
return Err(StoreError::DataDoesNotExist(StoreAddr::from(*addr)));
}
Ok(curr_size)
}
fn validate_addr(&self, addr: &StaticPoolAddr) -> Result<(), StoreError> {
let pool_idx = addr.pool_idx as usize;
if pool_idx >= self.pool_cfg.cfg.len() {
return Err(StoreError::InvalidStoreId(
StoreIdError::InvalidSubpool(addr.pool_idx),
Some(StoreAddr::from(*addr)),
));
}
if addr.packet_idx >= self.pool_cfg.cfg[addr.pool_idx as usize].0 {
return Err(StoreError::InvalidStoreId(
StoreIdError::InvalidPacketIdx(addr.packet_idx),
Some(StoreAddr::from(*addr)),
));
}
Ok(())
}
fn reserve(&mut self, data_len: usize) -> Result<StaticPoolAddr, StoreError> {
let subpool_idx = self.find_subpool(data_len, 0)?;
let (slot, size_slot_ref) = self.find_empty(subpool_idx)?;
*size_slot_ref = data_len;
Ok(StaticPoolAddr {
pool_idx: subpool_idx,
packet_idx: slot,
})
}
fn find_subpool(&self, req_size: usize, start_at_subpool: u16) -> Result<u16, StoreError> {
for (i, &(_, elem_size)) in self.pool_cfg.cfg.iter().enumerate() {
if i < start_at_subpool as usize {
continue;
}
if elem_size >= req_size {
return Ok(i as u16);
}
}
Err(StoreError::DataTooLarge(req_size))
}
fn write(&mut self, addr: &StaticPoolAddr, data: &[u8]) -> Result<(), StoreError> {
let packet_pos = self.raw_pos(addr).ok_or(StoreError::InternalError(0))?;
let subpool = self
.pool
.get_mut(addr.pool_idx as usize)
.ok_or(StoreError::InternalError(1))?;
let pool_slice = &mut subpool[packet_pos..packet_pos + data.len()];
pool_slice.copy_from_slice(data);
Ok(())
}
fn find_empty(&mut self, subpool: u16) -> Result<(u16, &mut usize), StoreError> {
if let Some(size_list) = self.sizes_lists.get_mut(subpool as usize) {
for (i, elem_size) in size_list.iter_mut().enumerate() {
if *elem_size == STORE_FREE {
return Ok((i as u16, elem_size));
}
}
} else {
return Err(StoreError::InvalidStoreId(
StoreIdError::InvalidSubpool(subpool),
None,
));
}
Err(StoreError::StoreFull(subpool))
}
fn raw_pos(&self, addr: &StaticPoolAddr) -> Option<usize> {
let (_, size) = self.pool_cfg.cfg.get(addr.pool_idx as usize)?;
Some(addr.packet_idx as usize * size)
}
}
impl PoolProvider for StaticMemoryPool {
fn add(&mut self, data: &[u8]) -> Result<StoreAddr, StoreError> {
let data_len = data.len();
if data_len > POOL_MAX_SIZE {
return Err(StoreError::DataTooLarge(data_len));
}
let addr = self.reserve(data_len)?;
self.write(&addr, data)?;
Ok(addr.into())
}
fn free_element<W: FnMut(&mut [u8])>(
&mut self,
len: usize,
mut writer: W,
) -> Result<StoreAddr, StoreError> {
if len > POOL_MAX_SIZE {
return Err(StoreError::DataTooLarge(len));
}
let addr = self.reserve(len)?;
let raw_pos = self.raw_pos(&addr).unwrap();
let block =
&mut self.pool.get_mut(addr.pool_idx as usize).unwrap()[raw_pos..raw_pos + len];
writer(block);
Ok(addr.into())
}
fn modify<U: FnMut(&mut [u8])>(
&mut self,
addr: &StoreAddr,
mut updater: U,
) -> Result<(), StoreError> {
let addr = StaticPoolAddr::from(*addr);
let curr_size = self.addr_check(&addr)?;
let raw_pos = self.raw_pos(&addr).unwrap();
let block = &mut self.pool.get_mut(addr.pool_idx as usize).unwrap()
[raw_pos..raw_pos + curr_size];
updater(block);
Ok(())
}
fn read(&self, addr: &StoreAddr, buf: &mut [u8]) -> Result<usize, StoreError> {
let addr = StaticPoolAddr::from(*addr);
let curr_size = self.addr_check(&addr)?;
if buf.len() < curr_size {
return Err(ByteConversionError::ToSliceTooSmall {
found: buf.len(),
expected: curr_size,
}
.into());
}
let raw_pos = self.raw_pos(&addr).unwrap();
let block =
&self.pool.get(addr.pool_idx as usize).unwrap()[raw_pos..raw_pos + curr_size];
buf[..curr_size].copy_from_slice(block);
Ok(curr_size)
}
fn delete(&mut self, addr: StoreAddr) -> Result<(), StoreError> {
let addr = StaticPoolAddr::from(addr);
self.addr_check(&addr)?;
let block_size = self.pool_cfg.cfg.get(addr.pool_idx as usize).unwrap().1;
let raw_pos = self.raw_pos(&addr).unwrap();
let block = &mut self.pool.get_mut(addr.pool_idx as usize).unwrap()
[raw_pos..raw_pos + block_size];
let size_list = self.sizes_lists.get_mut(addr.pool_idx as usize).unwrap();
size_list[addr.packet_idx as usize] = STORE_FREE;
block.fill(0);
Ok(())
}
fn has_element_at(&self, addr: &StoreAddr) -> Result<bool, StoreError> {
let addr = StaticPoolAddr::from(*addr);
self.validate_addr(&addr)?;
let pool_idx = addr.pool_idx as usize;
let size_list = self.sizes_lists.get(pool_idx).unwrap();
let curr_size = size_list[addr.packet_idx as usize];
if curr_size == STORE_FREE {
return Ok(false);
}
Ok(true)
}
fn len_of_data(&self, addr: &StoreAddr) -> Result<usize, StoreError> {
let addr = StaticPoolAddr::from(*addr);
self.validate_addr(&addr)?;
let pool_idx = addr.pool_idx as usize;
let size_list = self.sizes_lists.get(pool_idx).unwrap();
let size = size_list[addr.packet_idx as usize];
Ok(match size {
STORE_FREE => 0,
_ => size,
})
}
}
impl PoolProviderWithGuards for StaticMemoryPool {
fn modify_with_guard(&mut self, addr: StoreAddr) -> PoolRwGuard<Self> {
PoolRwGuard::new(self, addr)
}
fn read_with_guard(&mut self, addr: StoreAddr) -> PoolGuard<Self> {
PoolGuard::new(self, addr)
}
}
}
#[cfg(test)]
mod tests {
use crate::pool::{
PoolGuard, PoolProvider, PoolProviderWithGuards, PoolRwGuard, StaticMemoryPool,
StaticPoolAddr, StaticPoolConfig, StoreError, StoreIdError, POOL_MAX_SIZE,
};
use std::vec;
fn basic_small_pool() -> StaticMemoryPool {
// 4 buckets of 4 bytes, 2 of 8 bytes and 1 of 16 bytes
let pool_cfg = StaticPoolConfig::new(vec![(4, 4), (2, 8), (1, 16)]);
StaticMemoryPool::new(pool_cfg)
}
#[test]
fn test_cfg() {
// Values where number of buckets is 0 or size is too large should be removed
let mut pool_cfg = StaticPoolConfig::new(vec![(0, 0), (1, 0), (2, POOL_MAX_SIZE)]);
pool_cfg.sanitize();
assert_eq!(*pool_cfg.cfg(), vec![(1, 0)]);
// Entries should be ordered according to bucket size
pool_cfg = StaticPoolConfig::new(vec![(16, 6), (32, 3), (8, 12)]);
pool_cfg.sanitize();
assert_eq!(*pool_cfg.cfg(), vec![(32, 3), (16, 6), (8, 12)]);
// Unstable sort is used, so order of entries with same block length should not matter
pool_cfg = StaticPoolConfig::new(vec![(12, 12), (14, 16), (10, 12)]);
pool_cfg.sanitize();
assert!(
*pool_cfg.cfg() == vec![(12, 12), (10, 12), (14, 16)]
|| *pool_cfg.cfg() == vec![(10, 12), (12, 12), (14, 16)]
);
}
#[test]
fn test_add_and_read() {
let mut local_pool = basic_small_pool();
let mut test_buf: [u8; 16] = [0; 16];
for (i, val) in test_buf.iter_mut().enumerate() {
*val = i as u8;
}
let mut other_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
// Read back data and verify correctness
let res = local_pool.read(&addr, &mut other_buf);
assert!(res.is_ok());
let read_len = res.unwrap();
assert_eq!(read_len, 16);
for (i, &val) in other_buf.iter().enumerate() {
assert_eq!(val, i as u8);
}
}
#[test]
fn test_add_smaller_than_full_slot() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 12] = [0; 12];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
let res = local_pool
.read(&addr, &mut [0; 12])
.expect("Read back failed");
assert_eq!(res, 12);
}
#[test]
fn test_delete() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
// Delete the data
let res = local_pool.delete(addr);
assert!(res.is_ok());
let mut writer = |buf: &mut [u8]| {
assert_eq!(buf.len(), 12);
};
// Verify that the slot is free by trying to get a reference to it
let res = local_pool.free_element(12, &mut writer);
assert!(res.is_ok());
let addr = res.unwrap();
assert_eq!(
addr,
u64::from(StaticPoolAddr {
pool_idx: 2,
packet_idx: 0
})
);
}
#[test]
fn test_modify() {
let mut local_pool = basic_small_pool();
let mut test_buf: [u8; 16] = [0; 16];
for (i, val) in test_buf.iter_mut().enumerate() {
*val = i as u8;
}
let addr = local_pool.add(&test_buf).expect("Adding data failed");
{
// Modify the stored data in place
local_pool
.modify(&addr, &mut |buf: &mut [u8]| {
buf[0] = 0;
buf[1] = 0x42;
})
.expect("Modifying data failed");
}
local_pool
.read(&addr, &mut test_buf)
.expect("Reading back data failed");
assert_eq!(test_buf[0], 0);
assert_eq!(test_buf[1], 0x42);
assert_eq!(test_buf[2], 2);
assert_eq!(test_buf[3], 3);
}
#[test]
fn test_consecutive_reservation() {
let mut local_pool = basic_small_pool();
// Reserve two smaller blocks consecutively and verify that the third reservation fails
let res = local_pool.free_element(8, |_| {});
assert!(res.is_ok());
let addr0 = res.unwrap();
let res = local_pool.free_element(8, |_| {});
assert!(res.is_ok());
let addr1 = res.unwrap();
let res = local_pool.free_element(8, |_| {});
assert!(res.is_err());
let err = res.unwrap_err();
assert_eq!(err, StoreError::StoreFull(1));
// Verify that the two deletions are successful
assert!(local_pool.delete(addr0).is_ok());
assert!(local_pool.delete(addr1).is_ok());
}
#[test]
fn test_read_does_not_exist() {
let local_pool = basic_small_pool();
// Try to access data which does not exist
let res = local_pool.read(
&StaticPoolAddr {
packet_idx: 0,
pool_idx: 0,
}
.into(),
&mut [],
);
assert!(res.is_err());
assert!(matches!(
res.unwrap_err(),
StoreError::DataDoesNotExist { .. }
));
}
#[test]
fn test_store_full() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
assert!(local_pool.add(&test_buf).is_ok());
// The subpool is now full and the call should fail accordingly
let res = local_pool.add(&test_buf);
assert!(res.is_err());
let err = res.unwrap_err();
assert!(matches!(err, StoreError::StoreFull { .. }));
if let StoreError::StoreFull(subpool) = err {
assert_eq!(subpool, 2);
}
}
#[test]
fn test_invalid_pool_idx() {
let local_pool = basic_small_pool();
let addr = StaticPoolAddr {
pool_idx: 3,
packet_idx: 0,
}
.into();
let res = local_pool.read(&addr, &mut []);
assert!(res.is_err());
let err = res.unwrap_err();
assert!(matches!(
err,
StoreError::InvalidStoreId(StoreIdError::InvalidSubpool(3), Some(_))
));
}
#[test]
fn test_invalid_packet_idx() {
let local_pool = basic_small_pool();
let addr = StaticPoolAddr {
pool_idx: 2,
packet_idx: 1,
};
assert_eq!(addr.raw(), 0x00020001);
let res = local_pool.read(&addr.into(), &mut []);
assert!(res.is_err());
let err = res.unwrap_err();
assert!(matches!(
err,
StoreError::InvalidStoreId(StoreIdError::InvalidPacketIdx(1), Some(_))
));
}
#[test]
fn test_add_too_large() {
let mut local_pool = basic_small_pool();
let data_too_large = [0; 20];
let res = local_pool.add(&data_too_large);
assert!(res.is_err());
let err = res.unwrap_err();
assert_eq!(err, StoreError::DataTooLarge(20));
}
#[test]
fn test_data_too_large_1() {
let mut local_pool = basic_small_pool();
let res = local_pool.free_element(POOL_MAX_SIZE + 1, |_| {});
assert!(res.is_err());
assert_eq!(
res.unwrap_err(),
StoreError::DataTooLarge(POOL_MAX_SIZE + 1)
);
}
#[test]
fn test_free_element_too_large() {
let mut local_pool = basic_small_pool();
// Try to request a slot which is too large
let res = local_pool.free_element(20, |_| {});
assert!(res.is_err());
assert_eq!(res.unwrap_err(), StoreError::DataTooLarge(20));
}
#[test]
fn test_pool_guard_deletion_man_creation() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
let read_guard = PoolGuard::new(&mut local_pool, addr);
drop(read_guard);
assert!(!local_pool.has_element_at(&addr).expect("Invalid address"));
}
#[test]
fn test_pool_guard_deletion() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
let read_guard = local_pool.read_with_guard(addr);
drop(read_guard);
assert!(!local_pool.has_element_at(&addr).expect("Invalid address"));
}
#[test]
fn test_pool_guard_with_release() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
let mut read_guard = PoolGuard::new(&mut local_pool, addr);
read_guard.release();
drop(read_guard);
assert!(local_pool.has_element_at(&addr).expect("Invalid address"));
}
#[test]
fn test_pool_modify_guard_man_creation() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
let mut rw_guard = PoolRwGuard::new(&mut local_pool, addr);
rw_guard.update(&mut |_| {}).expect("modify failed");
drop(rw_guard);
assert!(!local_pool.has_element_at(&addr).expect("Invalid address"));
}
#[test]
fn test_pool_modify_guard() {
let mut local_pool = basic_small_pool();
let test_buf: [u8; 16] = [0; 16];
let addr = local_pool.add(&test_buf).expect("Adding data failed");
let mut rw_guard = local_pool.modify_with_guard(addr);
rw_guard.update(&mut |_| {}).expect("modify failed");
drop(rw_guard);
assert!(!local_pool.has_element_at(&addr).expect("Invalid address"));
}
#[test]
fn modify_pool_index_above_0() {
let mut local_pool = basic_small_pool();
let test_buf_0: [u8; 4] = [1; 4];
let test_buf_1: [u8; 4] = [2; 4];
let test_buf_2: [u8; 4] = [3; 4];
let test_buf_3: [u8; 4] = [4; 4];
let addr0 = local_pool.add(&test_buf_0).expect("Adding data failed");
let addr1 = local_pool.add(&test_buf_1).expect("Adding data failed");
let addr2 = local_pool.add(&test_buf_2).expect("Adding data failed");
let addr3 = local_pool.add(&test_buf_3).expect("Adding data failed");
local_pool
.modify(&addr0, |buf| {
assert_eq!(buf, test_buf_0);
})
.expect("Modifying data failed");
local_pool
.modify(&addr1, |buf| {
assert_eq!(buf, test_buf_1);
})
.expect("Modifying data failed");
local_pool
.modify(&addr2, |buf| {
assert_eq!(buf, test_buf_2);
})
.expect("Modifying data failed");
local_pool
.modify(&addr3, |buf| {
assert_eq!(buf, test_buf_3);
})
.expect("Modifying data failed");
}
}

View File

@ -1,102 +0,0 @@
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// Generic trait for a device capable of switching itself on or off.
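///
/// # Example
///
/// A minimal sketch of an implementor which is backed by a simple state field. The `DummySwitch`
/// type is purely illustrative, and the import path assumes this module is reachable as
/// `satrs_core::power`:
///
/// ```
/// use satrs_core::power::{PowerSwitch, SwitchState};
///
/// struct DummySwitch {
///     state: SwitchState,
/// }
///
/// impl PowerSwitch for DummySwitch {
///     type Error = ();
///
///     fn switch_on(&mut self) -> Result<(), Self::Error> {
///         self.state = SwitchState::On;
///         Ok(())
///     }
///
///     fn switch_off(&mut self) -> Result<(), Self::Error> {
///         self.state = SwitchState::Off;
///         Ok(())
///     }
///
///     fn switch_state(&self) -> SwitchState {
///         self.state
///     }
/// }
///
/// let mut switch = DummySwitch { state: SwitchState::Off };
/// switch.switch_on().expect("Switching on failed");
/// assert!(switch.is_switch_on());
/// ```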
pub trait PowerSwitch {
type Error;
fn switch_on(&mut self) -> Result<(), Self::Error>;
fn switch_off(&mut self) -> Result<(), Self::Error>;
fn is_switch_on(&self) -> bool {
self.switch_state() == SwitchState::On
}
fn switch_state(&self) -> SwitchState;
}
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum SwitchState {
Off = 0,
On = 1,
Unknown = 2,
Faulty = 3,
}
pub type SwitchId = u16;
/// Generic trait for a device capable of turning switches on and off.
pub trait PowerSwitcherCommandSender {
type Error;
fn send_switch_on_cmd(&mut self, switch_id: SwitchId) -> Result<(), Self::Error>;
fn send_switch_off_cmd(&mut self, switch_id: SwitchId) -> Result<(), Self::Error>;
}
pub trait PowerSwitchInfo {
type Error;
/// Retrieve the switch state
fn get_switch_state(&mut self, switch_id: SwitchId) -> Result<SwitchState, Self::Error>;
fn get_is_switch_on(&mut self, switch_id: SwitchId) -> Result<bool, Self::Error> {
Ok(self.get_switch_state(switch_id)? == SwitchState::On)
}
/// The maximum delay it will take to change a switch.
///
/// This may take into account the time to send a command, wait for it to be executed, and
/// for the resulting switch state change to become observable.
fn switch_delay_ms(&self) -> u32;
}
#[cfg(test)]
mod tests {
#![allow(dead_code)]
use super::*;
use std::boxed::Box;
struct Pcdu {
switch_rx: std::sync::mpsc::Receiver<(SwitchId, u16)>,
}
#[derive(Eq, PartialEq)]
enum DeviceState {
OFF,
SwitchingPower,
ON,
SETUP,
IDLE,
}
struct MyComplexDevice {
power_switcher: Box<dyn PowerSwitcherCommandSender<Error = ()>>,
power_info: Box<dyn PowerSwitchInfo<Error = ()>>,
switch_id: SwitchId,
some_state: u16,
dev_state: DeviceState,
mode: u32,
submode: u16,
}
impl MyComplexDevice {
pub fn periodic_op(&mut self) {
// .. mode command coming in
let mode = 1;
if mode == 1 {
if self.dev_state == DeviceState::OFF {
self.power_switcher
.send_switch_on_cmd(self.switch_id)
.expect("sending siwthc cmd failed");
self.dev_state = DeviceState::SwitchingPower;
}
if self.dev_state == DeviceState::SwitchingPower {
if self.power_info.get_is_switch_on(self.switch_id).unwrap() {
self.dev_state = DeviceState::ON;
self.mode = 1;
}
}
}
}
}
}

View File

@ -1,449 +0,0 @@
use crate::pus::{source_buffer_large_enough, EcssTmtcError};
use spacepackets::ecss::tm::PusTmCreator;
use spacepackets::ecss::tm::PusTmSecondaryHeader;
use spacepackets::ecss::{EcssEnumeration, PusError};
use spacepackets::{SpHeader, MAX_APID};
use crate::pus::EcssTmSenderCore;
#[cfg(feature = "alloc")]
pub use alloc_mod::EventReporter;
pub use spacepackets::ecss::event::*;
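/// Low-level helper which generates PUS service 5 event report telemetry using a user-provided
/// source data buffer and sends it via a generic TM sender.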
pub struct EventReporterBase {
msg_count: u16,
apid: u16,
pub dest_id: u16,
}
impl EventReporterBase {
pub fn new(apid: u16) -> Option<Self> {
if apid > MAX_APID {
return None;
}
Some(Self {
msg_count: 0,
dest_id: 0,
apid,
})
}
pub fn event_info(
&mut self,
buf: &mut [u8],
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.generate_and_send_generic_tm(
buf,
Subservice::TmInfoReport,
sender,
time_stamp,
event_id,
aux_data,
)
}
pub fn event_low_severity(
&mut self,
buf: &mut [u8],
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.generate_and_send_generic_tm(
buf,
Subservice::TmLowSeverityReport,
sender,
time_stamp,
event_id,
aux_data,
)
}
pub fn event_medium_severity(
&mut self,
buf: &mut [u8],
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.generate_and_send_generic_tm(
buf,
Subservice::TmMediumSeverityReport,
sender,
time_stamp,
event_id,
aux_data,
)
}
pub fn event_high_severity(
&mut self,
buf: &mut [u8],
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.generate_and_send_generic_tm(
buf,
Subservice::TmHighSeverityReport,
sender,
time_stamp,
event_id,
aux_data,
)
}
fn generate_and_send_generic_tm(
&mut self,
buf: &mut [u8],
subservice: Subservice,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
let tm = self.generate_generic_event_tm(buf, subservice, time_stamp, event_id, aux_data)?;
sender.send_tm(tm.into())?;
self.msg_count += 1;
Ok(())
}
fn generate_generic_event_tm<'a>(
&'a self,
buf: &'a mut [u8],
subservice: Subservice,
time_stamp: &'a [u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<PusTmCreator, EcssTmtcError> {
let mut src_data_len = event_id.size();
if let Some(aux_data) = aux_data {
src_data_len += aux_data.len();
}
source_buffer_large_enough(buf.len(), src_data_len)?;
let mut sp_header = SpHeader::tm_unseg(self.apid, 0, 0).unwrap();
let sec_header = PusTmSecondaryHeader::new(
5,
subservice.into(),
self.msg_count,
self.dest_id,
Some(time_stamp),
);
let mut current_idx = 0;
event_id
.write_to_be_bytes(&mut buf[0..event_id.size()])
.map_err(PusError::ByteConversion)?;
current_idx += event_id.size();
if let Some(aux_data) = aux_data {
buf[current_idx..current_idx + aux_data.len()].copy_from_slice(aux_data);
current_idx += aux_data.len();
}
Ok(PusTmCreator::new(
&mut sp_header,
sec_header,
&buf[0..current_idx],
true,
))
}
}
#[cfg(feature = "alloc")]
mod alloc_mod {
use super::*;
use alloc::vec;
use alloc::vec::Vec;
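/// Helper object which wraps an [EventReporterBase] and owns the intermediate source data
/// buffer used for event telemetry generation.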
pub struct EventReporter {
source_data_buf: Vec<u8>,
pub reporter: EventReporterBase,
}
impl EventReporter {
pub fn new(apid: u16, max_event_id_and_aux_data_size: usize) -> Option<Self> {
let reporter = EventReporterBase::new(apid)?;
Some(Self {
source_data_buf: vec![0; max_event_id_and_aux_data_size],
reporter,
})
}
pub fn event_info(
&mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.reporter.event_info(
self.source_data_buf.as_mut_slice(),
sender,
time_stamp,
event_id,
aux_data,
)
}
pub fn event_low_severity(
&mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.reporter.event_low_severity(
self.source_data_buf.as_mut_slice(),
sender,
time_stamp,
event_id,
aux_data,
)
}
pub fn event_medium_severity(
&mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.reporter.event_medium_severity(
self.source_data_buf.as_mut_slice(),
sender,
time_stamp,
event_id,
aux_data,
)
}
pub fn event_high_severity(
&mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event_id: impl EcssEnumeration,
aux_data: Option<&[u8]>,
) -> Result<(), EcssTmtcError> {
self.reporter.event_high_severity(
self.source_data_buf.as_mut_slice(),
sender,
time_stamp,
event_id,
aux_data,
)
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::events::{EventU32, Severity};
use crate::pus::tests::CommonTmInfo;
use crate::pus::{EcssChannel, PusTmWrapper};
use crate::ChannelId;
use spacepackets::ByteConversionError;
use std::cell::RefCell;
use std::collections::VecDeque;
use std::vec::Vec;
const EXAMPLE_APID: u16 = 0xee;
const EXAMPLE_GROUP_ID: u16 = 2;
const EXAMPLE_EVENT_ID_0: u16 = 1;
#[allow(dead_code)]
const EXAMPLE_EVENT_ID_1: u16 = 2;
#[derive(Debug, Eq, PartialEq, Clone)]
struct TmInfo {
pub common: CommonTmInfo,
pub event: EventU32,
pub aux_data: Vec<u8>,
}
#[derive(Default, Clone)]
struct TestSender {
pub service_queue: RefCell<VecDeque<TmInfo>>,
}
impl EcssChannel for TestSender {
fn id(&self) -> ChannelId {
0
}
}
impl EcssTmSenderCore for TestSender {
fn send_tm(&self, tm: PusTmWrapper) -> Result<(), EcssTmtcError> {
match tm {
PusTmWrapper::InStore(_) => {
panic!("TestSender: unexpected call with address");
}
PusTmWrapper::Direct(tm) => {
assert!(!tm.source_data().is_empty());
let src_data = tm.source_data();
assert!(src_data.len() >= 4);
let event =
EventU32::from(u32::from_be_bytes(src_data[0..4].try_into().unwrap()));
let mut aux_data = Vec::new();
if src_data.len() > 4 {
aux_data.extend_from_slice(&src_data[4..]);
}
self.service_queue.borrow_mut().push_back(TmInfo {
common: CommonTmInfo::new_from_tm(&tm),
event,
aux_data,
});
Ok(())
}
}
}
}
fn severity_to_subservice(severity: Severity) -> Subservice {
match severity {
Severity::INFO => Subservice::TmInfoReport,
Severity::LOW => Subservice::TmLowSeverityReport,
Severity::MEDIUM => Subservice::TmMediumSeverityReport,
Severity::HIGH => Subservice::TmHighSeverityReport,
}
}
fn report_basic_event(
reporter: &mut EventReporter,
sender: &mut TestSender,
time_stamp: &[u8],
event: EventU32,
severity: Severity,
aux_data: Option<&[u8]>,
) {
match severity {
Severity::INFO => {
reporter
.event_info(sender, time_stamp, event, aux_data)
.expect("Error reporting info event");
}
Severity::LOW => {
reporter
.event_low_severity(sender, time_stamp, event, aux_data)
.expect("Error reporting low event");
}
Severity::MEDIUM => {
reporter
.event_medium_severity(sender, time_stamp, event, aux_data)
.expect("Error reporting medium event");
}
Severity::HIGH => {
reporter
.event_high_severity(sender, time_stamp, event, aux_data)
.expect("Error reporting high event");
}
}
}
fn basic_event_test(
max_event_aux_data_buf: usize,
severity: Severity,
error_data: Option<&[u8]>,
) {
let mut sender = TestSender::default();
let reporter = EventReporter::new(EXAMPLE_APID, max_event_aux_data_buf);
assert!(reporter.is_some());
let mut reporter = reporter.unwrap();
let time_stamp_empty: [u8; 7] = [0; 7];
let mut error_copy = Vec::new();
if let Some(err_data) = error_data {
error_copy.extend_from_slice(err_data);
}
let event = EventU32::new(severity, EXAMPLE_GROUP_ID, EXAMPLE_EVENT_ID_0)
.expect("Error creating example event");
report_basic_event(
&mut reporter,
&mut sender,
&time_stamp_empty,
event,
severity,
error_data,
);
let mut service_queue = sender.service_queue.borrow_mut();
assert_eq!(service_queue.len(), 1);
let tm_info = service_queue.pop_front().unwrap();
assert_eq!(
tm_info.common.subservice,
severity_to_subservice(severity) as u8
);
assert_eq!(tm_info.common.dest_id, 0);
assert_eq!(tm_info.common.time_stamp, time_stamp_empty);
assert_eq!(tm_info.common.msg_counter, 0);
assert_eq!(tm_info.common.apid, EXAMPLE_APID);
assert_eq!(tm_info.event, event);
assert_eq!(tm_info.aux_data, error_copy);
}
#[test]
fn basic_info_event_generation() {
basic_event_test(4, Severity::INFO, None);
}
#[test]
fn basic_low_severity_event() {
basic_event_test(4, Severity::LOW, None);
}
#[test]
fn basic_medium_severity_event() {
basic_event_test(4, Severity::MEDIUM, None);
}
#[test]
fn basic_high_severity_event() {
basic_event_test(4, Severity::HIGH, None);
}
#[test]
fn event_with_info_string() {
let info_string = "Test Information";
basic_event_test(32, Severity::INFO, Some(info_string.as_bytes()));
}
#[test]
fn low_severity_with_raw_err_data() {
let raw_err_param: i32 = -1;
let raw_err = raw_err_param.to_be_bytes();
basic_event_test(8, Severity::LOW, Some(&raw_err))
}
fn check_buf_too_small(
reporter: &mut EventReporter,
sender: &mut TestSender,
expected_found_len: usize,
) {
let time_stamp_empty: [u8; 7] = [0; 7];
let event = EventU32::new(Severity::INFO, EXAMPLE_GROUP_ID, EXAMPLE_EVENT_ID_0)
.expect("Error creating example event");
let err = reporter.event_info(sender, &time_stamp_empty, event, None);
assert!(err.is_err());
let err = err.unwrap_err();
if let EcssTmtcError::Pus(PusError::ByteConversion(
ByteConversionError::ToSliceTooSmall { found, expected },
)) = err
{
assert_eq!(expected, 4);
assert_eq!(found, expected_found_len);
} else {
panic!("Unexpected error {:?}", err);
}
}
#[test]
fn insufficient_buffer() {
let mut sender = TestSender::default();
for i in 0..3 {
let reporter = EventReporter::new(EXAMPLE_APID, i);
assert!(reporter.is_some());
let mut reporter = reporter.unwrap();
check_buf_too_small(&mut reporter, &mut sender, i);
}
}
}

View File

@ -1,309 +0,0 @@
use crate::events::{EventU32, GenericEvent, Severity};
#[cfg(feature = "alloc")]
use crate::events::{EventU32TypedSev, HasSeverity};
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "alloc")]
use core::hash::Hash;
#[cfg(feature = "alloc")]
use hashbrown::HashSet;
#[cfg(feature = "alloc")]
pub use crate::pus::event::EventReporter;
use crate::pus::verification::TcStateToken;
#[cfg(feature = "alloc")]
use crate::pus::EcssTmSenderCore;
use crate::pus::EcssTmtcError;
#[cfg(feature = "alloc")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub use alloc_mod::*;
#[cfg(feature = "heapless")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "heapless")))]
pub use heapless_mod::*;
/// This trait allows the PUS event manager implementation to stay generic over various types
/// of backend containers.
///
/// These backend containers keep track of whether a particular event is enabled or disabled for
/// reporting and also expose a simple API to enable or disable the event reporting.
///
/// For example, a straightforward implementation for host systems could use a
/// [hash set](https://docs.rs/hashbrown/latest/hashbrown/struct.HashSet.html)
/// structure to track disabled events. A more primitive and embedded-friendly
/// solution could track this information in a static or pre-allocated list which contains
/// the disabled events.
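///
/// # Example
///
/// A minimal sketch of a pre-allocated backend which tracks up to four disabled [EventU32]
/// values in a plain array. The `ArrayBackend` type is purely illustrative, and the import path
/// assumes this module is reachable as `satrs_core::pus::event_man`:
///
/// ```
/// use satrs_core::events::EventU32;
/// use satrs_core::pus::event_man::PusEventMgmtBackendProvider;
///
/// #[derive(Default)]
/// struct ArrayBackend {
///     disabled: [Option<EventU32>; 4],
/// }
///
/// impl PusEventMgmtBackendProvider<EventU32> for ArrayBackend {
///     // Error returned when the fixed-size list of disabled events is full.
///     type Error = ();
///
///     fn event_enabled(&self, event: &EventU32) -> bool {
///         !self.disabled.iter().flatten().any(|e| e == event)
///     }
///
///     fn enable_event_reporting(&mut self, event: &EventU32) -> Result<bool, Self::Error> {
///         for slot in self.disabled.iter_mut() {
///             if *slot == Some(*event) {
///                 *slot = None;
///                 return Ok(true);
///             }
///         }
///         Ok(false)
///     }
///
///     fn disable_event_reporting(&mut self, event: &EventU32) -> Result<bool, Self::Error> {
///         if !self.event_enabled(event) {
///             return Ok(false);
///         }
///         let free_slot = self.disabled.iter_mut().find(|slot| slot.is_none()).ok_or(())?;
///         *free_slot = Some(*event);
///         Ok(true)
///     }
/// }
/// ```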
pub trait PusEventMgmtBackendProvider<Provider: GenericEvent> {
type Error;
fn event_enabled(&self, event: &Provider) -> bool;
fn enable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error>;
fn disable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error>;
}
#[cfg(feature = "heapless")]
pub mod heapless_mod {
use super::*;
use crate::events::{GenericEvent, LargestEventRaw};
use core::marker::PhantomData;
#[cfg_attr(doc_cfg, doc(cfg(feature = "heapless")))]
// TODO: After a new version of heapless is released which uses hash32 version 0.3, try using
// regular Event type again.
#[derive(Default)]
pub struct HeaplessPusMgmtBackendProvider<const N: usize, Provider: GenericEvent> {
disabled: heapless::FnvIndexSet<LargestEventRaw, N>,
phantom: PhantomData<Provider>,
}
/// Safety: All contained fields are [Send] as well.
unsafe impl<const N: usize, Event: GenericEvent + Send> Send
for HeaplessPusMgmtBackendProvider<N, Event>
{
}
impl<const N: usize, Provider: GenericEvent> PusEventMgmtBackendProvider<Provider>
for HeaplessPusMgmtBackendProvider<N, Provider>
{
type Error = ();
// The set tracks disabled events, mirroring the semantics of the default backend provider:
// an event is enabled as long as it is not contained in the set.
fn event_enabled(&self, event: &Provider) -> bool {
!self.disabled.contains(&event.raw_as_largest_type())
}
fn enable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error> {
Ok(self.disabled.remove(&event.raw_as_largest_type()))
}
fn disable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error> {
self.disabled
.insert(event.raw_as_largest_type())
.map_err(|_| ())
}
}
}
#[derive(Debug, PartialEq, Eq)]
pub enum EventRequest<Event: GenericEvent = EventU32> {
Enable(Event),
Disable(Event),
}
#[derive(Debug)]
pub struct EventRequestWithToken<Event: GenericEvent = EventU32> {
pub request: EventRequest<Event>,
pub token: TcStateToken,
}
#[derive(Debug)]
pub enum EventManError {
EcssTmtcError(EcssTmtcError),
SeverityMissmatch(Severity, Severity),
}
impl From<EcssTmtcError> for EventManError {
fn from(v: EcssTmtcError) -> Self {
Self::EcssTmtcError(v)
}
}
#[cfg(feature = "alloc")]
pub mod alloc_mod {
use super::*;
/// Default backend provider which uses a hash set as the event reporting status container,
/// as mentioned in the example of the [PusEventMgmtBackendProvider] documentation.
///
/// This provider is a good option for host systems or larger embedded systems where
/// the expected occasional memory allocation performed by the [HashSet] is not an issue.
pub struct DefaultPusMgmtBackendProvider<Event: GenericEvent = EventU32> {
disabled: HashSet<Event>,
}
/// Safety: All contained fields are [Send] as well
unsafe impl<Event: GenericEvent + Send> Send for DefaultPusMgmtBackendProvider<Event> {}
impl<Event: GenericEvent> Default for DefaultPusMgmtBackendProvider<Event> {
fn default() -> Self {
Self {
disabled: HashSet::default(),
}
}
}
impl<Provider: GenericEvent + PartialEq + Eq + Hash + Copy + Clone>
PusEventMgmtBackendProvider<Provider> for DefaultPusMgmtBackendProvider<Provider>
{
type Error = ();
fn event_enabled(&self, event: &Provider) -> bool {
!self.disabled.contains(event)
}
fn enable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error> {
Ok(self.disabled.remove(event))
}
fn disable_event_reporting(&mut self, event: &Provider) -> Result<bool, Self::Error> {
Ok(self.disabled.insert(*event))
}
}
pub struct PusEventDispatcher<BackendError, Provider: GenericEvent> {
reporter: EventReporter,
backend: Box<dyn PusEventMgmtBackendProvider<Provider, Error = BackendError>>,
}
/// Safety: All contained fields are [Send] as well.
unsafe impl<E: Send, Event: GenericEvent + Send> Send for PusEventDispatcher<E, Event> {}
impl<BackendError, Provider: GenericEvent> PusEventDispatcher<BackendError, Provider> {
pub fn new(
reporter: EventReporter,
backend: Box<dyn PusEventMgmtBackendProvider<Provider, Error = BackendError>>,
) -> Self {
Self { reporter, backend }
}
}
impl<BackendError, Event: GenericEvent> PusEventDispatcher<BackendError, Event> {
pub fn enable_tm_for_event(&mut self, event: &Event) -> Result<bool, BackendError> {
self.backend.enable_event_reporting(event)
}
pub fn disable_tm_for_event(&mut self, event: &Event) -> Result<bool, BackendError> {
self.backend.disable_event_reporting(event)
}
pub fn generate_pus_event_tm_generic(
&mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event: Event,
aux_data: Option<&[u8]>,
) -> Result<bool, EventManError> {
if !self.backend.event_enabled(&event) {
return Ok(false);
}
match event.severity() {
Severity::INFO => self
.reporter
.event_info(sender, time_stamp, event, aux_data)
.map(|_| true)
.map_err(|e| e.into()),
Severity::LOW => self
.reporter
.event_low_severity(sender, time_stamp, event, aux_data)
.map(|_| true)
.map_err(|e| e.into()),
Severity::MEDIUM => self
.reporter
.event_medium_severity(sender, time_stamp, event, aux_data)
.map(|_| true)
.map_err(|e| e.into()),
Severity::HIGH => self
.reporter
.event_high_severity(sender, time_stamp, event, aux_data)
.map(|_| true)
.map_err(|e| e.into()),
}
}
}
impl<BackendError> PusEventDispatcher<BackendError, EventU32> {
pub fn enable_tm_for_event_with_sev<Severity: HasSeverity>(
&mut self,
event: &EventU32TypedSev<Severity>,
) -> Result<bool, BackendError> {
self.backend.enable_event_reporting(event.as_ref())
}
pub fn disable_tm_for_event_with_sev<Severity: HasSeverity>(
&mut self,
event: &EventU32TypedSev<Severity>,
) -> Result<bool, BackendError> {
self.backend.disable_event_reporting(event.as_ref())
}
pub fn generate_pus_event_tm<Severity: HasSeverity>(
&mut self,
sender: &mut (impl EcssTmSenderCore + ?Sized),
time_stamp: &[u8],
event: EventU32TypedSev<Severity>,
aux_data: Option<&[u8]>,
) -> Result<bool, EventManError> {
self.generate_pus_event_tm_generic(sender, time_stamp, event.into(), aux_data)
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::events::SeverityInfo;
use crate::pus::MpscTmAsVecSender;
use std::sync::mpsc::{channel, TryRecvError};
const INFO_EVENT: EventU32TypedSev<SeverityInfo> =
EventU32TypedSev::<SeverityInfo>::const_new(1, 0);
const LOW_SEV_EVENT: EventU32 = EventU32::const_new(Severity::LOW, 1, 5);
const EMPTY_STAMP: [u8; 7] = [0; 7];
fn create_basic_man() -> PusEventDispatcher<(), EventU32> {
let reporter = EventReporter::new(0x02, 128).expect("Creating event reporter failed");
let backend = DefaultPusMgmtBackendProvider::<EventU32>::default();
PusEventDispatcher::new(reporter, Box::new(backend))
}
#[test]
fn test_basic() {
let mut event_man = create_basic_man();
let (event_tx, event_rx) = channel();
let mut sender = MpscTmAsVecSender::new(0, "test_sender", event_tx);
let event_sent = event_man
.generate_pus_event_tm(&mut sender, &EMPTY_STAMP, INFO_EVENT, None)
.expect("Sending info event failed");
assert!(event_sent);
// The packet contents are not checked here; packet correctness is verified elsewhere
event_rx.try_recv().expect("Receiving event TM failed");
}
#[test]
fn test_disable_event() {
let mut event_man = create_basic_man();
let (event_tx, event_rx) = channel();
let mut sender = MpscTmAsVecSender::new(0, "test", event_tx);
let res = event_man.disable_tm_for_event(&LOW_SEV_EVENT);
assert!(res.is_ok());
assert!(res.unwrap());
let mut event_sent = event_man
.generate_pus_event_tm_generic(&mut sender, &EMPTY_STAMP, LOW_SEV_EVENT, None)
.expect("Sending low severity event failed");
assert!(!event_sent);
let res = event_rx.try_recv();
assert!(res.is_err());
assert!(matches!(res.unwrap_err(), TryRecvError::Empty));
// Check that only the low severity event was disabled
event_sent = event_man
.generate_pus_event_tm(&mut sender, &EMPTY_STAMP, INFO_EVENT, None)
.expect("Sending info event failed");
assert!(event_sent);
event_rx.try_recv().expect("No info event received");
}
#[test]
fn test_reenable_event() {
let mut event_man = create_basic_man();
let (event_tx, event_rx) = channel();
let mut sender = MpscTmAsVecSender::new(0, "test", event_tx);
let mut res = event_man.disable_tm_for_event_with_sev(&INFO_EVENT);
assert!(res.is_ok());
assert!(res.unwrap());
res = event_man.enable_tm_for_event_with_sev(&INFO_EVENT);
assert!(res.is_ok());
assert!(res.unwrap());
let event_sent = event_man
.generate_pus_event_tm(&mut sender, &EMPTY_STAMP, INFO_EVENT, None)
.expect("Sending info event failed");
assert!(event_sent);
event_rx.try_recv().expect("No info event received");
}
}


@@ -1,280 +0,0 @@
use crate::events::EventU32;
use crate::pus::event_man::{EventRequest, EventRequestWithToken};
use crate::pus::verification::TcStateToken;
use crate::pus::{PartialPusHandlingError, PusPacketHandlerResult, PusPacketHandlingError};
use spacepackets::ecss::event::Subservice;
use spacepackets::ecss::PusPacket;
use std::sync::mpsc::Sender;
use super::{EcssTcInMemConverter, PusServiceBase, PusServiceHelper};
pub struct PusService5EventHandler<TcInMemConverter: EcssTcInMemConverter> {
pub service_helper: PusServiceHelper<TcInMemConverter>,
event_request_tx: Sender<EventRequestWithToken>,
}
impl<TcInMemConverter: EcssTcInMemConverter> PusService5EventHandler<TcInMemConverter> {
pub fn new(
service_handler: PusServiceHelper<TcInMemConverter>,
event_request_tx: Sender<EventRequestWithToken>,
) -> Self {
Self {
service_helper: service_handler,
event_request_tx,
}
}
pub fn handle_one_tc(&mut self) -> Result<PusPacketHandlerResult, PusPacketHandlingError> {
let possible_packet = self.service_helper.retrieve_and_accept_next_packet()?;
if possible_packet.is_none() {
return Ok(PusPacketHandlerResult::Empty);
}
let ecss_tc_and_token = possible_packet.unwrap();
let tc = self
.service_helper
.tc_in_mem_converter
.convert_ecss_tc_in_memory_to_reader(&ecss_tc_and_token.tc_in_memory)?;
let subservice = tc.subservice();
let srv = Subservice::try_from(subservice);
if srv.is_err() {
return Ok(PusPacketHandlerResult::CustomSubservice(
tc.subservice(),
ecss_tc_and_token.token,
));
}
let handle_enable_disable_request = |enable: bool, stamp: [u8; 7]| {
if tc.user_data().len() < 4 {
return Err(PusPacketHandlingError::NotEnoughAppData(
"at least 4 bytes event ID expected".into(),
));
}
let user_data = tc.user_data();
let event_u32 = EventU32::from(u32::from_be_bytes(user_data[0..4].try_into().unwrap()));
let start_token = self
.service_helper
.common
.verification_handler
.borrow_mut()
.start_success(ecss_tc_and_token.token, Some(&stamp))
.map_err(|_| PartialPusHandlingError::Verification);
let partial_error = start_token.clone().err();
let mut token: TcStateToken = ecss_tc_and_token.token.into();
if let Ok(start_token) = start_token {
token = start_token.into();
}
let event_req_with_token = if enable {
EventRequestWithToken {
request: EventRequest::Enable(event_u32),
token,
}
} else {
EventRequestWithToken {
request: EventRequest::Disable(event_u32),
token,
}
};
self.event_request_tx
.send(event_req_with_token)
.map_err(|_| {
PusPacketHandlingError::Other("Forwarding event request failed".into())
})?;
if let Some(partial_error) = partial_error {
return Ok(PusPacketHandlerResult::RequestHandledPartialSuccess(
partial_error,
));
}
Ok(PusPacketHandlerResult::RequestHandled)
};
let mut partial_error = None;
let time_stamp = PusServiceBase::get_current_timestamp(&mut partial_error);
match srv.unwrap() {
Subservice::TmInfoReport
| Subservice::TmLowSeverityReport
| Subservice::TmMediumSeverityReport
| Subservice::TmHighSeverityReport => {
return Err(PusPacketHandlingError::InvalidSubservice(tc.subservice()))
}
Subservice::TcEnableEventGeneration => {
handle_enable_disable_request(true, time_stamp)?;
}
Subservice::TcDisableEventGeneration => {
handle_enable_disable_request(false, time_stamp)?;
}
Subservice::TcReportDisabledList | Subservice::TmDisabledEventsReport => {
return Ok(PusPacketHandlerResult::SubserviceNotImplemented(
subservice,
ecss_tc_and_token.token,
));
}
}
Ok(PusPacketHandlerResult::RequestHandled)
}
}
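// Illustrative sketch (added for clarity, not part of the original file): how a matching
// service 5 telecommand could be constructed on the sending side. The handler above expects
// at least a 4 byte event ID as big-endian application data. The APID and event values are
// assumed example values; the construction mirrors the test code below.
//
// let mut sp_header = SpHeader::tc(0x02, SequenceFlags::Unsegmented, 0, 0).unwrap();
// let sec_header =
//     PusTcSecondaryHeader::new_simple(5, Subservice::TcEnableEventGeneration as u8);
// let event = EventU32::const_new(crate::events::Severity::INFO, 5, 25);
// let mut app_data = [0; 4];
// event
//     .write_to_be_bytes(&mut app_data)
//     .expect("writing event ID failed");
// let enable_event_tc = PusTcCreator::new(&mut sp_header, sec_header, &app_data, true);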
#[cfg(test)]
mod tests {
use delegate::delegate;
use spacepackets::ecss::event::Subservice;
use spacepackets::util::UnsignedEnum;
use spacepackets::{
ecss::{
tc::{PusTcCreator, PusTcSecondaryHeader},
tm::PusTmReader,
},
SequenceFlags, SpHeader,
};
use std::sync::mpsc::{self, Sender};
use crate::pus::event_man::EventRequest;
use crate::pus::tests::SimplePusPacketHandler;
use crate::pus::verification::RequestId;
use crate::{
events::EventU32,
pus::{
event_man::EventRequestWithToken,
tests::{PusServiceHandlerWithSharedStoreCommon, PusTestHarness, TEST_APID},
verification::{TcStateAccepted, VerificationToken},
EcssTcInSharedStoreConverter, PusPacketHandlerResult, PusPacketHandlingError,
},
};
use super::PusService5EventHandler;
const TEST_EVENT_0: EventU32 = EventU32::const_new(crate::events::Severity::INFO, 5, 25);
struct Pus5HandlerWithStoreTester {
common: PusServiceHandlerWithSharedStoreCommon,
handler: PusService5EventHandler<EcssTcInSharedStoreConverter>,
}
impl Pus5HandlerWithStoreTester {
pub fn new(event_request_tx: Sender<EventRequestWithToken>) -> Self {
let (common, srv_handler) = PusServiceHandlerWithSharedStoreCommon::new();
Self {
common,
handler: PusService5EventHandler::new(srv_handler, event_request_tx),
}
}
}
impl PusTestHarness for Pus5HandlerWithStoreTester {
delegate! {
to self.common {
fn send_tc(&mut self, tc: &PusTcCreator) -> VerificationToken<TcStateAccepted>;
fn read_next_tm(&mut self) -> PusTmReader<'_>;
fn check_no_tm_available(&self) -> bool;
fn check_next_verification_tm(&self, subservice: u8, expected_request_id: RequestId);
}
}
}
impl SimplePusPacketHandler for Pus5HandlerWithStoreTester {
delegate! {
to self.handler {
fn handle_one_tc(&mut self) -> Result<PusPacketHandlerResult, PusPacketHandlingError>;
}
}
}
fn event_test(
test_harness: &mut (impl PusTestHarness + SimplePusPacketHandler),
subservice: Subservice,
expected_event_req: EventRequest,
event_req_receiver: mpsc::Receiver<EventRequestWithToken>,
) {
let mut sp_header = SpHeader::tc(TEST_APID, SequenceFlags::Unsegmented, 0, 0).unwrap();
let sec_header = PusTcSecondaryHeader::new_simple(5, subservice as u8);
let mut app_data = [0; 4];
TEST_EVENT_0
.write_to_be_bytes(&mut app_data)
.expect("writing test event failed");
let ping_tc = PusTcCreator::new(&mut sp_header, sec_header, &app_data, true);
let token = test_harness.send_tc(&ping_tc);
let request_id = token.req_id();
test_harness.handle_one_tc().unwrap();
test_harness.check_next_verification_tm(1, request_id);
test_harness.check_next_verification_tm(3, request_id);
// The handler itself does not generate the completion TM, so no further TM should be available.
assert!(test_harness.check_no_tm_available());
let event_request = event_req_receiver
.try_recv()
.expect("no event request received");
assert_eq!(expected_event_req, event_request.request);
}
#[test]
fn test_enabling_event_reporting() {
let (event_request_tx, event_request_rx) = mpsc::channel();
let mut test_harness = Pus5HandlerWithStoreTester::new(event_request_tx);
event_test(
&mut test_harness,
Subservice::TcEnableEventGeneration,
EventRequest::Enable(TEST_EVENT_0),
event_request_rx,
);
}
#[test]
fn test_disabling_event_reporting() {
let (event_request_tx, event_request_rx) = mpsc::channel();
let mut test_harness = Pus5HandlerWithStoreTester::new(event_request_tx);
event_test(
&mut test_harness,
Subservice::TcDisableEventGeneration,
EventRequest::Disable(TEST_EVENT_0),
event_request_rx,
);
}
#[test]
fn test_empty_tc_queue() {
let (event_request_tx, _) = mpsc::channel();
let mut test_harness = Pus5HandlerWithStoreTester::new(event_request_tx);
let result = test_harness.handle_one_tc();
assert!(result.is_ok());
let result = result.unwrap();
if let PusPacketHandlerResult::Empty = result {
} else {
panic!("unexpected result type {result:?}")
}
}
#[test]
fn test_sending_custom_subservice() {
let (event_request_tx, _) = mpsc::channel();
let mut test_harness = Pus5HandlerWithStoreTester::new(event_request_tx);
let mut sp_header = SpHeader::tc(TEST_APID, SequenceFlags::Unsegmented, 0, 0).unwrap();
let sec_header = PusTcSecondaryHeader::new_simple(5, 200);
let ping_tc = PusTcCreator::new_no_app_data(&mut sp_header, sec_header, true);
test_harness.send_tc(&ping_tc);
let result = test_harness.handle_one_tc();
assert!(result.is_ok());
let result = result.unwrap();
if let PusPacketHandlerResult::CustomSubservice(subservice, _) = result {
assert_eq!(subservice, 200);
} else {
panic!("unexpected result type {result:?}")
}
}
#[test]
fn test_sending_invalid_app_data() {
let (event_request_tx, _) = mpsc::channel();
let mut test_harness = Pus5HandlerWithStoreTester::new(event_request_tx);
let mut sp_header = SpHeader::tc(TEST_APID, SequenceFlags::Unsegmented, 0, 0).unwrap();
let sec_header =
PusTcSecondaryHeader::new_simple(5, Subservice::TcEnableEventGeneration as u8);
let ping_tc = PusTcCreator::new(&mut sp_header, sec_header, &[0, 1, 2], true);
test_harness.send_tc(&ping_tc);
let result = test_harness.handle_one_tc();
assert!(result.is_err());
let result = result.unwrap_err();
if let PusPacketHandlingError::NotEnoughAppData(string) = result {
assert_eq!(string, "at least 4 bytes event ID expected");
} else {
panic!("unexpected result type {result:?}")
}
}
}


@@ -1 +0,0 @@
pub use spacepackets::ecss::hk::*;

File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.