diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..6e1ad4a
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,18 @@
+[run]
+omit =
+ agents/*
+ certs/*
+ client/*
+ dist/*
+ images/*
+ simulation_bridge/docs/*
+ simulation_bridge/resources/*
+ simulation_bridge/test/*
+ */venv/*
+ */.venv/*
+ */__init__.py
+
+[report]
+exclude_lines =
+ if __name__ == .__main__.:
+ pragma: no cover
diff --git a/.github/workflows/simulation-bridge-tests.yml b/.github/workflows/simulation-bridge-tests.yml
new file mode 100644
index 0000000..e327931
--- /dev/null
+++ b/.github/workflows/simulation-bridge-tests.yml
@@ -0,0 +1,45 @@
+name: Simulation Bridge Tests
+
+on:
+ pull_request:
+
+jobs:
+ test:
+ name: Test on ${{ matrix.os }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os: [ubuntu-latest, windows-latest, macos-latest]
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install Poetry
+ run: |
+ python -m pip install --upgrade pip
+ curl -sSL https://install.python-poetry.org | python -
+ env:
+ POETRY_HOME: ${{ runner.temp }}/poetry
+
+ - name: Add Poetry to PATH
+ run: echo "${{ runner.temp }}/poetry/bin" >> $GITHUB_PATH
+ if: runner.os != 'Windows'
+
+ - name: Add Poetry to PATH on Windows
+ run: echo "${{ runner.temp }}\poetry\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8
+ if: runner.os == 'Windows'
+ shell: pwsh
+
+ - name: Install dependencies via Poetry
+ run: |
+ poetry install
+
+ - name: Run Pytest
+ run: |
+ poetry run pytest
diff --git a/.gitignore b/.gitignore
index 353f881..d1087db 100644
--- a/.gitignore
+++ b/.gitignore
@@ -198,8 +198,16 @@ agents/matlab/dist/
config*.yaml
simulation*.yaml
*.csv
+interactive.py
+*_interactive.py
+**/examples/interactive-simulation/*
#SIMULATION BRIDGE
/logs
-
+/certs
+*.pem
+*_use.yaml
+client/
+dist/
.venv*
certs
+
diff --git a/.pep8 b/.pep8
new file mode 100644
index 0000000..50f451a
--- /dev/null
+++ b/.pep8
@@ -0,0 +1,2 @@
+[pycodestyle]
+max_line_length = 80
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..92ebd16
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,587 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+extension-pkg-whitelist=
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+# Specify a score threshold to be exceeded before program exits with error.
+fail-under=10.0
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the ignore-list. The
+# regex matches against paths and can be in Posix or Windows format.
+ignore-paths=matlab_agent/.venv
+
+# Files or directories matching the regex patterns are skipped. The regex
+# matches against base names, not paths. The default value ignores emacs file
+# locks
+ignore-patterns=docs|venv|.venv|resources
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+init-hook='import sys; sys.path.append("src")'
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Minimum Python version to use for version dependent checks. Will default to
+# the version used to run pylint.
+py-version=3.10
+
+# Discover python modules and packages in the file system subtree.
+recursive=no
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
+# UNDEFINED.
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then re-enable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=raw-checker-failed,
+ bad-inline-option,
+ locally-disabled,
+ file-ignored,
+ suppressed-message,
+ useless-suppression,
+ deprecated-pragma,
+ use-symbolic-message-instead,
+ missing-function-docstring,
+ missing-module-docstring,
+ missing-class-docstring
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'fatal', 'error', 'warning', 'refactor',
+# 'convention', and 'info' which contain the number of messages in each
+# category, as well as 'statement' which is the total number of statements
+# analyzed. This score is used by the global evaluation report (RP0004).
+evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit,argparse.parse_error
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# class is considered mixin if its name matches the mixin-class-rgx option.
+ignore-mixin-members=yes
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# Regex pattern to define which classes are considered mixins ignore-mixin-
+# members is set to 'yes'
+mixin-class-rgx=.*[Mm]ixin
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[LOGGING]
+
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+#argument-naming-style=camelCase
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style. If left empty, argument names will be checked with the set
+# naming style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+#attr-naming-style=camelCase
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style. If left empty, attribute names will be checked with the set naming
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Bad variable names regexes, separated by a comma. If names match any regex,
+# they will always be refused
+bad-names-rgxs=
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style. If left empty, class attribute names will be checked
+# with the set naming style.
+#class-attribute-rgx=
+
+# Naming style matching correct class constant names.
+class-const-naming-style=UPPER_CASE
+
+# Regular expression matching correct class constant names. Overrides class-
+# const-naming-style. If left empty, class constant names will be checked with
+# the set naming style.
+#class-const-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style. If left empty, class names will be checked with the set naming style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style. If left empty, constant names will be checked with the set naming
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+#function-naming-style=camelCase
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style. If left empty, function names will be checked with the set
+# naming style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _
+
+# Good variable names regexes, separated by a comma. If names match any regex,
+# they will always be accepted
+good-names-rgxs=
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style. If left empty, inline iteration names will be checked
+# with the set naming style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+#method-naming-style=camelCase
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style. If left empty, method names will be checked with the set naming style.
+#method-rgx=
+
+# Naming style matching correct module names.
+#module-naming-style=camelCase
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style. If left empty, module names will be checked with the set naming style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Regular expression matching correct type variable names. If left empty, type
+# variable names will be checked with the set naming style.
+#typevar-rgx=
+
+# Naming style matching correct variable names.
+#variable-naming-style=camelCase
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style. If left empty, variable names will be checked with the set
+# naming style.
+#variable-rgx=
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the 'python-enchant' package.
+spelling-dict=
+
+# List of comma separated words that should be considered directives if they
+# appear and the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO
+
+# Regular expression of note tags to take in consideration.
+#notes-rgx=
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of names allowed to shadow builtins
+allowed-redefined-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[SIMILARITIES]
+
+# Comments are removed from the similarity computation
+ignore-comments=yes
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings=yes
+
+# Imports are removed from the similarity computation
+ignore-imports=no
+
+# Signatures are removed from the similarity computation
+ignore-signatures=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=no
+
+# This flag controls whether the implicit-str-concat should generate a warning
+# on implicit string concatenation in sequences defined over several lines.
+check-str-concat-over-line-jumps=no
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=
+
+# Output a graph (.gv or any supported image format) of external dependencies
+# to the given file (report RP0402 must not be disabled).
+ext-import-graph=
+
+# Output a graph (.gv or any supported image format) of all (i.e. internal and
+# external) dependencies to the given file (report RP0402 must not be
+# disabled).
+import-graph=
+
+# Output a graph (.gv or any supported image format) of internal dependencies
+# to the given file (report RP0402 must not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[CLASSES]
+
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp,
+ __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=1
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=builtins.BaseException,builtins.Exception
diff --git a/.vscode/settings.json b/.vscode/settings.json
index c6aa1d5..61b4b09 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,7 +1,6 @@
{
- "python.testing.pytestArgs": ["."],
"python.testing.pytestEnabled": true,
- "python.testing.unittestEnabled": false,
- "python.testing.cwd": "${workspaceFolder}/agents/matlab",
- "python.testing.autoTestDiscoverOnSaveEnabled": true
+ "python.testing.pytestArgs": ["simulation_bridge/test"],
+ "python.testing.autoTestDiscoverOnSaveEnabled": true,
+ "python.testing.promptToConfigure": false
}
diff --git a/INSTRUCTION.md b/INSTRUCTION.md
deleted file mode 100644
index 21639b5..0000000
--- a/INSTRUCTION.md
+++ /dev/null
@@ -1,85 +0,0 @@
-# Setup Instructions
-
-Adhere to the following steps to configure and execute the **Simulation Bridge** effectively.
-
-## Install Requirements
-
-### 1. Install Poetry
-
-Ensure that Poetry is installed on your system. If it is not already installed, execute the following command:
-
-```bash
-python3 -m pip install --user pipx
-python3 -m pipx ensurepath
-pipx install poetry
-```
-
-Verify the installation by checking the Poetry version:
-
-```bash
-poetry --version
-```
-
-### 2. Clone the Repository
-
-Clone the Simulation Bridge repository to your local environment:
-
-```bash
-git clone https://github.com/INTO-CPS-Association/simulation-bridge
-cd simulation-bridge
-```
-
-### 3. Install Dependencies
-
-Use Poetry to install the project dependencies specified in the `pyproject.toml` file:
-
-```bash
-poetry install
-```
-
-This will install all required libraries for the project.
-
----
-
-## Install RabbitMQ
-
-The Simulation Bridge requires an active RabbitMQ server. You can choose one of the following options:
-
-### Option 1: Install RabbitMQ Locally
-
-If you do not have access to an external RabbitMQ server, you can install one locally. On macOS, use Homebrew:
-
-```bash
-brew update
-brew install rabbitmq
-brew services start rabbitmq
-```
-
-Verify that RabbitMQ is running:
-
-```bash
-brew services list
-rabbitmqctl status
-lsof -i :5672
-```
-
-### Option 2: Use a Remote RabbitMQ Server
-
-Alternatively, connect to an existing RabbitMQ instance hosted on a remote server (on-premise or cloud).
-
-## Author
-
-
-
-
-
diff --git a/README.md b/README.md
index d5c9b7f..baef49d 100644
--- a/README.md
+++ b/README.md
@@ -1,69 +1,95 @@
# Simulation Bridge
-The **Simulation Bridge** is an open-source middleware solution designed to enable seamless and dynamic communication between Digital Twins (DT), Mock Physical Twins (MockPT), and their dedicated Simulator counterparts.
-It serves as a **modular**, **reusable**, and **bidirectional** bridge, supporting multiple protocols and interaction modes to ensure interoperability across diverse simulation environments.
+Simulation Bridge (_sim-bridge_) is a modular, reusable, and bidirectional middleware designed to orchestrate distributed simulation systems involving Digital Twins (DTs), Mock Physical Twins (MockPTs), and Physical Twins (PTs).
-Built around the concept of simulation, the bridge facilitates control, monitoring, and data exchange among the involved entities, providing a universal middleware solution that enhances flexibility and integration within simulation-based systems.
+It acts as an intelligent communication layer between DTs, PTs, and heterogeneous simulation environments, integrating and coordinating simulations developed with diverse models, technologies, and protocols.
-
+_sim-bridge_ exposes a unified interface that enables seamless data exchange and interaction. It receives inputs from DTs or PTs, routes them to the appropriate simulators through dedicated components called _Simulator Agents_, collects the simulation results, and delivers them back to the clients.
-## Overview
+
+
+
+ Figure 1: Simulation Bridge Architecture
+
-
+> Please refer to the [**User Guide**](USERGUIDE.md) for detailed requirements, configuration instructions, and usage guidelines for the Simulation Bridge.
----
+## Table of Contents
+
+- [Simulation Bridge](#simulation-bridge)
+ - [Table of Contents](#table-of-contents)
+ - [Key Features](#key-features)
+ - [Agents](#agents)
+ - [Modes of Simulation](#modes-of-simulation)
+ - [Plug-in Protocol Adapters](#plug-in-protocol-adapters)
+ - [Documentation](#documentation)
+ - [Simulation Bridge](#simulation-bridge-1)
+ - [Matlab Agent](#matlab-agent)
+ - [Package Development](#package-development)
+ - [License](#license)
+ - [Author](#author)
## Key Features
-### 🌐 Multi-Protocol Support
+#### Agents
+
+Agent is a software connector that acts as an interpreter between _sim-bridge_ and the specific simulator. Each simulator requires its own Agent, which must be designed to be reusable across diverse simulation scenarios for that simulator.
+
+> Refer to the [Matlab Agent](agents/matlab/README.md) for an implementation example.
-- **RabbitMQ** (default)
-- **MQTT**
-- **REST API**
-- Custom protocol plugins for tailored integrations
+#### Modes of Simulation
-### ⚙️ Flexible Interaction Modes
+_sim-bridge_ supports Batch and Streaming simulation modes, allowing for both discrete and real-time streaming simulation workflows:
-| **Mode** | **Description** |
-| ------------- | ------------------------------------------------- |
-| **Batch** | Execute simulations without real-time monitoring. |
-| **Streaming** | Enable real-time monitoring and control. |
+| Mode | Description |
+| --------- | ----------------------------------------------------------------------------------------------------------------------------------------------- |
+| Batch | Inputs are provided at the start, the simulation runs to completion without intermediate monitoring, and results are delivered only at the end. |
+| Streaming | Enables real-time, step-by-step updates from the simulation during execution. |
-### 🔍 Intelligent Discoverability
+#### Plug-in Protocol Adapters
-- Dynamic capability detection through an advanced agent system.
-- Automatic registration of simulator features for seamless integration.
+The system follows a _plug-in-based protocol adapter architecture_, enabling seamless future integration of additional protocols.
+It currently supports MQTT, RabbitMQ, and RESTful interfaces, allowing external clients to communicate with the _sim-bridge_ through these channels.
-### 🔄 Advanced Data Transformation
+Supports secure (TLS) and insecure connections for all protocols: MQTT/mqtts, AMQP/amqps, HTTP/2.0, and HTTPS.
-- Effortless conversion between **JSON**, **XML**, and **CSV** formats.
-- Protocol-agnostic data formatting to ensure compatibility across systems.
+> TLS certificates (cert.pem, key.pem) are generated automatically when needed.
----
+
+
+
+ Figure 2: Plug-in Protocol Adapter Architecture
+
## Documentation
### Simulation Bridge
-- [📘 **Instruction Guide** ↗](INSTRUCTION.md): A comprehensive guide to set up and configure the Simulation Bridge.
-- [🚀 **Usage Guide** ↗](USAGE.md): Detailed instructions on how to run the Simulation Bridge and its components.
+- [**User Guide** ↗](USERGUIDE.md): Comprehensive guide covering system requirements, configuration steps, and detailed usage instructions for the Simulation Bridge.
+- [**Internal Architecture** ↗](simulation_bridge/docs/internal_architecture.md): Overview of the system's architecture, key modules, and their interactions.
+- [**Class Diagram** ↗](simulation_bridge/docs/class_diagram.md): UML Class Diagram of the Simulation Bridge Architecture
-### Simulators
+### Matlab Agent
-#### Matlab
+- [**Matlab Agent** ↗](agents/matlab/README.md): Explanation of the MATLAB agent functionality and configuration.
+- [**Matlab Simulation Constraints** ↗](agents/matlab/matlab_agent/docs/README.md): A breakdown of the constraints and requirements for MATLAB-driven simulations.
-- [🔗 **Matlab Agent** ↗](agents/matlab/README.md): Explanation of the MATLAB agent functionality and configuration.
-- [⚙️ **Matlab Simulation Constraints** ↗](agents/matlab/matlab_agent/docs/README.md): A breakdown of the constraints and requirements for MATLAB-driven simulations.
+## Package Development
----
+The developer-specific commands are
+
+```bash
+pylint simulation_bridge
+autopep8 --in-place --aggressive --recursive 'simulation_bridge'
+pytest --cov=simulation_bridge --cov-report=term --cov-report=html simulation_bridge/test/
+open htmlcov/index.html
+```
## License
This project is licensed under the **INTO-CPS Association Public License v1.0**.
See the [LICENSE](./LICENSE) file for full license text.
----
-
## Author
Marco Melloni Digital Automation Engineering Student University of Modena and Reggio Emilia, Department of Sciences and Methods for Engineering (DISMI)
Prof. Marco Picone Associate Professor University of Modena and Reggio Emilia, Department of Sciences and Methods for Engineering (DISMI)
Dr. Prasad Talasila Postdoctoral Researcher Aarhus University
diff --git a/USAGE.md b/USAGE.md
deleted file mode 100644
index 75400bd..0000000
--- a/USAGE.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# Usage Guide
-
-## Installation Instructions
-
-For comprehensive installation steps, please refer to the [INSTRUCTION.md](INSTRUCTION.md) file located in the project directory. Ensure that all prerequisites are met before proceeding with the installation.
-
-## Simulation Bridge
-
-To execute the Simulation Bridge, run the following command:
-
-```bash
-poetry run simulation-bridge
-```
-
-Verify that all required dependencies are installed and properly configured prior to execution.
-
-## Author
-
-
-
-
-
diff --git a/USERGUIDE.md b/USERGUIDE.md
new file mode 100644
index 0000000..d2aa481
--- /dev/null
+++ b/USERGUIDE.md
@@ -0,0 +1,321 @@
+# User Guide
+
+This guide outlines how to configure and execute the _sim-bridge_ application. It provides detailed information on the configuration parameters, command-line options, and execution procedures.
+
+## Table of Contents
+
+- [User Guide](#user-guide)
+ - [Table of Contents](#table-of-contents)
+ - [Requirements](#requirements)
+ - [Clone the Repository](#clone-the-repository)
+ - [Install Poetry and Create Virtual Environment](#install-poetry-and-create-virtual-environment)
+ - [Install Project Dependencies](#install-project-dependencies)
+ - [Install RabbitMQ](#install-rabbitmq)
+ - [Option 1: Install RabbitMQ Locally](#option-1-install-rabbitmq-locally)
+ - [Option 2: Use a Remote RabbitMQ Server](#option-2-use-a-remote-rabbitmq-server)
+ - [Configuration](#configuration)
+ - [Usage](#usage)
+ - [Generating a Template Configuration](#generating-a-template-configuration)
+ - [Generating a Complete Project Structure](#generating-a-complete-project-structure)
+ - [Running with Default Configuration](#running-with-default-configuration)
+ - [Running with Custom Configuration](#running-with-custom-configuration)
+ - [Use _sim-bridge_ as a Pip-Installable Package](#use-sim-bridge-as-a-pip-installable-package)
+ - [Build the Package](#build-the-package)
+ - [Install the Package](#install-the-package)
+ - [Use the Package](#use-the-package)
+ - [Command-Line Options Overview](#command-line-options-overview)
+ - [Author](#author)
+
+## Requirements
+
+### Clone the Repository
+
+```bash
+git clone https://github.com/INTO-CPS-Association/simulation-bridge.git
+cd simulation-bridge
+```
+
+### Install Poetry and Create Virtual Environment
+
+Ensure Poetry is installed on your system:
+
+```bash
+python3 -m pip install --user pipx
+python3 -m pipx ensurepath
+pipx install poetry
+```
+
+Verify the installation:
+
+```bash
+poetry --version
+```
+
+Activate the virtual environment:
+
+```bash
+poetry env activate
+```
+
+> **Important:** The `poetry env activate` command prints the activation command. Copy and run the displayed command:
+>
+> ```bash
+> source /path/to/virtualenv/bin/activate
+> ```
+
+Verify the environment is active:
+
+```bash
+which python
+```
+
+### Install Project Dependencies
+
+Install all dependencies defined in `pyproject.toml`:
+
+```bash
+poetry install
+```
+
+### Install RabbitMQ
+
+The _sim-bridge_ requires an active RabbitMQ server. Choose one of the following options:
+
+#### Option 1: Install RabbitMQ Locally
+
+On macOS using Homebrew:
+
+```bash
+brew update
+brew install rabbitmq
+brew services start rabbitmq
+```
+
+Verify RabbitMQ is running:
+
+```bash
+brew services list
+rabbitmqctl status
+lsof -i :5672
+```
+
+#### Option 2: Use a Remote RabbitMQ Server
+
+Connect to an existing RabbitMQ instance hosted on a remote server.
+
+## Configuration
+
+The _sim-bridge_ uses a YAML-based configuration file. Below is a comprehensive example:
+
+```yaml
+# Unique identifier for this simulation bridge instance
+simulation_bridge:
+ bridge_id: simulation_bridge # ID used to identify this instance of the sim-bridge
+
+# RabbitMQ protocol adapter configuration
+rabbitmq:
+ host: localhost # RabbitMQ broker hostname or IP address
+ port: 5672 # Port for non-TLS AMQP connections (default: 5672)
+ vhost: / # Virtual host used in RabbitMQ
+ username: guest # Username for RabbitMQ authentication
+ password: guest # Password for RabbitMQ authentication
+ tls: false # Whether to use TLS (amqps) or not
+
+ infrastructure:
+ exchanges:
+ - name: ex.input.bridge # Exchange for receiving input messages from external systems
+ type: topic # Exchange type (topic allows pattern-based routing)
+ durable: true # Exchange survives broker restarts
+ auto_delete: false # Exchange won't be deleted when no longer used
+ internal: false # Exchange is available to external producers
+
+ - name: ex.bridge.output # Exchange for sending output messages to external systems
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ - name: ex.sim.result # Exchange for simulation result messages
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ - name: ex.bridge.result # Exchange for bridge-processed results
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ queues:
+ - name: Q.bridge.input # Queue for receiving messages intended for the bridge
+ durable: true # Queue survives broker restarts
+ exclusive: false # Queue is not exclusive to one connection
+ auto_delete: false # Queue will not be deleted automatically
+
+ - name: Q.bridge.result # Queue for receiving simulation results
+ durable: true
+ exclusive: false
+ auto_delete: false
+
+ bindings:
+ - queue: Q.bridge.input # Bind the input queue...
+ exchange: ex.input.bridge # ...to this exchange...
+ routing_key: "#" # ...with wildcard routing (all messages)
+
+ - queue: Q.bridge.result # Bind the result queue...
+ exchange: ex.sim.result # ...to receive all simulation result messages
+ routing_key: "#" # ...with wildcard routing
+
+# MQTT protocol adapter configuration
+mqtt:
+ host: localhost # MQTT broker hostname or IP
+ port: 1883 # Port for MQTT (1883 for non-TLS, 8883 for TLS)
+ keepalive: 60 # Keep-alive interval in seconds for MQTT client
+ input_topic: bridge/input # Topic to subscribe to for receiving messages
+ output_topic: bridge/output # Topic to publish processed messages to
+ qos: 0 # Quality of Service level (0 = at most once)
+ username: guest # Username for MQTT authentication
+ password: guest # Password for MQTT authentication
+ tls: false # Whether to use secure MQTT (mqtts) or not
+
+# REST protocol adapter configuration
+rest:
+ host: 0.0.0.0 # REST API binds to all network interfaces
+ port: 5000 # Port for RESTful HTTP server
+ endpoint: /message # Endpoint path for sending messages to the bridge
+ debug: false # Disable Flask debug mode (set to true for development)
+ certfile: certs/cert.pem # Path to the TLS certificate file for HTTPS
+ keyfile: certs/key.pem # Path to the private key file for HTTPS
+
+# Logging configuration
+logging:
+ level: INFO # Logging level (e.g., DEBUG, INFO, WARNING, ERROR)
+ format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s" # Format of log messages
+ file: logs/sim_bridge.log # Path to the log output file
+```
+
+> **Note:** Certificate files (`cert.pem` and `key.pem`) will be automatically created by the _sim-bridge_ if missing.
+
+## Usage
+
+### Generating a Template Configuration
+
+Create a default configuration file:
+
+```bash
+poetry run simulation-bridge --generate-config
+```
+
+This generates a `config.yaml` file in the current directory. Existing files will not be overwritten.
+
+### Generating a Complete Project Structure
+
+Generate a complete example project with clients and configurations:
+
+```bash
+poetry run simulation-bridge --generate-project
+```
+
+This creates the following structure:
+
+```
+.
+├── config.yaml # Main configuration file
+├── client/
+│ ├── README.md # Client documentation
+│ ├── simulation.yaml # Example simulation payload
+│ ├── mqtt/
+│ │ ├── mqtt_client.py # MQTT client implementation
+│ │ ├── mqtt_use.yaml # MQTT usage configuration
+│ │ └── requirements.txt # MQTT client requirements
+│ ├── rabbitmq/
+│ │ ├── rabbitmq_client.py # RabbitMQ client implementation
+│ │ ├── rabbitmq_use.yaml # RabbitMQ usage configuration
+│ │ └── requirements.txt # RabbitMQ client requirements
+│ └── rest/
+│ ├── rest_client.py # REST client implementation
+│ ├── rest_use.yaml # REST usage configuration
+│ └── requirements.txt # REST client requirements
+```
+
+### Running with Default Configuration
+
+Launch the bridge with the default configuration:
+
+```bash
+poetry run simulation-bridge
+```
+
+The application loads configuration from `simulation-bridge/config.yaml` by default.
+
+### Running with Custom Configuration
+
+Specify a custom configuration file:
+
+```bash
+poetry run simulation-bridge --config-file /path/to/config.yaml
+```
+
+Or use the shorthand syntax:
+
+```bash
+poetry run simulation-bridge -c /path/to/config.yaml
+```
+
+## Use _sim-bridge_ as a Pip-Installable Package
+
+If you prefer to use `simulation-bridge` as a standalone Python package, you can build and install it using the following steps:
+
+### Build the Package
+
+In the root of the project (where `pyproject.toml` is located), run:
+
+```bash
+poetry build
+```
+
+This will generate the distribution files in the `dist/` directory:
+
+- `simulation_bridge-<version>.tar.gz`
+- `simulation_bridge-<version>-py3-none-any.whl`
+
+### Install the Package
+
+You can install the built package using pip:
+
+```bash
+pip install dist/simulation_bridge-<version>-py3-none-any.whl
+```
+
+Replace `<version>` with the actual version number (e.g., `0.1.0`).
+
+### Use the Package
+
+After installation, the `simulation-bridge` command will be available globally in your environment:
+
+```bash
+simulation-bridge --help
+```
+
+You can use it exactly as described in the previous sections:
+
+```bash
+simulation-bridge --generate-config
+simulation-bridge --config-file config.yaml
+```
+
+> **Note:** When using the installed package, you no longer need to prefix commands with `poetry run`.
+
+## Command-Line Options Overview
+
+| Option | Description |
+| --------------------- | -------------------------------------------------------------------- |
+| `--generate-config` | Generates a default configuration file in the current directory |
+| `--generate-project` | Generates a sample project with clients, configs, and usage examples |
+| `--config-file`, `-c` | Path to a custom configuration file |
+| `--help`, `-h` | Displays help information for available options |
+
+## Author
+
+ Marco Melloni Digital Automation Engineering Student University of Modena and Reggio Emilia, Department of Sciences and Methods for Engineering (DISMI)
Prof. Marco Picone Associate Professor University of Modena and Reggio Emilia, Department of Sciences and Methods for Engineering (DISMI)
Dr. Prasad Talasila Postdoctoral Researcher Aarhus University
diff --git a/agents/matlab/README.md b/agents/matlab/README.md
index ff381e3..ee4c13d 100644
--- a/agents/matlab/README.md
+++ b/agents/matlab/README.md
@@ -165,6 +165,7 @@ rabbitmq:
password: guest # The password for authenticating with RabbitMQ.
heartbeat: 600 # The heartbeat interval (in seconds) to keep the connection alive.
vhost: / # The virtual host to use for RabbitMQ connections.
+ tls: false # Enable TLS/SSL encryption (true = enabled, false = disabled)
simulation:
path: /Users/foo/simulation-bridge/agents/matlab/matlab_agent/docs/examples # The file path to the folder containing MATLAB simulation files.
diff --git a/agents/matlab/matlab_agent/config/config.yaml.template b/agents/matlab/matlab_agent/config/config.yaml.template
index 648bcef..c1fc04a 100644
--- a/agents/matlab/matlab_agent/config/config.yaml.template
+++ b/agents/matlab/matlab_agent/config/config.yaml.template
@@ -9,6 +9,7 @@ rabbitmq:
password: guest
heartbeat: 600
vhost: /
+ tls: false
simulation:
path: /Users/foo/simulation-bridge/agents/matlab/matlab_agent/docs/examples
diff --git a/agents/matlab/matlab_agent/src/comm/rabbitmq/rabbitmq_manager.py b/agents/matlab/matlab_agent/src/comm/rabbitmq/rabbitmq_manager.py
index 2ab5c77..f5b84be 100644
--- a/agents/matlab/matlab_agent/src/comm/rabbitmq/rabbitmq_manager.py
+++ b/agents/matlab/matlab_agent/src/comm/rabbitmq/rabbitmq_manager.py
@@ -4,6 +4,7 @@
set up exchanges and queues, and send/receive messages within a simulation agent framework.
"""
import sys
+import ssl
import uuid
from typing import Dict, Any, Callable, Optional
@@ -61,13 +62,27 @@ def connect(self) -> bool:
)
vhost = rabbitmq_config.get('vhost', '/')
logger.debug(f"Using vhost: {vhost}")
- parameters = pika.ConnectionParameters(
- host=rabbitmq_config.get('host', 'localhost'),
- port=rabbitmq_config.get('port', 5672),
- virtual_host=vhost,
- credentials=credentials,
- heartbeat=rabbitmq_config.get('heartbeat', 600)
- )
+ use_tls = rabbitmq_config.get('tls', False)
+
+ if use_tls:
+ context = ssl.create_default_context()
+ ssl_options = pika.SSLOptions(context, rabbitmq_config.get('host', 'localhost'))
+ parameters = pika.ConnectionParameters(
+ host=rabbitmq_config.get('host', 'localhost'),
+ port=rabbitmq_config.get('port', 5671),
+ virtual_host=vhost,
+ credentials=credentials,
+ ssl_options=ssl_options,
+ heartbeat=rabbitmq_config.get('heartbeat', 600)
+ )
+ else:
+ parameters = pika.ConnectionParameters(
+ host=rabbitmq_config.get('host', 'localhost'),
+ port=rabbitmq_config.get('port', 5672),
+ virtual_host=vhost,
+ credentials=credentials,
+ heartbeat=rabbitmq_config.get('heartbeat', 600)
+ )
self.connection = pika.BlockingConnection(parameters)
if self.connection.is_open:
diff --git a/agents/matlab/matlab_agent/src/core/streaming.py b/agents/matlab/matlab_agent/src/core/streaming.py
index 7c4b05c..f59471b 100644
--- a/agents/matlab/matlab_agent/src/core/streaming.py
+++ b/agents/matlab/matlab_agent/src/core/streaming.py
@@ -257,10 +257,11 @@ def start(self, performance_monitor: PerformanceMonitor) -> None:
self.sim_file,
'streaming',
self.response_templates,
- outputs={'status': 'completed'},
+ outputs={},
metadata=self.get_metadata(),
bridge_meta=self.bridge_meta,
- request_id=self.request_id
+ request_id=self.request_id,
+ status='Simulation Started',
)
)
diff --git a/agents/matlab/matlab_agent/src/utils/config_manager.py b/agents/matlab/matlab_agent/src/utils/config_manager.py
index b89565d..7a136b3 100644
--- a/agents/matlab/matlab_agent/src/utils/config_manager.py
+++ b/agents/matlab/matlab_agent/src/utils/config_manager.py
@@ -36,6 +36,7 @@ class Config(BaseModel):
rabbitmq_password: str = Field(default="guest")
rabbitmq_heartbeat: int = Field(default=600)
rabbitmq_virtual_host: str = Field(default="/")
+ rabbitmq_tls: bool = Field(default=False)
# Simulation folder path
simulation_path: str = Field(default=".")
@@ -103,7 +104,8 @@ def to_dict(self) -> Dict[str, Any]:
"username": self.rabbitmq_username,
"password": self.rabbitmq_password,
"heartbeat": self.rabbitmq_heartbeat,
- "vhost": self.rabbitmq_virtual_host
+ "vhost": self.rabbitmq_virtual_host,
+ "tls": self.rabbitmq_tls
},
"simulation": {
"path": self.simulation_path
@@ -175,6 +177,7 @@ def from_dict(cls, config_dict: Dict[str, Any]) -> 'Config':
flat_config["rabbitmq_heartbeat"] = rabbitmq.get("heartbeat", 600)
flat_config["rabbitmq_virtual_host"] = rabbitmq.get(
"vhost", "/")
+ flat_config["rabbitmq_tls"] = rabbitmq.get("tls", False)
if simulation := config_dict.get("simulation", {}):
flat_config["simulation_path"] = simulation.get(
diff --git a/images/event_based_sm.png b/images/event_based_sm.png
new file mode 100644
index 0000000..1ad2977
Binary files /dev/null and b/images/event_based_sm.png differ
diff --git a/images/plug-in-protocol-adapter.png b/images/plug-in-protocol-adapter.png
new file mode 100644
index 0000000..58e6dee
Binary files /dev/null and b/images/plug-in-protocol-adapter.png differ
diff --git a/images/software_architecture.png b/images/software_architecture.png
index 60866da..e5d3774 100644
Binary files a/images/software_architecture.png and b/images/software_architecture.png differ
diff --git a/images/software_architecture_scheme.png b/images/software_architecture_scheme.png
new file mode 100644
index 0000000..60866da
Binary files /dev/null and b/images/software_architecture_scheme.png differ
diff --git a/poetry.lock b/poetry.lock
index 332e810..8091b93 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,17 @@
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
+[[package]]
+name = "aiofiles"
+version = "24.1.0"
+description = "File support for asyncio."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
+ {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
+]
+
[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -12,6 +24,55 @@ files = [
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
+[[package]]
+name = "anyio"
+version = "4.9.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"},
+ {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"},
+]
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
+
+[package.extras]
+doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
+test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
+trio = ["trio (>=0.26.1)"]
+
+[[package]]
+name = "astroid"
+version = "3.3.10"
+description = "An abstract syntax tree for Python with inference support."
+optional = false
+python-versions = ">=3.9.0"
+groups = ["main"]
+files = [
+ {file = "astroid-3.3.10-py3-none-any.whl", hash = "sha256:104fb9cb9b27ea95e847a94c003be03a9e039334a8ebca5ee27dafaf5c5711eb"},
+ {file = "astroid-3.3.10.tar.gz", hash = "sha256:c332157953060c6deb9caa57303ae0d20b0fbdb2e59b4a4f2a6ba49d0a7961ce"},
+]
+
+[[package]]
+name = "autopep8"
+version = "2.3.2"
+description = "A tool that automatically formats Python code to conform to the PEP 8 style guide"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"},
+ {file = "autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758"},
+]
+
+[package.dependencies]
+pycodestyle = ">=2.12.0"
+
[[package]]
name = "black"
version = "25.1.0"
@@ -57,16 +118,223 @@ d = ["aiohttp (>=3.10)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "blinker"
+version = "1.9.0"
+description = "Fast, simple object-to-object and broadcast signaling"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
+ {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
+]
+
+[[package]]
+name = "certifi"
+version = "2025.6.15"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057"},
+ {file = "certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\""
+files = [
+ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+ {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+ {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+ {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+ {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+ {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+ {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
+ {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
+ {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
+ {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
+ {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
+ {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
+ {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
+ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
+ {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"},
+ {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"},
+ {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"},
+ {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"},
+ {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"},
+ {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"},
+ {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"},
+ {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"},
+ {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"},
+ {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"},
+]
+
[[package]]
name = "click"
-version = "8.1.8"
+version = "8.2.1"
description = "Composable command line interface toolkit"
optional = false
-python-versions = ">=3.7"
-groups = ["dev"]
+python-versions = ">=3.10"
+groups = ["main", "dev"]
files = [
- {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
- {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
+ {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"},
+ {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"},
]
[package.dependencies]
@@ -78,105 +346,207 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-groups = ["dev"]
-markers = "sys_platform == \"win32\" or platform_system == \"Windows\""
+groups = ["main", "dev"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
[[package]]
-name = "contourpy"
-version = "1.3.2"
-description = "Python library for calculating contours of 2D quadrilateral grids"
+name = "colorlog"
+version = "6.9.0"
+description = "Add colours to the output of Python's logging module."
optional = false
-python-versions = ">=3.10"
+python-versions = ">=3.6"
groups = ["main"]
files = [
- {file = "contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934"},
- {file = "contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989"},
- {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9be002b31c558d1ddf1b9b415b162c603405414bacd6932d031c5b5a8b757f0d"},
- {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d2e74acbcba3bfdb6d9d8384cdc4f9260cae86ed9beee8bd5f54fee49a430b9"},
- {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e259bced5549ac64410162adc973c5e2fb77f04df4a439d00b478e57a0e65512"},
- {file = "contourpy-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad687a04bc802cbe8b9c399c07162a3c35e227e2daccf1668eb1f278cb698631"},
- {file = "contourpy-1.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cdd22595308f53ef2f891040ab2b93d79192513ffccbd7fe19be7aa773a5e09f"},
- {file = "contourpy-1.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4f54d6a2defe9f257327b0f243612dd051cc43825587520b1bf74a31e2f6ef2"},
- {file = "contourpy-1.3.2-cp310-cp310-win32.whl", hash = "sha256:f939a054192ddc596e031e50bb13b657ce318cf13d264f095ce9db7dc6ae81c0"},
- {file = "contourpy-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c440093bbc8fc21c637c03bafcbef95ccd963bc6e0514ad887932c18ca2a759a"},
- {file = "contourpy-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a37a2fb93d4df3fc4c0e363ea4d16f83195fc09c891bc8ce072b9d084853445"},
- {file = "contourpy-1.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7cd50c38f500bbcc9b6a46643a40e0913673f869315d8e70de0438817cb7773"},
- {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6658ccc7251a4433eebd89ed2672c2ed96fba367fd25ca9512aa92a4b46c4f1"},
- {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:70771a461aaeb335df14deb6c97439973d253ae70660ca085eec25241137ef43"},
- {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a887a6e8c4cd0897507d814b14c54a8c2e2aa4ac9f7686292f9769fcf9a6ab"},
- {file = "contourpy-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3859783aefa2b8355697f16642695a5b9792e7a46ab86da1118a4a23a51a33d7"},
- {file = "contourpy-1.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eab0f6db315fa4d70f1d8ab514e527f0366ec021ff853d7ed6a2d33605cf4b83"},
- {file = "contourpy-1.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d91a3ccc7fea94ca0acab82ceb77f396d50a1f67412efe4c526f5d20264e6ecd"},
- {file = "contourpy-1.3.2-cp311-cp311-win32.whl", hash = "sha256:1c48188778d4d2f3d48e4643fb15d8608b1d01e4b4d6b0548d9b336c28fc9b6f"},
- {file = "contourpy-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:5ebac872ba09cb8f2131c46b8739a7ff71de28a24c869bcad554477eb089a878"},
- {file = "contourpy-1.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4caf2bcd2969402bf77edc4cb6034c7dd7c0803213b3523f111eb7460a51b8d2"},
- {file = "contourpy-1.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82199cb78276249796419fe36b7386bd8d2cc3f28b3bc19fe2454fe2e26c4c15"},
- {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106fab697af11456fcba3e352ad50effe493a90f893fca6c2ca5c033820cea92"},
- {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d14f12932a8d620e307f715857107b1d1845cc44fdb5da2bc8e850f5ceba9f87"},
- {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:532fd26e715560721bb0d5fc7610fce279b3699b018600ab999d1be895b09415"},
- {file = "contourpy-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b383144cf2d2c29f01a1e8170f50dacf0eac02d64139dcd709a8ac4eb3cfe"},
- {file = "contourpy-1.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c49f73e61f1f774650a55d221803b101d966ca0c5a2d6d5e4320ec3997489441"},
- {file = "contourpy-1.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3d80b2c0300583228ac98d0a927a1ba6a2ba6b8a742463c564f1d419ee5b211e"},
- {file = "contourpy-1.3.2-cp312-cp312-win32.whl", hash = "sha256:90df94c89a91b7362e1142cbee7568f86514412ab8a2c0d0fca72d7e91b62912"},
- {file = "contourpy-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c942a01d9163e2e5cfb05cb66110121b8d07ad438a17f9e766317bcb62abf73"},
- {file = "contourpy-1.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:de39db2604ae755316cb5967728f4bea92685884b1e767b7c24e983ef5f771cb"},
- {file = "contourpy-1.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f9e896f447c5c8618f1edb2bafa9a4030f22a575ec418ad70611450720b5b08"},
- {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71e2bd4a1c4188f5c2b8d274da78faab884b59df20df63c34f74aa1813c4427c"},
- {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de425af81b6cea33101ae95ece1f696af39446db9682a0b56daaa48cfc29f38f"},
- {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:977e98a0e0480d3fe292246417239d2d45435904afd6d7332d8455981c408b85"},
- {file = "contourpy-1.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:434f0adf84911c924519d2b08fc10491dd282b20bdd3fa8f60fd816ea0b48841"},
- {file = "contourpy-1.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c66c4906cdbc50e9cba65978823e6e00b45682eb09adbb78c9775b74eb222422"},
- {file = "contourpy-1.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8b7fc0cd78ba2f4695fd0a6ad81a19e7e3ab825c31b577f384aa9d7817dc3bef"},
- {file = "contourpy-1.3.2-cp313-cp313-win32.whl", hash = "sha256:15ce6ab60957ca74cff444fe66d9045c1fd3e92c8936894ebd1f3eef2fff075f"},
- {file = "contourpy-1.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e1578f7eafce927b168752ed7e22646dad6cd9bca673c60bff55889fa236ebf9"},
- {file = "contourpy-1.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0475b1f6604896bc7c53bb070e355e9321e1bc0d381735421a2d2068ec56531f"},
- {file = "contourpy-1.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c85bb486e9be652314bb5b9e2e3b0d1b2e643d5eec4992c0fbe8ac71775da739"},
- {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:745b57db7758f3ffc05a10254edd3182a2a83402a89c00957a8e8a22f5582823"},
- {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:970e9173dbd7eba9b4e01aab19215a48ee5dd3f43cef736eebde064a171f89a5"},
- {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6c4639a9c22230276b7bffb6a850dfc8258a2521305e1faefe804d006b2e532"},
- {file = "contourpy-1.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc829960f34ba36aad4302e78eabf3ef16a3a100863f0d4eeddf30e8a485a03b"},
- {file = "contourpy-1.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d32530b534e986374fc19eaa77fcb87e8a99e5431499949b828312bdcd20ac52"},
- {file = "contourpy-1.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e298e7e70cf4eb179cc1077be1c725b5fd131ebc81181bf0c03525c8abc297fd"},
- {file = "contourpy-1.3.2-cp313-cp313t-win32.whl", hash = "sha256:d0e589ae0d55204991450bb5c23f571c64fe43adaa53f93fc902a84c96f52fe1"},
- {file = "contourpy-1.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:78e9253c3de756b3f6a5174d024c4835acd59eb3f8e2ca13e775dbffe1558f69"},
- {file = "contourpy-1.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fd93cc7f3139b6dd7aab2f26a90dde0aa9fc264dbf70f6740d498a70b860b82c"},
- {file = "contourpy-1.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:107ba8a6a7eec58bb475329e6d3b95deba9440667c4d62b9b6063942b61d7f16"},
- {file = "contourpy-1.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ded1706ed0c1049224531b81128efbd5084598f18d8a2d9efae833edbd2b40ad"},
- {file = "contourpy-1.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f5964cdad279256c084b69c3f412b7801e15356b16efa9d78aa974041903da0"},
- {file = "contourpy-1.3.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b65a95d642d4efa8f64ba12558fcb83407e58a2dfba9d796d77b63ccfcaff5"},
- {file = "contourpy-1.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8c5acb8dddb0752bf252e01a3035b21443158910ac16a3b0d20e7fed7d534ce5"},
- {file = "contourpy-1.3.2.tar.gz", hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54"},
+ {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"},
+ {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"},
]
[package.dependencies]
-numpy = ">=1.23"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
-bokeh = ["bokeh", "selenium"]
-docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
-mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.15.0)", "types-Pillow"]
-test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
-test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"]
+development = ["black", "flake8", "mypy", "pytest", "types-colorama"]
[[package]]
-name = "cycler"
-version = "0.12.1"
-description = "Composable style cycles"
+name = "coverage"
+version = "7.9.1"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "coverage-7.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc94d7c5e8423920787c33d811c0be67b7be83c705f001f7180c7b186dcf10ca"},
+ {file = "coverage-7.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16aa0830d0c08a2c40c264cef801db8bc4fc0e1892782e45bcacbd5889270509"},
+ {file = "coverage-7.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf95981b126f23db63e9dbe4cf65bd71f9a6305696fa5e2262693bc4e2183f5b"},
+ {file = "coverage-7.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f05031cf21699785cd47cb7485f67df619e7bcdae38e0fde40d23d3d0210d3c3"},
+ {file = "coverage-7.9.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4fbcab8764dc072cb651a4bcda4d11fb5658a1d8d68842a862a6610bd8cfa3"},
+ {file = "coverage-7.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16649a7330ec307942ed27d06ee7e7a38417144620bb3d6e9a18ded8a2d3e5"},
+ {file = "coverage-7.9.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cea0a27a89e6432705fffc178064503508e3c0184b4f061700e771a09de58187"},
+ {file = "coverage-7.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e980b53a959fa53b6f05343afbd1e6f44a23ed6c23c4b4c56c6662bbb40c82ce"},
+ {file = "coverage-7.9.1-cp310-cp310-win32.whl", hash = "sha256:70760b4c5560be6ca70d11f8988ee6542b003f982b32f83d5ac0b72476607b70"},
+ {file = "coverage-7.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a66e8f628b71f78c0e0342003d53b53101ba4e00ea8dabb799d9dba0abbbcebe"},
+ {file = "coverage-7.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95c765060e65c692da2d2f51a9499c5e9f5cf5453aeaf1420e3fc847cc060582"},
+ {file = "coverage-7.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba383dc6afd5ec5b7a0d0c23d38895db0e15bcba7fb0fa8901f245267ac30d86"},
+ {file = "coverage-7.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae0383f13cbdcf1e5e7014489b0d71cc0106458878ccde52e8a12ced4298ed"},
+ {file = "coverage-7.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69aa417a030bf11ec46149636314c24c8d60fadb12fc0ee8f10fda0d918c879d"},
+ {file = "coverage-7.9.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a4be2a28656afe279b34d4f91c3e26eccf2f85500d4a4ff0b1f8b54bf807338"},
+ {file = "coverage-7.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:382e7ddd5289f140259b610e5f5c58f713d025cb2f66d0eb17e68d0a94278875"},
+ {file = "coverage-7.9.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e5532482344186c543c37bfad0ee6069e8ae4fc38d073b8bc836fc8f03c9e250"},
+ {file = "coverage-7.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a39d18b3f50cc121d0ce3838d32d58bd1d15dab89c910358ebefc3665712256c"},
+ {file = "coverage-7.9.1-cp311-cp311-win32.whl", hash = "sha256:dd24bd8d77c98557880def750782df77ab2b6885a18483dc8588792247174b32"},
+ {file = "coverage-7.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:6b55ad10a35a21b8015eabddc9ba31eb590f54adc9cd39bcf09ff5349fd52125"},
+ {file = "coverage-7.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:6ad935f0016be24c0e97fc8c40c465f9c4b85cbbe6eac48934c0dc4d2568321e"},
+ {file = "coverage-7.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8de12b4b87c20de895f10567639c0797b621b22897b0af3ce4b4e204a743626"},
+ {file = "coverage-7.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5add197315a054e92cee1b5f686a2bcba60c4c3e66ee3de77ace6c867bdee7cb"},
+ {file = "coverage-7.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600a1d4106fe66f41e5d0136dfbc68fe7200a5cbe85610ddf094f8f22e1b0300"},
+ {file = "coverage-7.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a876e4c3e5a2a1715a6608906aa5a2e0475b9c0f68343c2ada98110512ab1d8"},
+ {file = "coverage-7.9.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81f34346dd63010453922c8e628a52ea2d2ccd73cb2487f7700ac531b247c8a5"},
+ {file = "coverage-7.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:888f8eee13f2377ce86d44f338968eedec3291876b0b8a7289247ba52cb984cd"},
+ {file = "coverage-7.9.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9969ef1e69b8c8e1e70d591f91bbc37fc9a3621e447525d1602801a24ceda898"},
+ {file = "coverage-7.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60c458224331ee3f1a5b472773e4a085cc27a86a0b48205409d364272d67140d"},
+ {file = "coverage-7.9.1-cp312-cp312-win32.whl", hash = "sha256:5f646a99a8c2b3ff4c6a6e081f78fad0dde275cd59f8f49dc4eab2e394332e74"},
+ {file = "coverage-7.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:30f445f85c353090b83e552dcbbdad3ec84c7967e108c3ae54556ca69955563e"},
+ {file = "coverage-7.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:af41da5dca398d3474129c58cb2b106a5d93bbb196be0d307ac82311ca234342"},
+ {file = "coverage-7.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:31324f18d5969feef7344a932c32428a2d1a3e50b15a6404e97cba1cc9b2c631"},
+ {file = "coverage-7.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0c804506d624e8a20fb3108764c52e0eef664e29d21692afa375e0dd98dc384f"},
+ {file = "coverage-7.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef64c27bc40189f36fcc50c3fb8f16ccda73b6a0b80d9bd6e6ce4cffcd810bbd"},
+ {file = "coverage-7.9.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4fe2348cc6ec372e25adec0219ee2334a68d2f5222e0cba9c0d613394e12d86"},
+ {file = "coverage-7.9.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ed2186fe52fcc24d4561041979a0dec69adae7bce2ae8d1c49eace13e55c43"},
+ {file = "coverage-7.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:25308bd3d00d5eedd5ae7d4357161f4df743e3c0240fa773ee1b0f75e6c7c0f1"},
+ {file = "coverage-7.9.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73e9439310f65d55a5a1e0564b48e34f5369bee943d72c88378f2d576f5a5751"},
+ {file = "coverage-7.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37ab6be0859141b53aa89412a82454b482c81cf750de4f29223d52268a86de67"},
+ {file = "coverage-7.9.1-cp313-cp313-win32.whl", hash = "sha256:64bdd969456e2d02a8b08aa047a92d269c7ac1f47e0c977675d550c9a0863643"},
+ {file = "coverage-7.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:be9e3f68ca9edb897c2184ad0eee815c635565dbe7a0e7e814dc1f7cbab92c0a"},
+ {file = "coverage-7.9.1-cp313-cp313-win_arm64.whl", hash = "sha256:1c503289ffef1d5105d91bbb4d62cbe4b14bec4d13ca225f9c73cde9bb46207d"},
+ {file = "coverage-7.9.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0"},
+ {file = "coverage-7.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d"},
+ {file = "coverage-7.9.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f"},
+ {file = "coverage-7.9.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029"},
+ {file = "coverage-7.9.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece"},
+ {file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683"},
+ {file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f"},
+ {file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10"},
+ {file = "coverage-7.9.1-cp313-cp313t-win32.whl", hash = "sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363"},
+ {file = "coverage-7.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7"},
+ {file = "coverage-7.9.1-cp313-cp313t-win_arm64.whl", hash = "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c"},
+ {file = "coverage-7.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f424507f57878e424d9a95dc4ead3fbdd72fd201e404e861e465f28ea469951"},
+ {file = "coverage-7.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:535fde4001b2783ac80865d90e7cc7798b6b126f4cd8a8c54acfe76804e54e58"},
+ {file = "coverage-7.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02532fd3290bb8fa6bec876520842428e2a6ed6c27014eca81b031c2d30e3f71"},
+ {file = "coverage-7.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56f5eb308b17bca3bbff810f55ee26d51926d9f89ba92707ee41d3c061257e55"},
+ {file = "coverage-7.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfa447506c1a52271f1b0de3f42ea0fa14676052549095e378d5bff1c505ff7b"},
+ {file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9ca8e220006966b4a7b68e8984a6aee645a0384b0769e829ba60281fe61ec4f7"},
+ {file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49f1d0788ba5b7ba65933f3a18864117c6506619f5ca80326b478f72acf3f385"},
+ {file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68cd53aec6f45b8e4724c0950ce86eacb775c6be01ce6e3669fe4f3a21e768ed"},
+ {file = "coverage-7.9.1-cp39-cp39-win32.whl", hash = "sha256:95335095b6c7b1cc14c3f3f17d5452ce677e8490d101698562b2ffcacc304c8d"},
+ {file = "coverage-7.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:e1b5191d1648acc439b24721caab2fd0c86679d8549ed2c84d5a7ec1bedcc244"},
+ {file = "coverage-7.9.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514"},
+ {file = "coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c"},
+ {file = "coverage-7.9.1.tar.gz", hash = "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec"},
+]
+
+[package.extras]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
+
+[[package]]
+name = "cryptography"
+version = "45.0.4"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = "!=3.9.0,!=3.9.1,>=3.7"
+groups = ["main"]
+files = [
+ {file = "cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1"},
+ {file = "cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999"},
+ {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750"},
+ {file = "cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2"},
+ {file = "cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257"},
+ {file = "cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8"},
+ {file = "cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad"},
+ {file = "cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6"},
+ {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872"},
+ {file = "cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4"},
+ {file = "cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97"},
+ {file = "cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22"},
+ {file = "cryptography-45.0.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a77c6fb8d76e9c9f99f2f3437c1a4ac287b34eaf40997cfab1e9bd2be175ac39"},
+ {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7aad98a25ed8ac917fdd8a9c1e706e5a0956e06c498be1f713b61734333a4507"},
+ {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3530382a43a0e524bc931f187fc69ef4c42828cf7d7f592f7f249f602b5a4ab0"},
+ {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:6b613164cb8425e2f8db5849ffb84892e523bf6d26deb8f9bb76ae86181fa12b"},
+ {file = "cryptography-45.0.4-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:96d4819e25bf3b685199b304a0029ce4a3caf98947ce8a066c9137cc78ad2c58"},
+ {file = "cryptography-45.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b97737a3ffbea79eebb062eb0d67d72307195035332501722a9ca86bab9e3ab2"},
+ {file = "cryptography-45.0.4-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c"},
+ {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4"},
+ {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349"},
+ {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8"},
+ {file = "cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862"},
+ {file = "cryptography-45.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d"},
+ {file = "cryptography-45.0.4.tar.gz", hash = "sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""]
+docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""]
+pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+sdist = ["build (>=1.0.0)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==45.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "dill"
+version = "0.4.0"
+description = "serialize all of Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049"},
+ {file = "dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0"},
+]
+
+[package.extras]
+graph = ["objgraph (>=1.7.2)"]
+profile = ["gprof2dot (>=2022.7.29)"]
+
+[[package]]
+name = "fastapi"
+version = "0.115.13"
+description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
- {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
- {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
+ {file = "fastapi-0.115.13-py3-none-any.whl", hash = "sha256:0a0cab59afa7bab22f5eb347f8c9864b681558c278395e94035a741fc10cd865"},
+ {file = "fastapi-0.115.13.tar.gz", hash = "sha256:55d1d25c2e1e0a0a50aceb1c8705cd932def273c102bff0b1c1da88b3c6eb307"},
]
+[package.dependencies]
+pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
+starlette = ">=0.40.0,<0.47.0"
+typing-extensions = ">=4.8.0"
+
[package.extras]
-docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
-tests = ["pytest", "pytest-cov", "pytest-xdist"]
+all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
[[package]]
name = "flake8"
@@ -196,78 +566,166 @@ pycodestyle = ">=2.13.0,<2.14.0"
pyflakes = ">=3.3.0,<3.4.0"
[[package]]
-name = "fonttools"
-version = "4.57.0"
-description = "Tools to manipulate font files"
+name = "flask"
+version = "3.1.1"
+description = "A simple framework for building complex web applications."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c"},
+ {file = "flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e"},
+]
+
+[package.dependencies]
+blinker = ">=1.9.0"
+click = ">=8.1.3"
+itsdangerous = ">=2.2.0"
+jinja2 = ">=3.1.2"
+markupsafe = ">=2.1.1"
+werkzeug = ">=3.1.0"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
- {file = "fonttools-4.57.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:babe8d1eb059a53e560e7bf29f8e8f4accc8b6cfb9b5fd10e485bde77e71ef41"},
- {file = "fonttools-4.57.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:81aa97669cd726349eb7bd43ca540cf418b279ee3caba5e2e295fb4e8f841c02"},
- {file = "fonttools-4.57.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0e9618630edd1910ad4f07f60d77c184b2f572c8ee43305ea3265675cbbfe7e"},
- {file = "fonttools-4.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34687a5d21f1d688d7d8d416cb4c5b9c87fca8a1797ec0d74b9fdebfa55c09ab"},
- {file = "fonttools-4.57.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69ab81b66ebaa8d430ba56c7a5f9abe0183afefd3a2d6e483060343398b13fb1"},
- {file = "fonttools-4.57.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d639397de852f2ccfb3134b152c741406752640a266d9c1365b0f23d7b88077f"},
- {file = "fonttools-4.57.0-cp310-cp310-win32.whl", hash = "sha256:cc066cb98b912f525ae901a24cd381a656f024f76203bc85f78fcc9e66ae5aec"},
- {file = "fonttools-4.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:7a64edd3ff6a7f711a15bd70b4458611fb240176ec11ad8845ccbab4fe6745db"},
- {file = "fonttools-4.57.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3871349303bdec958360eedb619169a779956503ffb4543bb3e6211e09b647c4"},
- {file = "fonttools-4.57.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c59375e85126b15a90fcba3443eaac58f3073ba091f02410eaa286da9ad80ed8"},
- {file = "fonttools-4.57.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967b65232e104f4b0f6370a62eb33089e00024f2ce143aecbf9755649421c683"},
- {file = "fonttools-4.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39acf68abdfc74e19de7485f8f7396fa4d2418efea239b7061d6ed6a2510c746"},
- {file = "fonttools-4.57.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d077f909f2343daf4495ba22bb0e23b62886e8ec7c109ee8234bdbd678cf344"},
- {file = "fonttools-4.57.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:46370ac47a1e91895d40e9ad48effbe8e9d9db1a4b80888095bc00e7beaa042f"},
- {file = "fonttools-4.57.0-cp311-cp311-win32.whl", hash = "sha256:ca2aed95855506b7ae94e8f1f6217b7673c929e4f4f1217bcaa236253055cb36"},
- {file = "fonttools-4.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:17168a4670bbe3775f3f3f72d23ee786bd965395381dfbb70111e25e81505b9d"},
- {file = "fonttools-4.57.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:889e45e976c74abc7256d3064aa7c1295aa283c6bb19810b9f8b604dfe5c7f31"},
- {file = "fonttools-4.57.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0425c2e052a5f1516c94e5855dbda706ae5a768631e9fcc34e57d074d1b65b92"},
- {file = "fonttools-4.57.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44c26a311be2ac130f40a96769264809d3b0cb297518669db437d1cc82974888"},
- {file = "fonttools-4.57.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c41ba992df5b8d680b89fd84c6a1f2aca2b9f1ae8a67400c8930cd4ea115f6"},
- {file = "fonttools-4.57.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ea1e9e43ca56b0c12440a7c689b1350066595bebcaa83baad05b8b2675129d98"},
- {file = "fonttools-4.57.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84fd56c78d431606332a0627c16e2a63d243d0d8b05521257d77c6529abe14d8"},
- {file = "fonttools-4.57.0-cp312-cp312-win32.whl", hash = "sha256:f4376819c1c778d59e0a31db5dc6ede854e9edf28bbfa5b756604727f7f800ac"},
- {file = "fonttools-4.57.0-cp312-cp312-win_amd64.whl", hash = "sha256:57e30241524879ea10cdf79c737037221f77cc126a8cdc8ff2c94d4a522504b9"},
- {file = "fonttools-4.57.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:408ce299696012d503b714778d89aa476f032414ae57e57b42e4b92363e0b8ef"},
- {file = "fonttools-4.57.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bbceffc80aa02d9e8b99f2a7491ed8c4a783b2fc4020119dc405ca14fb5c758c"},
- {file = "fonttools-4.57.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f022601f3ee9e1f6658ed6d184ce27fa5216cee5b82d279e0f0bde5deebece72"},
- {file = "fonttools-4.57.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dea5893b58d4637ffa925536462ba626f8a1b9ffbe2f5c272cdf2c6ebadb817"},
- {file = "fonttools-4.57.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dff02c5c8423a657c550b48231d0a48d7e2b2e131088e55983cfe74ccc2c7cc9"},
- {file = "fonttools-4.57.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:767604f244dc17c68d3e2dbf98e038d11a18abc078f2d0f84b6c24571d9c0b13"},
- {file = "fonttools-4.57.0-cp313-cp313-win32.whl", hash = "sha256:8e2e12d0d862f43d51e5afb8b9751c77e6bec7d2dc00aad80641364e9df5b199"},
- {file = "fonttools-4.57.0-cp313-cp313-win_amd64.whl", hash = "sha256:f1d6bc9c23356908db712d282acb3eebd4ae5ec6d8b696aa40342b1d84f8e9e3"},
- {file = "fonttools-4.57.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9d57b4e23ebbe985125d3f0cabbf286efa191ab60bbadb9326091050d88e8213"},
- {file = "fonttools-4.57.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:579ba873d7f2a96f78b2e11028f7472146ae181cae0e4d814a37a09e93d5c5cc"},
- {file = "fonttools-4.57.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e3e1ec10c29bae0ea826b61f265ec5c858c5ba2ce2e69a71a62f285cf8e4595"},
- {file = "fonttools-4.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1968f2a2003c97c4ce6308dc2498d5fd4364ad309900930aa5a503c9851aec8"},
- {file = "fonttools-4.57.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:aff40f8ac6763d05c2c8f6d240c6dac4bb92640a86d9b0c3f3fff4404f34095c"},
- {file = "fonttools-4.57.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d07f1b64008e39fceae7aa99e38df8385d7d24a474a8c9872645c4397b674481"},
- {file = "fonttools-4.57.0-cp38-cp38-win32.whl", hash = "sha256:51d8482e96b28fb28aa8e50b5706f3cee06de85cbe2dce80dbd1917ae22ec5a6"},
- {file = "fonttools-4.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:03290e818782e7edb159474144fca11e36a8ed6663d1fcbd5268eb550594fd8e"},
- {file = "fonttools-4.57.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7339e6a3283e4b0ade99cade51e97cde3d54cd6d1c3744459e886b66d630c8b3"},
- {file = "fonttools-4.57.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:05efceb2cb5f6ec92a4180fcb7a64aa8d3385fd49cfbbe459350229d1974f0b1"},
- {file = "fonttools-4.57.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a97bb05eb24637714a04dee85bdf0ad1941df64fe3b802ee4ac1c284a5f97b7c"},
- {file = "fonttools-4.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:541cb48191a19ceb1a2a4b90c1fcebd22a1ff7491010d3cf840dd3a68aebd654"},
- {file = "fonttools-4.57.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cdef9a056c222d0479a1fdb721430f9efd68268014c54e8166133d2643cb05d9"},
- {file = "fonttools-4.57.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3cf97236b192a50a4bf200dc5ba405aa78d4f537a2c6e4c624bb60466d5b03bd"},
- {file = "fonttools-4.57.0-cp39-cp39-win32.whl", hash = "sha256:e952c684274a7714b3160f57ec1d78309f955c6335c04433f07d36c5eb27b1f9"},
- {file = "fonttools-4.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2a722c0e4bfd9966a11ff55c895c817158fcce1b2b6700205a376403b546ad9"},
- {file = "fonttools-4.57.0-py3-none-any.whl", hash = "sha256:3122c604a675513c68bd24c6a8f9091f1c2376d18e8f5fe5a101746c81b3e98f"},
- {file = "fonttools-4.57.0.tar.gz", hash = "sha256:727ece10e065be2f9dd239d15dd5d60a66e17eac11aea47d447f9f03fdbc42de"},
+ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
+ {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
+]
+
+[[package]]
+name = "h2"
+version = "4.2.0"
+description = "Pure-Python HTTP/2 protocol implementation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"},
+ {file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"},
+]
+
+[package.dependencies]
+hpack = ">=4.1,<5"
+hyperframe = ">=6.1,<7"
+
+[[package]]
+name = "hpack"
+version = "4.1.0"
+description = "Pure-Python HPACK header encoding"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"},
+ {file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
+ {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.16"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
+ {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+
+[package.extras]
+brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "hypercorn"
+version = "0.17.3"
+description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "hypercorn-0.17.3-py3-none-any.whl", hash = "sha256:059215dec34537f9d40a69258d323f56344805efb462959e727152b0aa504547"},
+ {file = "hypercorn-0.17.3.tar.gz", hash = "sha256:1b37802ee3ac52d2d85270700d565787ab16cf19e1462ccfa9f089ca17574165"},
+]
+
+[package.dependencies]
+h11 = "*"
+h2 = ">=3.1.0"
+priority = "*"
+wsproto = ">=0.14.0"
+
+[package.extras]
+docs = ["pydata_sphinx_theme", "sphinxcontrib_mermaid"]
+h3 = ["aioquic (>=0.9.0,<1.0)"]
+trio = ["trio (>=0.22.0)"]
+uvloop = ["uvloop (>=0.18) ; platform_system != \"Windows\""]
+
+[[package]]
+name = "hyperframe"
+version = "6.1.0"
+description = "Pure-Python HTTP/2 framing"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"},
+ {file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"},
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+ {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]
[package.extras]
-all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
-graphite = ["lz4 (>=1.7.4.2)"]
-interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
-lxml = ["lxml (>=4.0)"]
-pathops = ["skia-pathops (>=0.5.0)"]
-plot = ["matplotlib"]
-repacker = ["uharfbuzz (>=0.23.0)"]
-symfont = ["sympy"]
-type1 = ["xattr ; sys_platform == \"darwin\""]
-ufo = ["fs (>=2.2.0,<3)"]
-unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
-woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
[[package]]
name = "iniconfig"
@@ -287,7 +745,7 @@ version = "6.0.1"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.9.0"
-groups = ["dev"]
+groups = ["main", "dev"]
files = [
{file = "isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615"},
{file = "isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450"},
@@ -298,152 +756,105 @@ colors = ["colorama"]
plugins = ["setuptools"]
[[package]]
-name = "kiwisolver"
-version = "1.4.8"
-description = "A fast implementation of the Cassowary constraint solver"
+name = "itsdangerous"
+version = "2.2.0"
+description = "Safely pass data to untrusted environments and back."
optional = false
-python-versions = ">=3.10"
+python-versions = ">=3.8"
groups = ["main"]
files = [
- {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"},
- {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"},
- {file = "kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d"},
- {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d"},
- {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c"},
- {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3"},
- {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed"},
- {file = "kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f"},
- {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff"},
- {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d"},
- {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c"},
- {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605"},
- {file = "kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e"},
- {file = "kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751"},
- {file = "kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271"},
- {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84"},
- {file = "kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561"},
- {file = "kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7"},
- {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03"},
- {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954"},
- {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79"},
- {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6"},
- {file = "kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0"},
- {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab"},
- {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc"},
- {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25"},
- {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc"},
- {file = "kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67"},
- {file = "kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34"},
- {file = "kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2"},
- {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502"},
- {file = "kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31"},
- {file = "kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb"},
- {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f"},
- {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc"},
- {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a"},
- {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a"},
- {file = "kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a"},
- {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3"},
- {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b"},
- {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4"},
- {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d"},
- {file = "kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8"},
- {file = "kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50"},
- {file = "kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476"},
- {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1c8ceb754339793c24aee1c9fb2485b5b1f5bb1c2c214ff13368431e51fc9a09"},
- {file = "kiwisolver-1.4.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a62808ac74b5e55a04a408cda6156f986cefbcf0ada13572696b507cc92fa1"},
- {file = "kiwisolver-1.4.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68269e60ee4929893aad82666821aaacbd455284124817af45c11e50a4b42e3c"},
- {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d142fba9c464bc3bbfeff15c96eab0e7310343d6aefb62a79d51421fcc5f1b"},
- {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc373e0eef45b59197de815b1b28ef89ae3955e7722cc9710fb91cd77b7f47"},
- {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77e6f57a20b9bd4e1e2cedda4d0b986ebd0216236f0106e55c28aea3d3d69b16"},
- {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08e77738ed7538f036cd1170cbed942ef749137b1311fa2bbe2a7fda2f6bf3cc"},
- {file = "kiwisolver-1.4.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5ce1e481a74b44dd5e92ff03ea0cb371ae7a0268318e202be06c8f04f4f1246"},
- {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc2ace710ba7c1dfd1a3b42530b62b9ceed115f19a1656adefce7b1782a37794"},
- {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3452046c37c7692bd52b0e752b87954ef86ee2224e624ef7ce6cb21e8c41cc1b"},
- {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7e9a60b50fe8b2ec6f448fe8d81b07e40141bfced7f896309df271a0b92f80f3"},
- {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:918139571133f366e8362fa4a297aeba86c7816b7ecf0bc79168080e2bd79957"},
- {file = "kiwisolver-1.4.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e063ef9f89885a1d68dd8b2e18f5ead48653176d10a0e324e3b0030e3a69adeb"},
- {file = "kiwisolver-1.4.8-cp313-cp313-win_amd64.whl", hash = "sha256:a17b7c4f5b2c51bb68ed379defd608a03954a1845dfed7cc0117f1cc8a9b7fd2"},
- {file = "kiwisolver-1.4.8-cp313-cp313-win_arm64.whl", hash = "sha256:3cd3bc628b25f74aedc6d374d5babf0166a92ff1317f46267f12d2ed54bc1d30"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:370fd2df41660ed4e26b8c9d6bbcad668fbe2560462cba151a721d49e5b6628c"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:84a2f830d42707de1d191b9490ac186bf7997a9495d4e9072210a1296345f7dc"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7a3ad337add5148cf51ce0b55642dc551c0b9d6248458a757f98796ca7348712"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7506488470f41169b86d8c9aeff587293f530a23a23a49d6bc64dab66bedc71e"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f0121b07b356a22fb0414cec4666bbe36fd6d0d759db3d37228f496ed67c880"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6d6bd87df62c27d4185de7c511c6248040afae67028a8a22012b010bc7ad062"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:291331973c64bb9cce50bbe871fb2e675c4331dab4f31abe89f175ad7679a4d7"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:893f5525bb92d3d735878ec00f781b2de998333659507d29ea4466208df37bed"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b47a465040146981dc9db8647981b8cb96366fbc8d452b031e4f8fdffec3f26d"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:99cea8b9dd34ff80c521aef46a1dddb0dcc0283cf18bde6d756f1e6f31772165"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:151dffc4865e5fe6dafce5480fab84f950d14566c480c08a53c663a0020504b6"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:577facaa411c10421314598b50413aa1ebcf5126f704f1e5d72d7e4e9f020d90"},
- {file = "kiwisolver-1.4.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:be4816dc51c8a471749d664161b434912eee82f2ea66bd7628bd14583a833e85"},
- {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a"},
- {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8"},
- {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0"},
- {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c"},
- {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b"},
- {file = "kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b"},
- {file = "kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e"},
-]
-
-[[package]]
-name = "matplotlib"
-version = "3.10.1"
-description = "Python plotting package"
+ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
+ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+description = "A very fast and expressive template engine."
optional = false
-python-versions = ">=3.10"
+python-versions = ">=3.7"
groups = ["main"]
files = [
- {file = "matplotlib-3.10.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ff2ae14910be903f4a24afdbb6d7d3a6c44da210fc7d42790b87aeac92238a16"},
- {file = "matplotlib-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0721a3fd3d5756ed593220a8b86808a36c5031fce489adb5b31ee6dbb47dd5b2"},
- {file = "matplotlib-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0673b4b8f131890eb3a1ad058d6e065fb3c6e71f160089b65f8515373394698"},
- {file = "matplotlib-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e875b95ac59a7908978fe307ecdbdd9a26af7fa0f33f474a27fcf8c99f64a19"},
- {file = "matplotlib-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2589659ea30726284c6c91037216f64a506a9822f8e50592d48ac16a2f29e044"},
- {file = "matplotlib-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a97ff127f295817bc34517255c9db6e71de8eddaab7f837b7d341dee9f2f587f"},
- {file = "matplotlib-3.10.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:057206ff2d6ab82ff3e94ebd94463d084760ca682ed5f150817b859372ec4401"},
- {file = "matplotlib-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a144867dd6bf8ba8cb5fc81a158b645037e11b3e5cf8a50bd5f9917cb863adfe"},
- {file = "matplotlib-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56c5d9fcd9879aa8040f196a235e2dcbdf7dd03ab5b07c0696f80bc6cf04bedd"},
- {file = "matplotlib-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f69dc9713e4ad2fb21a1c30e37bd445d496524257dfda40ff4a8efb3604ab5c"},
- {file = "matplotlib-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c59af3e8aca75d7744b68e8e78a669e91ccbcf1ac35d0102a7b1b46883f1dd7"},
- {file = "matplotlib-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:11b65088c6f3dae784bc72e8d039a2580186285f87448babb9ddb2ad0082993a"},
- {file = "matplotlib-3.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:66e907a06e68cb6cfd652c193311d61a12b54f56809cafbed9736ce5ad92f107"},
- {file = "matplotlib-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b4bb156abb8fa5e5b2b460196f7db7264fc6d62678c03457979e7d5254b7be"},
- {file = "matplotlib-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1985ad3d97f51307a2cbfc801a930f120def19ba22864182dacef55277102ba6"},
- {file = "matplotlib-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96f2c2f825d1257e437a1482c5a2cf4fee15db4261bd6fc0750f81ba2b4ba3d"},
- {file = "matplotlib-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35e87384ee9e488d8dd5a2dd7baf471178d38b90618d8ea147aced4ab59c9bea"},
- {file = "matplotlib-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:cfd414bce89cc78a7e1d25202e979b3f1af799e416010a20ab2b5ebb3a02425c"},
- {file = "matplotlib-3.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c42eee41e1b60fd83ee3292ed83a97a5f2a8239b10c26715d8a6172226988d7b"},
- {file = "matplotlib-3.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4f0647b17b667ae745c13721602b540f7aadb2a32c5b96e924cd4fea5dcb90f1"},
- {file = "matplotlib-3.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa3854b5f9473564ef40a41bc922be978fab217776e9ae1545c9b3a5cf2092a3"},
- {file = "matplotlib-3.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e496c01441be4c7d5f96d4e40f7fca06e20dcb40e44c8daa2e740e1757ad9e6"},
- {file = "matplotlib-3.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d45d3f5245be5b469843450617dcad9af75ca50568acf59997bed9311131a0b"},
- {file = "matplotlib-3.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:8e8e25b1209161d20dfe93037c8a7f7ca796ec9aa326e6e4588d8c4a5dd1e473"},
- {file = "matplotlib-3.10.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:19b06241ad89c3ae9469e07d77efa87041eac65d78df4fcf9cac318028009b01"},
- {file = "matplotlib-3.10.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01e63101ebb3014e6e9f80d9cf9ee361a8599ddca2c3e166c563628b39305dbb"},
- {file = "matplotlib-3.10.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f06bad951eea6422ac4e8bdebcf3a70c59ea0a03338c5d2b109f57b64eb3972"},
- {file = "matplotlib-3.10.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfb036f34873b46978f55e240cff7a239f6c4409eac62d8145bad3fc6ba5a3"},
- {file = "matplotlib-3.10.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dc6ab14a7ab3b4d813b88ba957fc05c79493a037f54e246162033591e770de6f"},
- {file = "matplotlib-3.10.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bc411ebd5889a78dabbc457b3fa153203e22248bfa6eedc6797be5df0164dbf9"},
- {file = "matplotlib-3.10.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:648406f1899f9a818cef8c0231b44dcfc4ff36f167101c3fd1c9151f24220fdc"},
- {file = "matplotlib-3.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:02582304e352f40520727984a5a18f37e8187861f954fea9be7ef06569cf85b4"},
- {file = "matplotlib-3.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3809916157ba871bcdd33d3493acd7fe3037db5daa917ca6e77975a94cef779"},
- {file = "matplotlib-3.10.1.tar.gz", hash = "sha256:e8d2d0e3881b129268585bf4765ad3ee73a4591d77b9a18c214ac7e3a79fb2ba"},
+ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
+ {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
]
[package.dependencies]
-contourpy = ">=1.0.1"
-cycler = ">=0.10"
-fonttools = ">=4.22.0"
-kiwisolver = ">=1.3.1"
-numpy = ">=1.23"
-packaging = ">=20.0"
-pillow = ">=8"
-pyparsing = ">=2.3.1"
-python-dateutil = ">=2.7"
+MarkupSafe = ">=2.0"
[package.extras]
-dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
+ {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
+ {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
+ {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
+ {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
+ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
+]
[[package]]
name = "mccabe"
@@ -451,7 +862,7 @@ version = "0.7.0"
description = "McCabe checker, plugin for flake8"
optional = false
python-versions = ">=3.6"
-groups = ["dev"]
+groups = ["main", "dev"]
files = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
@@ -459,48 +870,49 @@ files = [
[[package]]
name = "mypy"
-version = "1.15.0"
+version = "1.16.1"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
- {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"},
- {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"},
- {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"},
- {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"},
- {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"},
- {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"},
- {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"},
- {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"},
- {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"},
- {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"},
- {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"},
- {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"},
- {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"},
- {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"},
- {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"},
- {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"},
- {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"},
- {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"},
- {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"},
- {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"},
- {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"},
- {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"},
- {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"},
- {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"},
- {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"},
- {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"},
- {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"},
- {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"},
- {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"},
- {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"},
- {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"},
- {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"},
+ {file = "mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"},
+ {file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"},
+ {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea"},
+ {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574"},
+ {file = "mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d"},
+ {file = "mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6"},
+ {file = "mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc"},
+ {file = "mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782"},
+ {file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507"},
+ {file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca"},
+ {file = "mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4"},
+ {file = "mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6"},
+ {file = "mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d"},
+ {file = "mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9"},
+ {file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79"},
+ {file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15"},
+ {file = "mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd"},
+ {file = "mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b"},
+ {file = "mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438"},
+ {file = "mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536"},
+ {file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f"},
+ {file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359"},
+ {file = "mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be"},
+ {file = "mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee"},
+ {file = "mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069"},
+ {file = "mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da"},
+ {file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c"},
+ {file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383"},
+ {file = "mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40"},
+ {file = "mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b"},
+ {file = "mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37"},
+ {file = "mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab"},
]
[package.dependencies]
mypy_extensions = ">=1.0.0"
+pathspec = ">=0.9.0"
typing_extensions = ">=4.6.0"
[package.extras]
@@ -522,78 +934,13 @@ files = [
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
]
-[[package]]
-name = "numpy"
-version = "2.2.5"
-description = "Fundamental package for array computing in Python"
-optional = false
-python-versions = ">=3.10"
-groups = ["main"]
-files = [
- {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"},
- {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"},
- {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:19f4718c9012e3baea91a7dba661dcab2451cda2550678dc30d53acb91a7290f"},
- {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:eb7fd5b184e5d277afa9ec0ad5e4eb562ecff541e7f60e69ee69c8d59e9aeaba"},
- {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6413d48a9be53e183eb06495d8e3b006ef8f87c324af68241bbe7a39e8ff54c3"},
- {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7451f92eddf8503c9b8aa4fe6aa7e87fd51a29c2cfc5f7dbd72efde6c65acf57"},
- {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bcb1d057b7571334139129b7f941588f69ce7c4ed15a9d6162b2ea54ded700c"},
- {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36ab5b23915887543441efd0417e6a3baa08634308894316f446027611b53bf1"},
- {file = "numpy-2.2.5-cp310-cp310-win32.whl", hash = "sha256:422cc684f17bc963da5f59a31530b3936f57c95a29743056ef7a7903a5dbdf88"},
- {file = "numpy-2.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:e4f0b035d9d0ed519c813ee23e0a733db81ec37d2e9503afbb6e54ccfdee0fa7"},
- {file = "numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b"},
- {file = "numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda"},
- {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d"},
- {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54"},
- {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610"},
- {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b"},
- {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be"},
- {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906"},
- {file = "numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175"},
- {file = "numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd"},
- {file = "numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051"},
- {file = "numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc"},
- {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e"},
- {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa"},
- {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571"},
- {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073"},
- {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8"},
- {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae"},
- {file = "numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb"},
- {file = "numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282"},
- {file = "numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4"},
- {file = "numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f"},
- {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9"},
- {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191"},
- {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372"},
- {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d"},
- {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7"},
- {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73"},
- {file = "numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b"},
- {file = "numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471"},
- {file = "numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6"},
- {file = "numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba"},
- {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133"},
- {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376"},
- {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19"},
- {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0"},
- {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a"},
- {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066"},
- {file = "numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e"},
- {file = "numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8"},
- {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4ea7e1cff6784e58fe281ce7e7f05036b3e1c89c6f922a6bfbc0a7e8768adbe"},
- {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d7543263084a85fbc09c704b515395398d31d6395518446237eac219eab9e55e"},
- {file = "numpy-2.2.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70"},
- {file = "numpy-2.2.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2e3bdadaba0e040d1e7ab39db73e0afe2c74ae277f5614dad53eadbecbbb169"},
- {file = "numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291"},
-]
-
[[package]]
name = "packaging"
version = "25.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
-groups = ["main", "dev"]
+groups = ["dev"]
files = [
{file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
{file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
@@ -643,116 +990,16 @@ gevent = ["gevent"]
tornado = ["tornado"]
twisted = ["twisted"]
-[[package]]
-name = "pillow"
-version = "11.2.1"
-description = "Python Imaging Library (Fork)"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047"},
- {file = "pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95"},
- {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61"},
- {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1"},
- {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c"},
- {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d"},
- {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97"},
- {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579"},
- {file = "pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d"},
- {file = "pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad"},
- {file = "pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2"},
- {file = "pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70"},
- {file = "pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf"},
- {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7"},
- {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8"},
- {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600"},
- {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788"},
- {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e"},
- {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e"},
- {file = "pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6"},
- {file = "pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193"},
- {file = "pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7"},
- {file = "pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f"},
- {file = "pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b"},
- {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d"},
- {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4"},
- {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d"},
- {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4"},
- {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443"},
- {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c"},
- {file = "pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3"},
- {file = "pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941"},
- {file = "pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb"},
- {file = "pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28"},
- {file = "pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830"},
- {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0"},
- {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1"},
- {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f"},
- {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155"},
- {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14"},
- {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b"},
- {file = "pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2"},
- {file = "pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691"},
- {file = "pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c"},
- {file = "pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22"},
- {file = "pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7"},
- {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16"},
- {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b"},
- {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406"},
- {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91"},
- {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751"},
- {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9"},
- {file = "pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd"},
- {file = "pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e"},
- {file = "pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681"},
- {file = "pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8"},
- {file = "pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909"},
- {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928"},
- {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79"},
- {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35"},
- {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb"},
- {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a"},
- {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36"},
- {file = "pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67"},
- {file = "pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1"},
- {file = "pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193"},
- {file = "pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f"},
- {file = "pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044"},
- {file = "pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6"},
-]
-
-[package.extras]
-docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
-fpx = ["olefile"]
-mic = ["olefile"]
-test-arrow = ["pyarrow"]
-tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"]
-typing = ["typing-extensions ; python_version < \"3.10\""]
-xmp = ["defusedxml"]
-
[[package]]
name = "platformdirs"
-version = "4.3.7"
+version = "4.3.8"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.9"
-groups = ["dev"]
+groups = ["main", "dev"]
files = [
- {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"},
- {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"},
+ {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"},
+ {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"},
]
[package.extras]
@@ -762,44 +1009,32 @@ type = ["mypy (>=1.14.1)"]
[[package]]
name = "pluggy"
-version = "1.5.0"
+version = "1.6.0"
description = "plugin and hook calling mechanisms for python"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["dev"]
files = [
- {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
- {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
+ {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
]
[package.extras]
dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
+testing = ["coverage", "pytest", "pytest-benchmark"]
[[package]]
-name = "psutil"
-version = "7.0.0"
-description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7."
+name = "priority"
+version = "2.0.0"
+description = "A pure-Python implementation of the HTTP/2 priority tree"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.6.1"
groups = ["main"]
files = [
- {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"},
- {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"},
- {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"},
- {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"},
- {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"},
- {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"},
- {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"},
- {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"},
- {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"},
- {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"},
+ {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"},
+ {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"},
]
-[package.extras]
-dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"]
-test = ["pytest", "pytest-xdist", "setuptools"]
-
[[package]]
name = "pycodestyle"
version = "2.13.0"
@@ -812,16 +1047,29 @@ files = [
{file = "pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"},
]
+[[package]]
+name = "pycparser"
+version = "2.22"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\""
+files = [
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
+]
+
[[package]]
name = "pydantic"
-version = "2.11.4"
+version = "2.11.7"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"},
- {file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"},
+ {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"},
+ {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"},
]
[package.dependencies]
@@ -959,70 +1207,105 @@ files = [
]
[[package]]
-name = "pyparsing"
-version = "3.2.3"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+name = "pygments"
+version = "2.19.1"
+description = "Pygments is a syntax highlighting package written in Python."
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"},
+ {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pylint"
+version = "3.3.7"
+description = "python code static checker"
+optional = false
+python-versions = ">=3.9.0"
groups = ["main"]
files = [
- {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"},
- {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"},
+ {file = "pylint-3.3.7-py3-none-any.whl", hash = "sha256:43860aafefce92fca4cf6b61fe199cdc5ae54ea28f9bf4cd49de267b5195803d"},
+ {file = "pylint-3.3.7.tar.gz", hash = "sha256:2b11de8bde49f9c5059452e0c310c079c746a0a8eeaa789e5aa966ecc23e4559"},
]
+[package.dependencies]
+astroid = ">=3.3.8,<=3.4.0.dev0"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""}
+isort = ">=4.2.5,<5.13 || >5.13,<7"
+mccabe = ">=0.6,<0.8"
+platformdirs = ">=2.2"
+tomlkit = ">=0.10.1"
+
[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
+spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
[[package]]
name = "pytest"
-version = "8.3.5"
+version = "8.4.1"
description = "pytest: simple powerful testing with Python"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["dev"]
files = [
- {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
- {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
+ {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"},
+ {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"},
]
[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
+colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
+iniconfig = ">=1"
+packaging = ">=20"
pluggy = ">=1.5,<2"
+pygments = ">=2.7.2"
[package.extras]
-dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-description = "Extensions to the standard Python datetime module"
+name = "pytest-asyncio"
+version = "1.0.0"
+description = "Pytest support for asyncio"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-groups = ["main"]
+python-versions = ">=3.9"
+groups = ["dev"]
files = [
- {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
- {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+ {file = "pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3"},
+ {file = "pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f"},
]
[package.dependencies]
-six = ">=1.5"
+pytest = ">=8.2,<9"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"]
+testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
-name = "python-json-logger"
-version = "3.3.0"
-description = "JSON Log Formatter for the Python Logging Package"
+name = "pytest-cov"
+version = "6.2.1"
+description = "Pytest plugin for measuring coverage."
optional = false
-python-versions = ">=3.8"
-groups = ["main"]
+python-versions = ">=3.9"
+groups = ["dev"]
files = [
- {file = "python_json_logger-3.3.0-py3-none-any.whl", hash = "sha256:dd980fae8cffb24c13caf6e158d3d61c0d6d22342f932cb6e9deedab3d35eec7"},
- {file = "python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84"},
+ {file = "pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5"},
+ {file = "pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2"},
]
+[package.dependencies]
+coverage = {version = ">=7.5", extras = ["toml"]}
+pluggy = ">=1.2"
+pytest = ">=6.2.5"
+
[package.extras]
-dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "pyyaml"
@@ -1088,121 +1371,205 @@ files = [
]
[[package]]
-name = "scipy"
-version = "1.15.2"
-description = "Fundamental algorithms for scientific computing in Python"
+name = "quart"
+version = "0.20.0"
+description = "A Python ASGI web framework with the same API as Flask"
optional = false
-python-versions = ">=3.10"
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "quart-0.20.0-py3-none-any.whl", hash = "sha256:003c08f551746710acb757de49d9b768986fd431517d0eb127380b656b98b8f1"},
+ {file = "quart-0.20.0.tar.gz", hash = "sha256:08793c206ff832483586f5ae47018c7e40bdd75d886fee3fabbdaa70c2cf505d"},
+]
+
+[package.dependencies]
+aiofiles = "*"
+blinker = ">=1.6"
+click = ">=8.0"
+flask = ">=3.0"
+hypercorn = ">=0.11.2"
+itsdangerous = "*"
+jinja2 = "*"
+markupsafe = "*"
+werkzeug = ">=3.0"
+
+[package.extras]
+dotenv = ["python-dotenv"]
+
+[[package]]
+name = "requests"
+version = "2.32.4"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.8"
groups = ["main"]
files = [
- {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"},
- {file = "scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"},
- {file = "scipy-1.15.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:ecf797d2d798cf7c838c6d98321061eb3e72a74710e6c40540f0e8087e3b499e"},
- {file = "scipy-1.15.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:9b18aa747da280664642997e65aab1dd19d0c3d17068a04b3fe34e2559196cb9"},
- {file = "scipy-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87994da02e73549dfecaed9e09a4f9d58a045a053865679aeb8d6d43747d4df3"},
- {file = "scipy-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69ea6e56d00977f355c0f84eba69877b6df084516c602d93a33812aa04d90a3d"},
- {file = "scipy-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:888307125ea0c4466287191e5606a2c910963405ce9671448ff9c81c53f85f58"},
- {file = "scipy-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9412f5e408b397ff5641080ed1e798623dbe1ec0d78e72c9eca8992976fa65aa"},
- {file = "scipy-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:b5e025e903b4f166ea03b109bb241355b9c42c279ea694d8864d033727205e65"},
- {file = "scipy-1.15.2-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:92233b2df6938147be6fa8824b8136f29a18f016ecde986666be5f4d686a91a4"},
- {file = "scipy-1.15.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:62ca1ff3eb513e09ed17a5736929429189adf16d2d740f44e53270cc800ecff1"},
- {file = "scipy-1.15.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:4c6676490ad76d1c2894d77f976144b41bd1a4052107902238047fb6a473e971"},
- {file = "scipy-1.15.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8bf5cb4a25046ac61d38f8d3c3426ec11ebc350246a4642f2f315fe95bda655"},
- {file = "scipy-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a8e34cf4c188b6dd004654f88586d78f95639e48a25dfae9c5e34a6dc34547e"},
- {file = "scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a0d2c2075946346e4408b211240764759e0fabaeb08d871639b5f3b1aca8a0"},
- {file = "scipy-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:42dabaaa798e987c425ed76062794e93a243be8f0f20fff6e7a89f4d61cb3d40"},
- {file = "scipy-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f5e296ec63c5da6ba6fa0343ea73fd51b8b3e1a300b0a8cae3ed4b1122c7462"},
- {file = "scipy-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:597a0c7008b21c035831c39927406c6181bcf8f60a73f36219b69d010aa04737"},
- {file = "scipy-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c4697a10da8f8765bb7c83e24a470da5797e37041edfd77fd95ba3811a47c4fd"},
- {file = "scipy-1.15.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:869269b767d5ee7ea6991ed7e22b3ca1f22de73ab9a49c44bad338b725603301"},
- {file = "scipy-1.15.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bad78d580270a4d32470563ea86c6590b465cb98f83d760ff5b0990cb5518a93"},
- {file = "scipy-1.15.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b09ae80010f52efddb15551025f9016c910296cf70adbf03ce2a8704f3a5ad20"},
- {file = "scipy-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6fd6eac1ce74a9f77a7fc724080d507c5812d61e72bd5e4c489b042455865e"},
- {file = "scipy-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b871df1fe1a3ba85d90e22742b93584f8d2b8e6124f8372ab15c71b73e428b8"},
- {file = "scipy-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:03205d57a28e18dfd39f0377d5002725bf1f19a46f444108c29bdb246b6c8a11"},
- {file = "scipy-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:601881dfb761311045b03114c5fe718a12634e5608c3b403737ae463c9885d53"},
- {file = "scipy-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:e7c68b6a43259ba0aab737237876e5c2c549a031ddb7abc28c7b47f22e202ded"},
- {file = "scipy-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01edfac9f0798ad6b46d9c4c9ca0e0ad23dbf0b1eb70e96adb9fa7f525eff0bf"},
- {file = "scipy-1.15.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:08b57a9336b8e79b305a143c3655cc5bdbe6d5ece3378578888d2afbb51c4e37"},
- {file = "scipy-1.15.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:54c462098484e7466362a9f1672d20888f724911a74c22ae35b61f9c5919183d"},
- {file = "scipy-1.15.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:cf72ff559a53a6a6d77bd8eefd12a17995ffa44ad86c77a5df96f533d4e6c6bb"},
- {file = "scipy-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de9d1416b3d9e7df9923ab23cd2fe714244af10b763975bea9e4f2e81cebd27"},
- {file = "scipy-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb530e4794fc8ea76a4a21ccb67dea33e5e0e60f07fc38a49e821e1eae3b71a0"},
- {file = "scipy-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5ea7ed46d437fc52350b028b1d44e002646e28f3e8ddc714011aaf87330f2f32"},
- {file = "scipy-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11e7ad32cf184b74380f43d3c0a706f49358b904fa7d5345f16ddf993609184d"},
- {file = "scipy-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:a5080a79dfb9b78b768cebf3c9dcbc7b665c5875793569f48bf0e2b1d7f68f6f"},
- {file = "scipy-1.15.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:447ce30cee6a9d5d1379087c9e474628dab3db4a67484be1b7dc3196bfb2fac9"},
- {file = "scipy-1.15.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c90ebe8aaa4397eaefa8455a8182b164a6cc1d59ad53f79943f266d99f68687f"},
- {file = "scipy-1.15.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:def751dd08243934c884a3221156d63e15234a3155cf25978b0a668409d45eb6"},
- {file = "scipy-1.15.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:302093e7dfb120e55515936cb55618ee0b895f8bcaf18ff81eca086c17bd80af"},
- {file = "scipy-1.15.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd5b77413e1855351cdde594eca99c1f4a588c2d63711388b6a1f1c01f62274"},
- {file = "scipy-1.15.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d0194c37037707b2afa7a2f2a924cf7bac3dc292d51b6a925e5fcb89bc5c776"},
- {file = "scipy-1.15.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:bae43364d600fdc3ac327db99659dcb79e6e7ecd279a75fe1266669d9a652828"},
- {file = "scipy-1.15.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f031846580d9acccd0044efd1a90e6f4df3a6e12b4b6bd694a7bc03a89892b28"},
- {file = "scipy-1.15.2-cp313-cp313t-win_amd64.whl", hash = "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db"},
- {file = "scipy-1.15.2.tar.gz", hash = "sha256:cd58a314d92838f7e6f755c8a2167ead4f27e1fd5c1251fd54289569ef3495ec"},
+ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"},
+ {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"},
]
[package.dependencies]
-numpy = ">=1.23.5,<2.5"
+certifi = ">=2017.4.17"
+charset_normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
[package.extras]
-dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"]
-doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"]
-test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
-name = "six"
-version = "1.17.0"
-description = "Python 2 and 3 compatibility utilities"
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+python-versions = ">=3.7"
groups = ["main"]
files = [
- {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
- {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
+ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
+[[package]]
+name = "starlette"
+version = "0.46.2"
+description = "The little ASGI library that shines."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"},
+ {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"},
+]
+
+[package.dependencies]
+anyio = ">=3.6.2,<5"
+
+[package.extras]
+full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]
+
+[[package]]
+name = "tomlkit"
+version = "0.13.3"
+description = "Style preserving TOML library"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0"},
+ {file = "tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1"},
]
[[package]]
name = "types-pyyaml"
-version = "6.0.12.20250402"
+version = "6.0.12.20250516"
description = "Typing stubs for PyYAML"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
- {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"},
- {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"},
+ {file = "types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"},
+ {file = "types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"},
]
[[package]]
name = "typing-extensions"
-version = "4.13.2"
-description = "Backported and Experimental Type Hints for Python 3.8+"
+version = "4.14.0"
+description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
- {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"},
- {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"},
+ {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"},
+ {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"},
]
[[package]]
name = "typing-inspection"
-version = "0.4.0"
+version = "0.4.1"
description = "Runtime typing introspection tools"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"},
- {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"},
+ {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"},
+ {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"},
]
[package.dependencies]
typing-extensions = ">=4.12.0"
+[[package]]
+name = "urllib3"
+version = "2.4.0"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"},
+ {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "uvicorn"
+version = "0.34.3"
+description = "The lightning-fast ASGI server."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885"},
+ {file = "uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a"},
+]
+
+[package.dependencies]
+click = ">=7.0"
+h11 = ">=0.8"
+
+[package.extras]
+standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+
+[[package]]
+name = "werkzeug"
+version = "3.1.3"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"},
+ {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[[package]]
+name = "wsproto"
+version = "1.2.0"
+description = "WebSockets state-machine based protocol implementation"
+optional = false
+python-versions = ">=3.7.0"
+groups = ["main"]
+files = [
+ {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
+ {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
+]
+
+[package.dependencies]
+h11 = ">=0.9.0,<1"
+
[metadata]
lock-version = "2.1"
python-versions = "^3.12"
-content-hash = "21def6a1aae7733a0bab3852db036f408051f09d8bf8ad76ce49323d2299c0ab"
+content-hash = "9359273c3ed8bc841eb0dae6b11f747a7bce2e105b70bc3cf2fadc0a2ffc5cad"
diff --git a/pyproject.toml b/pyproject.toml
index 1a998b1..8780586 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,32 +6,44 @@ authors = ["Marco Melloni <291358@studenti.unimore.it>"]
license = "ICAPL"
readme = "README.md"
-packages = [
- { include = "simulation_bridge", from = "src" }
+packages = [{ include = "simulation_bridge" }]
+
+include = [
+ "simulation_bridge/resources/**/*",
+ "simulation_bridge/config/**/*"
]
[tool.poetry.dependencies]
-python = "^3.12"
+blinker = "^1.9.0"
+colorlog = "^6.9.0"
+fastapi = "^0.115.12"
+hypercorn = "^0.17"
+httpx = "^0.28.1"
paho-mqtt = ">=2.1.0,<3.0.0"
-pyyaml = ">=6.0.2,<7.0.0"
pika = "^1.3.2"
-scipy = "^1.15.2"
-matplotlib = "^3.10.1"
-python-json-logger = "^3.3.0"
-pydantic = "^2.11.3"
-psutil = "^7.0.0"
+pydantic = "^2.11.5"
+pyyaml = ">=6.0.2,<7.0.0"
+pylint = "^3.3.7"
+quart = "^0.20.0"
+requests = "^2.32.3"
+python = "^3.12"
+uvicorn = "^0.34.2"
+cryptography = "^45.0.4"
[tool.poetry.group.dev.dependencies]
-pytest = "^8.3.5"
+autopep8 = "^2.3.2"
black = "^25.1.0"
+flake8 = "^7.2.0"
isort = "^6.0.1"
mypy = "^1.15.0"
-flake8 = "^7.2.0"
+pytest = "^8.3.5"
+pytest-asyncio = "^1.0.0"
types-pyyaml = "^6.0.12.20250402"
+pytest-cov = "^6.1.1"
[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
-simulation-bridge = "src.simulation_bridge.main:main"
\ No newline at end of file
+simulation-bridge = "simulation_bridge.src.main:main"
\ No newline at end of file
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..7ef0154
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+testpaths = simulation_bridge/test/
diff --git a/src/__init__.py b/simulation_bridge/__init__.py
similarity index 100%
rename from src/__init__.py
rename to simulation_bridge/__init__.py
diff --git a/simulation_bridge/config/config.yaml.template b/simulation_bridge/config/config.yaml.template
new file mode 100644
index 0000000..980fdd9
--- /dev/null
+++ b/simulation_bridge/config/config.yaml.template
@@ -0,0 +1,80 @@
+simulation_bridge:
+ bridge_id: simulation_bridge
+
+rabbitmq:
+ host: localhost
+ port: 5672
+ vhost: /
+ username: guest
+ password: guest
+ tls: false
+
+ infrastructure:
+ exchanges:
+ - name: ex.input.bridge
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ - name: ex.bridge.output
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ - name: ex.sim.result
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ - name: ex.bridge.result
+ type: topic
+ durable: true
+ auto_delete: false
+ internal: false
+
+ queues:
+ - name: Q.bridge.input
+ durable: true
+ exclusive: false
+ auto_delete: false
+
+ - name: Q.bridge.result
+ durable: true
+ exclusive: false
+ auto_delete: false
+
+ bindings:
+ - queue: Q.bridge.input
+ exchange: ex.input.bridge
+ routing_key: "#"
+
+ - queue: Q.bridge.result
+ exchange: ex.sim.result
+ routing_key: "#"
+
+mqtt:
+ host: localhost
+ port: 1883
+ keepalive: 60
+ input_topic: bridge/input
+ output_topic: bridge/output
+ qos: 0
+ username: guest
+ password: guest
+ tls: false
+
+rest:
+ host: localhost
+ port: 5000
+ endpoint: /message
+ debug: false
+ certfile: certs/cert.pem
+ keyfile: certs/key.pem
+
+logging:
+ level: INFO
+ format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ file: logs/sim_bridge.log
\ No newline at end of file
diff --git a/simulation_bridge/docs/class_diagram.md b/simulation_bridge/docs/class_diagram.md
new file mode 100644
index 0000000..f3adf73
--- /dev/null
+++ b/simulation_bridge/docs/class_diagram.md
@@ -0,0 +1,129 @@
+```mermaid
+classDiagram
+ class BridgeOrchestrator {
+ -simulation_bridge_id: str
+ -config_manager: ConfigManager
+ -config: dict
+ -bridge: BridgeCore
+ -adapters: dict
+ -_running: bool
+ -protocol_config: dict
+ -adapter_classes: dict
+ +__init__(simulation_bridge_id: str, config_path: str = None)
+ +setup_interfaces()
+ +start()
+ +stop()
+ -_import_adapter_classes(): dict
+ }
+
+ class BridgeCore {
+ -config: dict
+ -connection: pika.BlockingConnection
+ -channel: pika.channel.Channel
+ -adapters: dict
+ +__init__(config_manager: ConfigManager, adapters: dict)
+ +_initialize_rabbitmq_connection()
+ +_ensure_connection() bool
+ +handle_input_message(sender, **kwargs)
+ +handle_result_rabbitmq_message(sender, **kwargs)
+ +handle_result_unknown_message(sender, **kwargs)
+ +_publish_message(producer, consumer, message, exchange='ex.bridge.output', protocol='unknown')
+ }
+
+ class RabbitMQInfrastructure {
+ -config: dict
+ -connection: pika.BlockingConnection
+ -channel: pika.channel.Channel
+ +__init__(config_manager: ConfigManager)
+ +setup()
+ +reconnect()
+ -_setup_exchanges()
+ -_setup_queues()
+ -_setup_bindings()
+ }
+
+ class ConfigManager {
+ -config_path: Path
+ -config: dict
+ +__init__(config_path: Optional[str] = None)
+ +get_config() Dict[str, Any]
+ +get_rabbitmq_config() Dict[str, Any]
+ +get_mqtt_config() Dict[str, Any]
+ +get_rest_config() Dict[str, Any]
+ +get_logging_config() Dict[str, Any]
+ }
+
+ class SignalManager {
+ -PROTOCOL_CONFIG: dict
+ -_bridge_core_instance: object
+ -_adapter_instances: dict
+ +set_bridge_core(bridge_core_instance)
+ +register_adapter_instance(protocol: str, adapter_instance: object)
+ +get_available_signals(protocol: str) List[str]
+ +get_enabled_protocols() List[str]
+ +is_protocol_enabled(protocol: str) bool
+ +connect_all_signals()
+ +disconnect_all_signals()
+ -_resolve_callback(func_path: str, protocol: str) Callable
+ }
+
+ class RabbitMQAdapter {
+ +__init__(config_manager: ConfigManager)
+ +start()
+ +stop()
+ -_get_config() Dict[str, Any]
+ -_process_message(ch, method, properties, body, queue_name)
+ -_run_consumer()
+ -_handle_message(message: Dict[str, Any])
+ -_start_adapter()
+ }
+
+
+ class MQTTAdapter {
+ +__init__(config_manager: ConfigManager)
+ +start()
+ +stop()
+ +send_result(message)
+ +publish_result_message_mqtt(sender, **kwargs)
+ -_get_config() Dict[str, Any]
+ -on_connect(client, userdata, flags, rc)
+ -on_disconnect(client, userdata, rc)
+ -on_message(client, userdata, msg)
+ -_process_messages()
+ -_run_client()
+ -_handle_message(message: Dict[str, Any])
+ }
+
+ class RESTAdapter {
+ +__init__(config_manager: ConfigManager)
+ +start()
+ +stop()
+ +message_received_input_rest()
+ +message_received_result_rest()
+ +send_result_sync(producer: str, result: dict)
+ +send_result(producer: str, result: dict)
+ -_get_config() Dict[str, Any]
+ -_setup_routes()
+ -_handle_streaming_message() Response
+ -_parse_message(body: bytes, content_type: str) Dict[str, Any]
+ -_generate_response(producer: str, queue: asyncio.Queue) AsyncGenerator[str]
+ -_start_server()
+ -_handle_message(message: dict)
+ }
+
+
+ BridgeOrchestrator --> ConfigManager : uses configuration data
+ BridgeOrchestrator --> SignalManager : registers and connects all signals for event handling
+ BridgeOrchestrator --> BridgeCore : creates & controls core bridge logic
+ BridgeOrchestrator --> RabbitMQInfrastructure : sets up RabbitMQ infrastructure
+ BridgeCore --> ConfigManager : accesses configuration
+ RabbitMQInfrastructure --> ConfigManager : uses configuration for connection settings
+ SignalManager "1" o-- "1..*" BridgeCore : manages signals for BridgeCore communication
+ SignalManager "1" o-- "1..*" RabbitMQAdapter : manages signals for RabbitMQ event handling
+ SignalManager "1" o-- "1..*" MQTTAdapter : manages signals for MQTT event handling
+ SignalManager "1" o-- "1..*" RESTAdapter : manages signals for REST event handling
+
+ RabbitMQAdapter ..> BridgeCore : calls handle_input_message,handle_result_rabbitmq_message,handle_result_unknown_message
+ MQTTAdapter ..> BridgeCore : calls handle_input_message
+ RESTAdapter ..> BridgeCore : calls handle_input_message
+```
diff --git a/simulation_bridge/docs/internal_architecture.md b/simulation_bridge/docs/internal_architecture.md
new file mode 100644
index 0000000..47af061
--- /dev/null
+++ b/simulation_bridge/docs/internal_architecture.md
@@ -0,0 +1,77 @@
+# Simulation Bridge Internal Architecture
+
+The Simulation Bridge is designed according to an event-driven architecture. Each simulation request is transformed into an independent event, enabling asynchronous and non-blocking process management. Converting requests into events allows decoupling the communication flow between clients and simulators.
+
+
+
+## Message Flow
+
+```mermaid
+sequenceDiagram
+ participant DT as Digital Twin (REST)
+ participant MockPT as Mock Physical Twin (RabbitMQ)
+ participant PT as Physical Twin (MQTT)
+ participant Bridge as Simulation Bridge
+ participant SimA as Simulator A
+ participant SimB as Simulator B
+
+ rect rgba(46, 64, 83, 0.1)
+ Note over DT,Bridge: REST Flow Example
+ DT->>+Bridge: Send Request (REST)
+ Note right of Bridge: Parse & convert to internal format
+ Bridge->>+SimA: Forward Request
+ SimA-->>-Bridge: Return Response
+ Note right of Bridge: Convert to REST format
+ Bridge-->>-DT: Deliver Response (REST)
+ end
+
+ rect rgba(39, 174, 96, 0.1)
+ Note over MockPT,Bridge: RabbitMQ Flow Example
+ MockPT->>+Bridge: Send Request (RabbitMQ)
+ Note right of Bridge: Parse & convert to internal format
+ Bridge->>+SimB: Forward Request
+ SimB-->>-Bridge: Return Response
+ Note right of Bridge: Convert to RabbitMQ format
+ Bridge-->>-MockPT: Deliver Response (RabbitMQ)
+ end
+
+ rect rgba(142, 68, 173, 0.1)
+ Note over PT,Bridge: MQTT Flow Example
+ PT->>+Bridge: Send Request (MQTT)
+ Note right of Bridge: Parse & convert to internal format
+ Bridge->>+SimA: Forward Request
+ SimA-->>-Bridge: Return Response
+ Note right of Bridge: Convert to MQTT format
+ Bridge-->>-PT: Deliver Response (MQTT)
+ end
+```
+
+## Signal System
+
+The Simulation Bridge implements an event-driven message dispatching system using Blinker for internal signal routing across protocols.
+
+The `SignalManager` class (located in `utils/signal_manager.py`) serves as the central component for managing signal flow:
+
+- Automatically loads signal definitions from protocol configuration files (e.g., `adapters_signal.json`)
+- Registers all protocol adapters and the `BridgeCore` instance
+- Maps signal names (e.g., `message_received_input_mqtt`) to their corresponding handler methods (e.g., `BridgeCore.handle_input_message`)
+- Facilitates clean disconnection of all signals during shutdown
+- Provides comprehensive logging for debugging and traceability
+
+This approach effectively decouples protocol-specific logic from the core business logic, enabling flexible signal routing based on the configured architecture.
+
+### Protocol Signal Reference
+
+| Protocol | Available Signals |
+| -------- | ----------------------------------------------------------------------- |
+| RabbitMQ | `message_received_input_rabbitmq` `message_received_result_rabbitmq` |
+| MQTT | `message_received_input_mqtt` `message_received_result_mqtt` |
+| REST | `message_received_input_rest` `message_received_result_rest` |
+
+## Threading Model
+
+The system uses a multi-threaded architecture:
+
+- Each adapter runs in its own thread
+- Main thread monitors adapter health
+- Clean shutdown mechanism
diff --git a/simulation_bridge/resources/README.md b/simulation_bridge/resources/README.md
new file mode 100644
index 0000000..233db32
--- /dev/null
+++ b/simulation_bridge/resources/README.md
@@ -0,0 +1,79 @@
+## Example Clients
+
+This folder contains three example clients that communicate with the Simulation Bridge using three different protocols:
+
+- **mqtt/** – MQTT Client
+- **rabbitmq/** – RabbitMQ Client
+- **rest/** – REST Client
+
+Each client is completely independent and demonstrates how to send a simulation request and handle real-time responses.
+
+### Folder Structure
+
+```
+client/
+├── README.md # you are here!
+├── simulation.yaml # API payload for simulation requests
+├── mqtt/
+│ ├── mqtt_client.py # MQTT-specific Python client
+│ ├── mqtt_use.yaml # MQTT client configuration
+│ └── requirements.txt # Python dependencies
+├── rabbitmq/
+│ ├── rabbitmq_client.py # RabbitMQ-specific Python client
+│ ├── rabbitmq_use.yaml # RabbitMQ client configuration
+│ └── requirements.txt # Python dependencies
+└── rest/
+ ├── rest_client.py # REST-specific Python client
+ ├── rest_use.yaml # REST client configuration
+ └── requirements.txt # Python dependencies
+```
+
+Each subfolder (mqtt/, rabbitmq/, rest/) contains:
+
+- `*_client.py` – Protocol-specific Python client
+- `*_use.yaml` – Client configuration file (network parameters, authentication, etc.)
+- `requirements.txt` – Python dependencies to run the client
+
+Additionally, in the root folder (client/) there is:
+
+- `simulation.yaml` – The API payload to use for making requests to the simulation bridge
+
+> **Note:** Make sure the agents and the simulation bridge are configured and running before using any client.
+
+### How to use a client
+
+#### 1. Configure API payload
+
+Customize the `client/simulation.yaml` file with your distributed simulation parameters.
+
+#### 2. Configure the client
+
+In the subfolder of the client you want to use, modify `mqtt_use.yaml`, `rabbitmq_use.yaml` or `rest_use.yaml` based on the chosen protocol (e.g. host, port, topic, URL, etc.).
+
+#### 3. Install dependencies
+
+Navigate to the desired client folder, for example:
+
+```bash
+cd mqtt
+pip install -r requirements.txt
+```
+
+#### 4. Run the client
+
+Execute the Python script to send the request and start listening for responses:
+
+```bash
+python mqtt_client.py
+```
+
+Each client will send the request defined in `simulation.yaml` and then keep listening for results.
+
+### Customization
+
+These clients are examples designed to be adapted. You can modify them to:
+
+- Integrate into your workflows
+- Automate decisions based on simulation results
+- Log or save results
+- Handle asynchronous simulation flows
diff --git a/src/simulation_bridge/__init__.py b/simulation_bridge/resources/__init__.py
similarity index 100%
rename from src/simulation_bridge/__init__.py
rename to simulation_bridge/resources/__init__.py
diff --git a/src/simulation_bridge/config/__init__.py b/simulation_bridge/resources/mqtt/__init__.py
similarity index 100%
rename from src/simulation_bridge/config/__init__.py
rename to simulation_bridge/resources/mqtt/__init__.py
diff --git a/simulation_bridge/resources/mqtt/mqtt_client.py b/simulation_bridge/resources/mqtt/mqtt_client.py
new file mode 100644
index 0000000..8e8749c
--- /dev/null
+++ b/simulation_bridge/resources/mqtt/mqtt_client.py
@@ -0,0 +1,123 @@
+"""MQTT Client for simulation bridge."""
+
+import os
+import ssl
+import json
+import sys
+import yaml
+import paho.mqtt.client as mqtt
+
+
+def load_config(config_path="mqtt_use.yaml"):
+ """Load YAML configuration file.
+
+ Args:
+ config_path: Path to the YAML configuration file.
+
+ Returns:
+ dict: Configuration data.
+
+ Raises:
+ SystemExit: If the file cannot be found or parsed.
+ """
+ try:
+ with open(config_path, "r", encoding="utf-8") as file:
+ return yaml.safe_load(file)
+ except FileNotFoundError:
+ print(f"Error: Configuration file '{config_path}' not found.")
+ sys.exit(1)
+ except yaml.YAMLError as exc:
+ print(f"Error parsing YAML file: {exc}")
+ sys.exit(1)
+
+
+class MQTTClient:
+ """MQTT Client for handling simulation data."""
+
+ def __init__(self, config):
+ """Initialize the MQTT client.
+
+ Args:
+ config: Dictionary containing configuration data.
+ """
+ self.config = config['mqtt']
+ self.payload_file = config.get('payload_file', 'simulation.yaml')
+ self.client = mqtt.Client()
+ self.client.username_pw_set(
+ self.config['username'],
+ self.config['password']
+ )
+ if self.config.get('tls', False):
+ self.client.tls_set(
+ cert_reqs=ssl.CERT_REQUIRED,
+ tls_version=ssl.PROTOCOL_TLS_CLIENT
+ )
+ self.client.tls_insecure_set(False)
+ self.client.on_message = self.on_message
+
+ def on_message(self, client, userdata, msg): # pylint: disable=unused-argument
+ """Callback for received messages.
+
+ Args:
+ client: MQTT client instance.
+ userdata: User data.
+ msg: Message received.
+ """
+ print("\n📥 Message received:")
+ print(f"🔹 Topic: {msg.topic}")
+ print(f"🔹 Payload: {msg.payload.decode()}")
+
+ def create_request(self):
+ """Load payload from YAML file.
+
+ Returns:
+ dict: Payload data.
+
+ Raises:
+ SystemExit: If the file cannot be loaded.
+ """
+ file_path = os.path.join(
+ os.path.dirname(
+ os.path.abspath(__file__)),
+ self.payload_file)
+ try:
+ with open(file_path, 'r', encoding='utf-8') as file:
+ payload = yaml.safe_load(file)
+ print("✅ Payload loaded:", payload)
+ return payload
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ print(f"❌ Error loading {self.payload_file}: {exc}")
+ sys.exit(1)
+
+ def connect_and_listen(self):
+ """Connect to MQTT broker, publish payload, and listen for messages."""
+ self.client.connect(
+ self.config['host'],
+ self.config['port'],
+ self.config['keepalive']
+ )
+
+ # Subscribe to output topic
+ self.client.subscribe(
+ self.config['output_topic'],
+ qos=self.config['qos'])
+
+ # Publish payload to input topic
+ payload = self.create_request()
+ self.client.publish(
+ self.config['input_topic'],
+ json.dumps(payload),
+ qos=self.config['qos']
+ )
+ print(f"📤 Message published to {self.config['input_topic']}")
+
+ print(
+ f"""📡 Listening on {
+ self.config['output_topic']}...\n(CTRL+C to terminate)""")
+ self.client.loop_forever()
+
+
+if __name__ == "__main__":
+ CONFIG = load_config()
+ MQTT_CLIENT = MQTTClient(CONFIG)
+ MQTT_CLIENT.connect_and_listen()
diff --git a/simulation_bridge/resources/mqtt/mqtt_use.yaml.template b/simulation_bridge/resources/mqtt/mqtt_use.yaml.template
new file mode 100644
index 0000000..88dbf1c
--- /dev/null
+++ b/simulation_bridge/resources/mqtt/mqtt_use.yaml.template
@@ -0,0 +1,12 @@
+mqtt:
+ host: "localhost"
+ port: 1883
+ keepalive: 60
+ qos: 0
+ input_topic: "bridge/input"
+ output_topic: "bridge/output"
+ username: "guest"
+ password: "guest"
+ tls: false
+
+payload_file: "../simulation.yaml"
diff --git a/simulation_bridge/resources/mqtt/requirements.txt b/simulation_bridge/resources/mqtt/requirements.txt
new file mode 100644
index 0000000..b1d837b
--- /dev/null
+++ b/simulation_bridge/resources/mqtt/requirements.txt
@@ -0,0 +1,2 @@
+paho-mqtt>=1.6.1
+PyYAML>=6.0
diff --git a/src/simulation_bridge/utils/__init__.py b/simulation_bridge/resources/rabbitmq/__init__.py
similarity index 100%
rename from src/simulation_bridge/utils/__init__.py
rename to simulation_bridge/resources/rabbitmq/__init__.py
diff --git a/simulation_bridge/resources/rabbitmq/rabbitmq_client.py b/simulation_bridge/resources/rabbitmq/rabbitmq_client.py
new file mode 100644
index 0000000..4222842
--- /dev/null
+++ b/simulation_bridge/resources/rabbitmq/rabbitmq_client.py
@@ -0,0 +1,189 @@
+"""RabbitMQ client for simulation bridge."""
+import os
+import ssl
+import sys
+import threading
+import uuid
+import time
+import pika
+import yaml
+
+
+def load_config(config_path="rabbitmq_use.yaml"):
+ """Load YAML configuration file."""
+ try:
+ with open(config_path, "r", encoding="utf-8") as file:
+ return yaml.safe_load(file)
+ except FileNotFoundError:
+ print(f"Error: Configuration file '{config_path}' not found.")
+ sys.exit(1)
+ except yaml.YAMLError as err:
+ print(f"Error parsing YAML file: {err}")
+ sys.exit(1)
+
+
+class RabbitMQClient:
+ """Digital Twin client for simulation bridge."""
+
+ def __init__(self, config):
+ """Initialize the Digital Twin with the given configuration."""
+ self.config = config
+ self.dt_id = config['digital_twin']['dt_id']
+
+ rabbitmq_cfg = config['rabbitmq']
+ credentials = pika.PlainCredentials(
+ username=rabbitmq_cfg['username'],
+ password=rabbitmq_cfg['password']
+ )
+ use_tls = rabbitmq_cfg.get('tls', False)
+
+ if use_tls:
+ context = ssl.create_default_context()
+ ssl_options = pika.SSLOptions(context, rabbitmq_cfg['host'])
+ parameters = pika.ConnectionParameters(
+ host=rabbitmq_cfg['host'],
+ port=rabbitmq_cfg.get('port', 5671),
+ virtual_host=rabbitmq_cfg.get('vhost', '/'),
+ credentials=credentials,
+ ssl_options=ssl_options,
+ heartbeat=rabbitmq_cfg.get('heartbeat', 600)
+ )
+ else:
+ parameters = pika.ConnectionParameters(
+ host=rabbitmq_cfg['host'],
+ port=rabbitmq_cfg.get('port', 5672),
+ virtual_host=rabbitmq_cfg.get('vhost', '/'),
+ credentials=credentials,
+ heartbeat=rabbitmq_cfg.get('heartbeat', 600)
+ )
+
+ self.connection = pika.BlockingConnection(parameters)
+ self.channel = self.connection.channel()
+ self.result_queue_name = None
+ self.setup_infrastructure()
+
+ def setup_infrastructure(self):
+ """Set up RabbitMQ exchanges and queues."""
+ input_ex = self.config['exchanges']['input_bridge']
+ result_ex = self.config['exchanges']['bridge_result']
+ queue_cfg = self.config['queue']
+
+ # Declare exchanges
+ self.channel.exchange_declare(
+ exchange=input_ex['name'],
+ exchange_type=input_ex['type'],
+ durable=input_ex['durable']
+ )
+
+ self.channel.exchange_declare(
+ exchange=result_ex['name'],
+ exchange_type=result_ex['type'],
+ durable=result_ex['durable']
+ )
+
+ # Declare and bind result queue
+ self.result_queue_name = f"{
+ queue_cfg['result_queue_prefix']}.{
+ self.dt_id}.result" # pylint: disable=line-too-long
+ self.channel.queue_declare(
+ queue=self.result_queue_name, durable=queue_cfg['durable'])
+ self.channel.queue_bind(
+ exchange=result_ex['name'],
+ queue=self.result_queue_name,
+ routing_key=queue_cfg['routing_key']
+ )
+
+ def send_simulation_request(self, payload_data):
+ """Send a simulation request to the bridge."""
+ payload = {
+ **payload_data,
+ 'request_id': str(uuid.uuid4()),
+ }
+
+ payload_yaml = yaml.dump(payload, default_flow_style=False)
+ routing_key = self.config['digital_twin']['routing_key_send']
+
+ self.channel.basic_publish(
+ exchange=self.config['exchanges']['input_bridge']['name'],
+ routing_key=routing_key,
+ body=payload_yaml,
+ properties=pika.BasicProperties(
+ delivery_mode=2,
+ content_type='application/x-yaml',
+ message_id=str(uuid.uuid4())
+ )
+ )
+
+ def handle_result(self, channel, method, properties, body): # pylint: disable=unused-argument
+ """Handle incoming simulation results."""
+ try:
+ source = method.routing_key.split('.')[0]
+ result = yaml.safe_load(body)
+
+ print(f"\n[{self.dt_id.upper()}] Received result from {source}:")
+ print(f"Result: {result}")
+ print("-" * 50)
+
+ channel.basic_ack(method.delivery_tag)
+
+ except yaml.YAMLError as err:
+ print(f"Error decoding YAML result: {err}")
+ channel.basic_nack(method.delivery_tag)
+ except Exception as err: # pylint: disable=broad-exception-caught
+ print(f"Error processing the result: {err}")
+ channel.basic_nack(method.delivery_tag)
+
+ def start_listening(self):
+ """Start listening for simulation results."""
+ self.channel.basic_consume(
+ queue=self.result_queue_name,
+ on_message_callback=self.handle_result
+ )
+ print(f" [{self.dt_id.upper()}] Listening for simulation results...")
+ self.channel.start_consuming()
+
+ @staticmethod
+ def load_yaml_file(file_path):
+ """Load and parse a YAML file."""
+ with open(file_path, 'r', encoding="utf-8") as file:
+ return yaml.safe_load(file)
+
+
+def start_dt_listener(config):
+ """Start a Digital Twin listener in a separate thread."""
+ dt = RabbitMQClient(config)
+ dt.start_listening()
+
+
+def main():
+ """Main program entry point."""
+ config = load_config()
+
+ # Start listener thread
+ listener_thread = threading.Thread(
+ target=start_dt_listener, args=(config,))
+ listener_thread.daemon = True
+ listener_thread.start()
+
+ # Create digital twin and send simulation request
+ dt = RabbitMQClient(config)
+
+ base_dir = os.path.dirname(os.path.abspath(__file__))
+ yaml_file_path = os.path.join(base_dir, config['payload_file'])
+
+ try:
+ simulation_payload = dt.load_yaml_file(yaml_file_path)
+ dt.send_simulation_request(simulation_payload)
+
+ print("\nPress Ctrl+C to terminate the program...")
+ while True:
+ time.sleep(1)
+
+ except KeyboardInterrupt:
+ print("\nProgram terminated by the user.")
+ except Exception as err: # pylint: disable=broad-exception-caught
+ print(f"Error: {err}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/simulation_bridge/resources/rabbitmq/rabbitmq_use.yaml.template b/simulation_bridge/resources/rabbitmq/rabbitmq_use.yaml.template
new file mode 100644
index 0000000..bb35f23
--- /dev/null
+++ b/simulation_bridge/resources/rabbitmq/rabbitmq_use.yaml.template
@@ -0,0 +1,29 @@
+rabbitmq:
+ host: localhost
+ port: 5672
+ vhost: /
+ username: guest
+ password: guest
+ tls: false
+
+exchanges:
+ input_bridge:
+ name: "ex.input.bridge"
+ type: "topic"
+ durable: true
+
+ bridge_result:
+ name: "ex.bridge.result"
+ type: "topic"
+ durable: true
+
+queue:
+ result_queue_prefix: "Q"
+ durable: true
+ routing_key: "*.result"
+
+digital_twin:
+ dt_id: "dt"
+ routing_key_send: "dt"
+
+payload_file: "../simulation.yaml"
diff --git a/simulation_bridge/resources/rabbitmq/requirements.txt b/simulation_bridge/resources/rabbitmq/requirements.txt
new file mode 100644
index 0000000..251628d
--- /dev/null
+++ b/simulation_bridge/resources/rabbitmq/requirements.txt
@@ -0,0 +1,2 @@
+pika>=1.3.2
+PyYAML>=6.0
diff --git a/simulation_bridge/resources/rest/__init__.py b/simulation_bridge/resources/rest/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/resources/rest/requirements.txt b/simulation_bridge/resources/rest/requirements.txt
new file mode 100644
index 0000000..51f70f4
--- /dev/null
+++ b/simulation_bridge/resources/rest/requirements.txt
@@ -0,0 +1,2 @@
+httpx[http2]>=0.27.0
+PyYAML>=6.0
diff --git a/simulation_bridge/resources/rest/rest_client.py b/simulation_bridge/resources/rest/rest_client.py
new file mode 100644
index 0000000..cb0162c
--- /dev/null
+++ b/simulation_bridge/resources/rest/rest_client.py
@@ -0,0 +1,95 @@
+"""REST client module for sending YAML data and streaming responses."""
+
+import asyncio
+import sys
+from pathlib import Path
+from typing import NoReturn, Dict, Any
+import yaml
+import httpx
+
+
+def load_config(config_path: str = "rest_use.yaml") -> Dict[str, Any]:
+ """Load configuration from YAML file.
+
+ Args:
+ config_path: Path to the YAML configuration file
+
+ Returns:
+ Dictionary containing configuration values
+
+ Exits:
+ If file not found or YAML parsing error occurs
+ """
+ try:
+ with open(config_path, "r", encoding="utf-8") as file:
+ return yaml.safe_load(file)
+ except FileNotFoundError:
+ print(f"Error: Config file '{config_path}' not found.")
+ sys.exit(1)
+ except yaml.YAMLError as exc:
+ print(f"Error parsing YAML config: {exc}")
+ sys.exit(1)
+
+
+class RESTClient:
+ """Client for sending YAML data to REST endpoints and streaming responses."""
+
+ def __init__(self, config: Dict[str, Any]):
+ """Initialize the REST client with configuration.
+
+ Args:
+ config: Dictionary containing client configuration
+ """
+ self.yaml_file = config["yaml_file"]
+ self.url = config["url"]
+ self.timeout = config.get("timeout", 600)
+
+ async def send_yaml_and_stream_response(self) -> None:
+ """Send YAML data to server and stream the response."""
+ headers = {
+ "Content-Type": "application/x-yaml",
+ "Accept": "application/x-ndjson"
+ }
+
+ try:
+ yaml_data = Path(self.yaml_file).read_bytes()
+ except FileNotFoundError:
+ print(f"Error: YAML file not found at '{self.yaml_file}'")
+ sys.exit(1)
+
+ async with httpx.AsyncClient(verify=False) as client:
+ try:
+ async with client.stream(
+ "POST", self.url,
+ headers=headers,
+ content=yaml_data,
+ timeout=self.timeout
+ ) as response:
+ print(f"Status: {response.status_code}")
+
+ if response.status_code >= 400:
+ print(
+ f"""Error: Server returned status code {
+ response.status_code}""")
+ return
+
+ async for line in response.aiter_lines():
+ if line.strip():
+ print(f"Received: {line}")
+ except httpx.RequestError as error:
+ print(
+ f"""An error occurred while requesting {
+ error.request.url!r}.\n"""
+ f"Error: {error}"
+ )
+
+
+def main() -> NoReturn:
+ """Run the REST client application."""
+ config = load_config()
+ client = RESTClient(config)
+ asyncio.run(client.send_yaml_and_stream_response())
+
+
+if __name__ == "__main__":
+ main()
diff --git a/simulation_bridge/resources/rest/rest_use.yaml.template b/simulation_bridge/resources/rest/rest_use.yaml.template
new file mode 100644
index 0000000..292009b
--- /dev/null
+++ b/simulation_bridge/resources/rest/rest_use.yaml.template
@@ -0,0 +1,3 @@
+url: https://localhost:5000/message # URL of the REST endpoint
+yaml_file: ../simulation.yaml # Path to the YAML file with simulation data
+timeout: 600 # Timeout for the request in seconds
diff --git a/simulation_bridge/resources/simulation.yaml.template b/simulation_bridge/resources/simulation.yaml.template
new file mode 100644
index 0000000..fcc1d10
--- /dev/null
+++ b/simulation_bridge/resources/simulation.yaml.template
@@ -0,0 +1,34 @@
+simulation:
+ request_id: abcdef12345
+ # (RequestID) to identify each request.
+
+ client_id: dt
+ # Unique identifier of the sender of this simulation request
+
+ simulator: matlab
+ # Specifies the target system for the simulation.
+ # Use 'matlab' to route the request to the MATLAB simulator.
+
+ type: streaming
+ # Specifies the simulation execution mode.
+ # Options:
+ # - 'batch': runs the simulation in batch mode, where results are returned only after the entire computation is complete.
+ # - 'streaming': runs the simulation in streaming mode, providing real-time updates at each computation step.
+
+ file: SimulationStreaming.m
+ # The name of the MATLAB script or function file to execute for this simulation.
+
+ inputs:
+ # Input variables to be passed to the simulation.
+ # Customize these key-value pairs as needed for your specific simulation.
+ i1: ..
+ i2: ..
+ i3: ..
+
+ outputs:
+ # Expected output variables from the simulation.
+ # Customize these keys based on what outputs your simulation provides.
+ o1: ..
+ o2: ..
+ o3: ..
+ o4: ..
diff --git a/simulation_bridge/src/__init__.py b/simulation_bridge/src/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/core/__init__.py b/simulation_bridge/src/core/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/core/bridge_core.py b/simulation_bridge/src/core/bridge_core.py
new file mode 100644
index 0000000..a104d0c
--- /dev/null
+++ b/simulation_bridge/src/core/bridge_core.py
@@ -0,0 +1,244 @@
+"""
+Core bridge module for message routing between different protocols.
+
+This module handles message routing between RabbitMQ, MQTT, and REST protocols,
+providing a unified interface for cross-protocol communication.
+"""
+
+from typing import Dict, Any
+import json
+import ssl
+import pika
+from pydantic import BaseModel
+from ..utils.config_manager import ConfigManager
+from ..utils.logger import get_logger
+
+# Constants for RabbitMQ connection parameters
+RABBITMQ_HEARTBEAT = 600 # 10 minutes heartbeat
+RABBITMQ_BLOCKED_CONNECTION_TIMEOUT = 300 # 5 minutes timeout
+RABBITMQ_CONNECTION_ATTEMPTS = 3 # Number of connection attempts
+RABBITMQ_RETRY_DELAY = 5 # Delay between retries in seconds
+
+
+logger = get_logger()
+
+# Pydantic models for message validation
+
+
+class SimulationModel(BaseModel):
+ "Represents the details of a simulation request."
+ request_id: str
+ client_id: str
+ simulator: str
+ type: str
+ file: str
+ inputs: Dict[str, Any]
+ outputs: Dict[str, Any]
+
+
+class MessageModel(BaseModel):
+ "Represents a message structure for simulation requests."
+ simulation: SimulationModel
+
+
+class BridgeCore:
+ """
+ Core bridge class for handling message routing between different protocols.
+
+ Manages connections to RabbitMQ, MQTT, and REST endpoints, and routes
+ messages between them based on protocol metadata.
+ """
+
+ def __init__(self, config_manager: ConfigManager, adapters: dict):
+ """
+ Initialize the bridge core with configuration and adapters.
+
+ Args:
+ config_manager: Configuration manager instance
+ adapters: Dictionary of protocol adapters
+ """
+ self.config = config_manager.get_rabbitmq_config()
+ self.connection = None
+ self.channel = None
+ self._initialize_rabbitmq_connection()
+ self.adapters = adapters
+ logger.debug("Signals connected and bridge core initialized")
+
+ def _initialize_rabbitmq_connection(self):
+ """Initialize or reinitialize the RabbitMQ connection."""
+ try:
+ if self.connection and not self.connection.is_closed:
+ self.connection.close()
+
+ try:
+ credentials = pika.PlainCredentials(
+ self.config['username'],
+ self.config['password']
+ )
+
+ if self.config.get('tls', False):
+ context = ssl.create_default_context()
+ ssl_options = pika.SSLOptions(context, self.config['host'])
+ connection_params = pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials,
+ ssl_options=ssl_options
+ )
+ else:
+ connection_params = pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials
+ )
+
+ self.connection = pika.BlockingConnection(connection_params)
+
+ except (pika.exceptions.AMQPConnectionError, ssl.SSLError) as e:
+ logger.error(
+ "Failed to connect to RabbitMQ at %s:%s with TLS=%s",
+ self.config['host'], self.config['port'], self.config.get(
+ 'tls', False)
+ )
+ logger.error("Error: %s", e)
+ raise RuntimeError(
+ "Connection failed. Check TLS settings and port.") from e
+
+ except Exception as e:
+ logger.error(
+ "Unexpected error while connecting to RabbitMQ: %s", e)
+ raise
+ self.channel = self.connection.channel()
+ logger.debug("RabbitMQ connection established successfully")
+ except pika.exceptions.AMQPConnectionError as e:
+ logger.error("Failed to initialize RabbitMQ connection: %s", e)
+ raise
+ except pika.exceptions.AMQPChannelError as e:
+ logger.error("Failed to initialize RabbitMQ channel: %s", e)
+ raise
+
+ def _ensure_connection(self):
+ """Ensure the RabbitMQ connection is active, reconnect if necessary."""
+ try:
+ if not self.connection or self.connection.is_closed:
+ logger.warning(
+ "RabbitMQ connection is closed, attempting to reconnect...")
+ self._initialize_rabbitmq_connection()
+ return True
+ except (pika.exceptions.AMQPConnectionError, pika.exceptions.AMQPChannelError) as e:
+ logger.error("Failed to ensure RabbitMQ connection: %s", e)
+ return False
+
+ def handle_input_message(self, sender, **kwargs): # pylint: disable=unused-argument
+ """
+ Handle incoming messages.
+
+ Args:
+ **kwargs: Keyword arguments containing message data
+ """
+ message_dict = kwargs.get('message', {})
+ try:
+ message = MessageModel.model_validate(message_dict)
+ except Exception as e: # pylint: disable=broad-exception-caught
+ logger.error("Invalid message format: %s", e)
+ return
+ simulation = message.simulation
+ if simulation is None:
+ request_id = 'unknown'
+ else:
+ request_id = simulation.request_id if simulation.request_id else 'unknown'
+ producer = kwargs.get('producer', 'unknown')
+ consumer = kwargs.get('consumer', 'unknown')
+ protocol = kwargs.get('protocol', 'unknown')
+ logger.info(
+ "[%s] Handling incoming simulation request with ID: %s", protocol.upper(), request_id)
+ self._publish_message(
+ producer,
+ consumer,
+ message.model_dump(),
+ protocol=protocol)
+
+ def handle_result_rabbitmq_message(self, sender, **kwargs): # pylint: disable=unused-argument
+ """
+ Handle RabbitMQ result messages.
+
+ Args:
+ **kwargs: Keyword arguments containing message data
+ """
+ message = kwargs.get('message', {})
+ producer = message.get('source', 'unknown')
+ consumer = "result"
+ self._publish_message(
+ producer,
+ consumer,
+ message,
+ exchange='ex.bridge.result',
+ protocol='rabbitmq')
+
+ def handle_result_unknown_message(self, sender, **kwargs): # pylint: disable=unused-argument
+ """
+        Handle result messages received with an unknown protocol.
+
+ Args:
+ **kwargs: Keyword arguments containing message data
+ """
+ message = kwargs.get('message', {})
+ logger.error(
+ "Received error result message with unknown protocol: %s", message['error'])
+
+ def _publish_message(self, producer, consumer, message, # pylint: disable=too-many-arguments, too-many-positional-arguments
+ exchange='ex.bridge.output', protocol='unknown'):
+ """
+ Publish message to RabbitMQ exchange.
+
+ Args:
+ producer: Message producer identifier
+ consumer: Message consumer identifier
+ message: Message payload
+ exchange: RabbitMQ exchange name
+ protocol: Protocol identifier
+ """
+ if not self._ensure_connection():
+ logger.error(
+ "Cannot publish message: RabbitMQ connection is not available")
+ return
+
+ routing_key = f"{producer}.{consumer}"
+ message['simulation']['bridge_meta'] = {
+ 'protocol': protocol
+ }
+ try:
+ self.channel.basic_publish(
+ exchange=exchange,
+ routing_key=routing_key,
+ body=json.dumps(message),
+ properties=pika.BasicProperties(
+ delivery_mode=2,
+ )
+ )
+ logger.debug(
+ "Message routed to exchange '%s': %s -> %s, protocol=%s",
+ exchange, producer, consumer, protocol)
+ except (pika.exceptions.AMQPConnectionError,
+ pika.exceptions.AMQPChannelError) as e:
+ logger.error("RabbitMQ connection error: %s", e)
+ self._initialize_rabbitmq_connection()
+ # Retry the publish operation once
+ try:
+ self.channel.basic_publish(
+ exchange=exchange,
+ routing_key=routing_key,
+ body=json.dumps(message),
+ properties=pika.BasicProperties(
+ delivery_mode=2,
+ )
+ )
+ logger.debug(
+ "Message routed to exchange '%s' after reconnection: %s -> %s",
+ exchange, producer, consumer)
+ except (pika.exceptions.AMQPConnectionError,
+ pika.exceptions.AMQPChannelError) as retry_e:
+ logger.error(
+ "Failed to publish message after reconnection: %s", retry_e)
diff --git a/simulation_bridge/src/core/bridge_infrastructure.py b/simulation_bridge/src/core/bridge_infrastructure.py
new file mode 100644
index 0000000..0ffc835
--- /dev/null
+++ b/simulation_bridge/src/core/bridge_infrastructure.py
@@ -0,0 +1,149 @@
+"""
+Module for managing RabbitMQ infrastructure for the simulation bridge.
+Handles setup of exchanges, queues and bindings based on configuration.
+"""
+
+import ssl
+import pika
+from ..utils.config_manager import ConfigManager
+from ..utils.logger import get_logger
+
+# Get a module-level logger using a factory function
+logger = get_logger()
+
+
+class RabbitMQInfrastructure:
+ """Class responsible for setting up and managing RabbitMQ infrastructure."""
+
+ def __init__(self, config_manager: ConfigManager):
+ """Initialize RabbitMQ infrastructure.
+
+ Args:
+ config_manager: Configuration manager object to retrieve RabbitMQ settings
+ """
+ self.config = config_manager.get_rabbitmq_config()
+ try:
+ credentials = pika.PlainCredentials(
+ self.config['username'],
+ self.config['password']
+ )
+
+ if self.config.get('tls', False):
+ context = ssl.create_default_context()
+ ssl_options = pika.SSLOptions(context, self.config['host'])
+ connection_params = pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials,
+ ssl_options=ssl_options
+ )
+ else:
+ connection_params = pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials
+ )
+
+ self.connection = pika.BlockingConnection(connection_params)
+
+ except (pika.exceptions.AMQPConnectionError, ssl.SSLError) as e:
+ logger.error(
+ "Failed to connect to RabbitMQ at %s:%s with TLS=%s",
+ self.config['host'],
+ self.config['port'],
+ self.config.get('tls', False)
+ )
+ logger.error("Error: %s", e)
+ raise RuntimeError(
+ "Connection failed. Check TLS settings and port.") from e
+ except Exception as e:
+ logger.error("Unexpected error while connecting to RabbitMQ: %s", e)
+ raise
+ self.channel = self.connection.channel()
+
+ def setup(self):
+ """Setup all exchanges, queues and bindings."""
+ try:
+ self._setup_exchanges()
+ self._setup_queues()
+ self._setup_bindings()
+ logger.info(
+ "Simulation Bridge infrastructure setup completed successfully")
+ except Exception as e:
+ logger.error("Error setting up RabbitMQ infrastructure: %s", e)
+ raise
+ finally:
+ self.connection.close()
+
+ def reconnect(self):
+ """Reconnect to RabbitMQ if connection was closed."""
+
+ if self.connection.is_closed:
+ credentials = pika.PlainCredentials(
+ self.config['username'],
+ self.config['password']
+ )
+ self.connection = pika.BlockingConnection(
+ pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials
+ )
+ )
+ self.channel = self.connection.channel()
+ logger.info("Reconnected to RabbitMQ")
+ return self.connection
+
+ def _setup_exchanges(self):
+ """Declare all exchanges defined in configuration."""
+ for exchange in self.config['infrastructure']['exchanges']:
+ try:
+ self.channel.exchange_declare(
+ exchange=exchange['name'],
+ exchange_type=exchange['type'],
+ durable=exchange['durable'],
+ auto_delete=exchange['auto_delete'],
+ internal=exchange['internal']
+ )
+ logger.debug("Exchange declared: %s", exchange['name'])
+ except Exception as e:
+ logger.error(
+ "Error declaring exchange %s: %s",
+ exchange['name'], e)
+ raise
+
+ def _setup_queues(self):
+ """Declare all queues defined in configuration."""
+ for queue in self.config['infrastructure']['queues']:
+ try:
+ self.channel.queue_declare(
+ queue=queue['name'],
+ durable=queue['durable'],
+ exclusive=queue['exclusive'],
+ auto_delete=queue['auto_delete']
+ )
+ logger.debug("Queue declared: %s", queue['name'])
+ except Exception as e:
+ logger.error("Error declaring queue %s: %s", queue['name'], e)
+ raise
+
+ def _setup_bindings(self):
+ """Setup all queue-exchange bindings defined in configuration."""
+ for binding in self.config['infrastructure']['bindings']:
+ try:
+ self.channel.queue_bind(
+ exchange=binding['exchange'],
+ queue=binding['queue'],
+ routing_key=binding['routing_key']
+ )
+ logger.debug(
+ "Binding created: %s -> %s (%s)",
+ binding['queue'], binding['exchange'], binding['routing_key'])
+ except Exception as e:
+ logger.error(
+ "Error creating binding %s -> %s: %s",
+ binding['queue'], binding['exchange'], e)
+ raise
diff --git a/simulation_bridge/src/core/bridge_orchestrator.py b/simulation_bridge/src/core/bridge_orchestrator.py
new file mode 100644
index 0000000..01a115b
--- /dev/null
+++ b/simulation_bridge/src/core/bridge_orchestrator.py
@@ -0,0 +1,171 @@
+"""Bridge Orchestrator module for simulation bridge."""
+import time
+import importlib
+import threading
+from .bridge_core import BridgeCore
+from .bridge_infrastructure import RabbitMQInfrastructure
+from ..utils.config_manager import ConfigManager
+from ..utils.config_loader import load_protocol_config
+from ..utils.logger import get_logger
+from ..utils.signal_manager import SignalManager
+from ..utils.certs import ensure_certificates
+
+# Constants for RabbitMQ connection parameters
+POLL_INTERVAL_SECONDS = 60 # Continuously check adapter status every 60 seconds
+
+logger = get_logger()
+
+
+class BridgeOrchestrator:
+    """Orchestrates the simulation bridge components and lifecycle."""
+
+    def __init__(self, config_path: str = None):
+        """Initialize the bridge orchestrator.
+
+        Args:
+            config_path: Optional path to configuration file. The bridge id
+                is read from the loaded configuration, not passed in.
+        """
+        self.config_manager = ConfigManager(config_path)
+        self.config = self.config_manager.get_config()
+
+        self.simulation_bridge_id = self.config['simulation_bridge']['bridge_id']
+        logger.info("Simulation bridge ID: %s", self.simulation_bridge_id)
+        # Validate and ensure SSL certificates are present
+        ensure_certificates(validity_days=365)
+
+        # Populated later by setup_interfaces().
+        self.bridge = None
+        self.adapters = {}
+        self._running = False
+
+        self.protocol_config = load_protocol_config()
+        self.adapter_classes = self._import_adapter_classes()
+
+    def setup_interfaces(self):
+        """Set up all communication interfaces and the core bridge.
+
+        This method initializes infrastructure (e.g., RabbitMQ), creates adapter
+        instances for each protocol (e.g., MQTT, REST), registers them with the
+        SignalManager, and connects all defined signals to their respective
+        callbacks.
+        """
+        try:
+            # Set up RabbitMQ infrastructure
+            logger.debug("Setting up RabbitMQ infrastructure...")
+            infrastructure = RabbitMQInfrastructure(self.config_manager)
+            infrastructure.setup()
+
+            # Get list of enabled protocols
+            enabled_protocols = SignalManager.get_enabled_protocols()
+            if not enabled_protocols:
+                logger.warning(
+                    "No protocol adapters are enabled — no messages will be received.")
+            else:
+                protocols_str = ", ".join(proto.upper()
+                                          for proto in enabled_protocols)
+                logger.info("Enabled protocols: %s", protocols_str)
+
+            # Instantiate and register each adapter only for enabled protocols
+            for name, adapter_class in self.adapter_classes.items():
+                if name not in enabled_protocols:
+                    logger.debug(
+                        "Skipping initialization of disabled protocol: %s",
+                        name.upper())
+                    continue
+
+                adapter = adapter_class(self.config_manager)
+                self.adapters[name] = adapter
+
+                # Register the adapter instance with SignalManager
+                SignalManager.register_adapter_instance(name, adapter)
+                logger.info("%s Adapter initialized correctly", name.upper())
+
+            # Create and register the core bridge component
+            self.bridge = BridgeCore(self.config_manager, self.adapters)
+            SignalManager.set_bridge_core(self.bridge)
+
+            # Connect all signals defined in protocol config (only for enabled
+            # protocols)
+            SignalManager.connect_all_signals()
+            logger.info(
+                "Bridge core initialized and signals connected for enabled protocols")
+
+        except Exception as exc:  # pylint: disable=broad-exception-caught
+            logger.error("Error setting up interfaces: %s", exc)
+            raise
+
+    def _start_adapters_async(self):
+        """Start all adapters in separate threads.
+
+        NOTE(review): not referenced anywhere in this module — start() calls
+        adapter.start() synchronously instead. Confirm whether this is still
+        needed or can be removed.
+        """
+        for name, adapter in self.adapters.items():
+            thread = threading.Thread(
+                target=adapter.start,
+                name=f"{name}_adapter_thread",
+                daemon=True)
+            thread.start()
+            logger.debug("Started adapter %s in thread %s", name, thread.name)
+
+    def start(self):
+        """Start the bridge and all its components.
+
+        Blocks in a polling loop until an adapter dies, stop() is called, or
+        the user interrupts; always terminates by raising SystemExit.
+        """
+        # 1) Initial setup
+        self.setup_interfaces()
+        try:
+            # 2) Start all adapters
+            for adapter in self.adapters.values():
+                adapter.start()
+            logger.info("Simulation Bridge Running")
+            self._running = True
+            # 3) Polling loop
+            while self._running:
+                all_alive = all(
+                    adapter.is_running for adapter in self.adapters.values())
+                if not all_alive:
+                    logger.error(
+                        "One or more adapters have stopped unexpectedly")
+                    break
+                time.sleep(POLL_INTERVAL_SECONDS)
+        except KeyboardInterrupt:
+            # 4) Handle user Ctrl+C
+            logger.info("Shutdown requested by user (Ctrl+C)")
+            self._running = False
+            raise SystemExit
+        finally:
+            # 5) In any case (adapter error or Ctrl+C), stop everything
+            # NOTE(review): raising inside 'finally' replaces any in-flight
+            # exception — including the SystemExit raised just above — with
+            # this one (pylint W0150). Confirm that masking is intentional.
+            self.stop()
+            raise SystemExit("Simulation Bridge stopped")
+
+    def stop(self):
+        """Stop all components of the bridge cleanly."""
+        logger.debug("Stopping all components...")
+        self._running = False
+        try:
+            for name, adapter in self.adapters.items():
+                try:
+                    adapter.stop()
+                    # Join thread only if the adapter has a thread attribute
+                    if hasattr(adapter, 'thread'):
+                        adapter.thread.join()
+                except Exception as exc:  # pylint: disable=broad-exception-caught
+                    logger.error("Error stopping %s adapter: %s", name, exc)
+            SignalManager.disconnect_all_signals()
+            logger.info("Simulation Bridge Stopped")
+
+        except Exception as exc:  # pylint: disable=broad-exception-caught
+            logger.error("Error during shutdown: %s", exc)
+
+    def _import_adapter_classes(self):
+        """
+        For each protocol specified in the loaded protocol configuration,
+        it extracts the full class path, imports the module relative to the
+        protocol_adapters package, retrieves the class object,
+        and stores it keyed by the protocol name.
+
+        Returns:
+            dict: Mapping of protocol name -> adapter class object.
+        """
+        classes = {}
+        for protocol, data in self.protocol_config.items():
+            # class_path is e.g. ".mqtt.mqtt_adapter.MQTTAdapter"; the module
+            # part is resolved relative to the protocol_adapters package.
+            class_path = data.get("class")
+            module_name, class_name = class_path.rsplit(".", 1)
+            module = importlib.import_module(
+                module_name,
+                package="simulation_bridge.src.protocol_adapters")
+            adapter_class = getattr(module, class_name)
+            classes[protocol] = adapter_class
+        return classes
diff --git a/simulation_bridge/src/main.py b/simulation_bridge/src/main.py
new file mode 100644
index 0000000..03f34c0
--- /dev/null
+++ b/simulation_bridge/src/main.py
@@ -0,0 +1,88 @@
+"""
+Simulation Bridge entry point.
+"""
+import logging
+import os
+
+import click
+
+from .core.bridge_orchestrator import BridgeOrchestrator
+from .utils.config_loader import load_config
+from .utils.logger import setup_logger
+from .utils.template import generate_default_config, generate_default_project
+
+CONFIG_FILENAME = 'config.yaml'
+
+
+@click.command()
+@click.option('--config-file', '-c', type=click.Path(exists=True),
+              default=None, help='Path to custom configuration file')
+@click.option('--generate-config', is_flag=True,
+              help='Generate a default configuration file in the current directory')
+@click.option('--generate-project', is_flag=True,
+              help='Generate default project files in the current directory')
+def main(
+        config_file=None,
+        generate_config=False,
+        generate_project=False) -> None:
+    """
+    Main function to start the Simulation Bridge.
+
+    Precedence: --generate-config and --generate-project are one-shot actions
+    that return immediately; otherwise an explicit --config-file is used,
+    falling back to ./config.yaml in the current directory.
+    """
+    if generate_config:
+        generate_default_config()
+        return
+
+    if generate_project:
+        generate_default_project()
+        return
+
+    if config_file:
+        run_bridge(config_file)
+        return
+
+    # No explicit config given: require ./config.yaml or guide the user.
+    if not os.path.exists(CONFIG_FILENAME):
+        print(f"""
+Error: Configuration file {CONFIG_FILENAME} not found.
+
+To generate a default configuration file, run:
+simulation-bridge --generate-config
+
+You may customize the generated file as needed and re-run the program.
+
+Alternatively, if you already have a custom configuration file, use the
+--config-file option to specify its path:
+simulation-bridge --config-file /path/to/your/config.yaml
+    """)
+        return
+
+    run_bridge(CONFIG_FILENAME)
+
+
+def run_bridge(config_file):
+    """Initialize and start the Simulation Bridge from a configuration file.
+
+    Sets up logging from the 'logging' section of the configuration, then
+    creates and runs a BridgeOrchestrator, stopping it on interrupt or on
+    OS/configuration errors.
+
+    Args:
+        config_file: Path to the YAML configuration file.
+    """
+    config = load_config(config_file)
+    logging_level = config['logging']['level']
+    logging_file = config['logging']['file']
+
+    # Fall back to INFO if the configured level name is not a logging attr.
+    logger: logging.Logger = setup_logger(
+        level=getattr(logging, logging_level.upper(), logging.INFO),
+        log_file=logging_file
+    )
+    bridge = BridgeOrchestrator(config_path=config_file)
+    try:
+        logger.debug("Starting Simulation Bridge with config: %s", config)
+        bridge.start()
+    except KeyboardInterrupt:
+        logger.info("Stopping application via interrupt")
+        if bridge:
+            bridge.stop()
+    except OSError as e:
+        logger.error("OS error: %s", str(e), exc_info=True)
+        bridge.stop()
+    except ValueError as e:
+        logger.error("Configuration error: %s", str(e), exc_info=True)
+        bridge.stop()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/simulation_bridge/src/protocol_adapters/adapters_signal.json b/simulation_bridge/src/protocol_adapters/adapters_signal.json
new file mode 100644
index 0000000..0493cbf
--- /dev/null
+++ b/simulation_bridge/src/protocol_adapters/adapters_signal.json
@@ -0,0 +1,29 @@
+{
+ "protocols": {
+ "rabbitmq": {
+ "enabled": true,
+ "signals": {
+ "message_received_input_rabbitmq": "BridgeCore.handle_input_message",
+ "message_received_result_rabbitmq": "BridgeCore.handle_result_rabbitmq_message",
+ "message_received_result_unknown": "BridgeCore.handle_result_unknown_message"
+ },
+ "class": ".rabbitmq.rabbitmq_adapter.RabbitMQAdapter"
+ },
+ "mqtt": {
+ "enabled": true,
+ "signals": {
+ "message_received_input_mqtt": "BridgeCore.handle_input_message",
+ "message_received_result_mqtt": "MQTTAdapter.publish_result_message_mqtt"
+ },
+ "class": ".mqtt.mqtt_adapter.MQTTAdapter"
+ },
+ "rest": {
+ "enabled": true,
+ "signals": {
+ "message_received_input_rest": "BridgeCore.handle_input_message",
+ "message_received_result_rest": "RESTAdapter.publish_result_message_rest"
+ },
+ "class": ".rest.rest_adapter.RESTAdapter"
+ }
+ }
+}
diff --git a/simulation_bridge/src/protocol_adapters/base/__init__.py b/simulation_bridge/src/protocol_adapters/base/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/protocol_adapters/base/protocol_adapter.py b/simulation_bridge/src/protocol_adapters/base/protocol_adapter.py
new file mode 100644
index 0000000..582643c
--- /dev/null
+++ b/simulation_bridge/src/protocol_adapters/base/protocol_adapter.py
@@ -0,0 +1,75 @@
+"""Module providing the abstract base class for protocol adapters."""
+
+from abc import ABC, abstractmethod
+from typing import Dict, Any
+from ...utils.config_manager import ConfigManager
+from ...utils.logger import get_logger
+
+logger = get_logger()
+
+
+class ProtocolAdapter(ABC):
+    """
+    Abstract base class that defines the interface and common behavior
+    for all protocol adapters.
+
+    Subclasses must implement configuration loading, start/stop lifecycle,
+    and message handling methods.
+    """
+
+    def __init__(self, config_manager: ConfigManager):
+        """
+        Initialize the protocol adapter.
+
+        Args:
+            config_manager (ConfigManager): Configuration manager instance
+        """
+        self.config_manager = config_manager
+        # Protocol-specific configuration supplied by the subclass hook,
+        # which is therefore invoked during base-class construction.
+        self.config = self._get_config()
+        # Lifecycle flag; subclasses are expected to update it in start()/stop().
+        self._running = False
+        logger.debug(
+            "%s - Adapter initialized with config: %s",
+            self.__class__.__name__,
+            self.config)
+
+    @property
+    def is_running(self) -> bool:
+        """
+        Indicates whether the adapter is currently running.
+
+        Returns:
+            bool: True if running, False otherwise
+        """
+        return self._running
+
+    @abstractmethod
+    def _get_config(self) -> Dict[str, Any]:
+        """
+        Retrieve protocol-specific configuration.
+
+        Called from __init__ before subclass attributes exist, so it must
+        rely only on self.config_manager.
+
+        Returns:
+            Dict[str, Any]: Configuration dictionary
+        """
+
+    @abstractmethod
+    def start(self) -> None:
+        """
+        Start the protocol adapter.
+        Should initiate connections, threads, or other resources.
+        """
+
+    @abstractmethod
+    def stop(self) -> None:
+        """
+        Stop the protocol adapter.
+        Should cleanly release resources and stop threads.
+        """
+
+    @abstractmethod
+    def _handle_message(self, message: Dict[str, Any]) -> None:
+        """
+        Handle an incoming message according to the protocol logic.
+
+        Args:
+            message (Dict[str, Any]): The message to handle
+        """
diff --git a/simulation_bridge/src/protocol_adapters/mqtt/__init__.py b/simulation_bridge/src/protocol_adapters/mqtt/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/protocol_adapters/mqtt/mqtt_adapter.py b/simulation_bridge/src/protocol_adapters/mqtt/mqtt_adapter.py
new file mode 100644
index 0000000..7ffe6d8
--- /dev/null
+++ b/simulation_bridge/src/protocol_adapters/mqtt/mqtt_adapter.py
@@ -0,0 +1,247 @@
+"""MQTT Adapter Module for the Simulation Bridge.
+
+This module implements an adapter for MQTT communication protocol.
+"""
+
+import json
+import threading
+from typing import Dict, Any
+
+import paho.mqtt.client as mqtt
+import yaml
+from blinker import signal
+
+from ...utils.config_manager import ConfigManager
+from ...utils.logger import get_logger
+from ..base.protocol_adapter import ProtocolAdapter
+
+logger = get_logger()
+
+
+class MQTTAdapter(ProtocolAdapter):
+ """MQTT Protocol Adapter implementation.
+
+ This adapter handles MQTT protocol communication, including connecting
+ to MQTT brokers, subscribing to topics, and processing incoming messages.
+ """
+
+ def _get_config(self) -> Dict[str, Any]:
+ """Retrieve MQTT-specific configuration.
+
+ Returns:
+ Dict[str, Any]: MQTT configuration dictionary
+ """
+ return self.config_manager.get_mqtt_config()
+
+ def __init__(self, config_manager: ConfigManager):
+ """Initialize the MQTT adapter.
+
+ Args:
+ config_manager: Configuration manager instance
+ """
+ super().__init__(config_manager)
+ self.mqtt_config = config_manager.get_mqtt_config()
+ self.client = mqtt.Client()
+ if 'username' in self.mqtt_config and 'password' in self.mqtt_config:
+ self.client.username_pw_set(
+ self.mqtt_config['username'], self.mqtt_config['password']
+ )
+ if self.mqtt_config.get('tls', False):
+ logger.debug("MQTT - TLS is enabled, setting up TLS context")
+ self.client.tls_set()
+ self.topic = self.config['input_topic']
+ self.client.on_connect = self.on_connect
+ self.client.on_message = self.on_message
+ self.client.on_disconnect = self.on_disconnect
+ self._client_thread = None
+ self._running = False
+ self.mqtt_client = mqtt.Client()
+
+ if 'username' in self.mqtt_config and 'password' in self.mqtt_config:
+ self.mqtt_client.username_pw_set(
+ self.mqtt_config['username'], self.mqtt_config['password']
+ )
+ if self.mqtt_config.get('tls', False):
+ logger.debug("MQTT - TLS is enabled for publishing client")
+ self.mqtt_client.tls_set()
+ self.mqtt_client.connect(
+ host=self.mqtt_config['host'],
+ port=self.mqtt_config['port'],
+ keepalive=self.mqtt_config['keepalive']
+ )
+
+ self.mqtt_client.loop_start()
+ logger.debug(
+ "MQTT - Adapter initialized with config: host=%s, port=%s, topic=%s",
+ self.config['host'], self.config['port'], self.topic)
+
+ def on_connect(self, client, userdata, flags, rc):
+ """Callback for when the client connects to the broker.
+
+ Args:
+ client: MQTT client instance
+ userdata: User data
+ flags: Connection flags
+ rc: Result code
+ """
+ if rc == 0:
+ self.client.subscribe(self.topic)
+ logger.debug("MQTT - Subscribed to topic: %s", self.topic)
+ else:
+ logger.error(
+ "MQTT - Failed to connect to broker at %s:%s, return code: %s",
+ self.config['host'], self.config['port'], rc)
+
+ def on_disconnect(self, client, userdata, rc):
+ """Callback for when the client disconnects from the broker.
+
+ Args:
+ client: MQTT client instance
+ userdata: User data
+ rc: Result code
+ """
+ if rc == 0:
+ logger.debug("MQTT - Cleanly disconnected from broker")
+ else:
+ logger.warning(
+ "MQTT - Unexpectedly disconnected from broker with code: %s", rc)
+
+ def on_message(self, client, userdata, msg):
+ """Callback for when a message is received from the broker.
+
+ Args:
+ client: MQTT client instance
+ userdata: User data
+ msg: Received message
+ """
+ try:
+ # Try to parse as YAML first, then JSON, then plain text
+ try:
+ message = yaml.safe_load(msg.payload)
+ except Exception:
+ try:
+ message = json.loads(msg.payload)
+ except Exception:
+ message = {
+ "content": msg.payload.decode('utf-8', errors='replace'),
+ "raw_message": True
+ }
+
+ if not isinstance(message, dict):
+ raise ValueError("Message is not a dictionary")
+
+ simulation = message.get('simulation', {})
+ producer = simulation.get('client_id', 'unknown')
+ consumer = simulation.get('simulator', 'unknown')
+
+ # Process message directly - no need for queuing
+ logger.debug(
+ "MQTT - Processing message %s, from producer: %s, simulator: %s",
+ message, producer, consumer)
+
+ # Send signal directly
+ signal('message_received_input_mqtt').send(
+ message=message,
+ producer=producer,
+ consumer=consumer,
+ protocol='mqtt'
+ )
+
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ logger.error("MQTT - Error processing message: %s", exc)
+
+ def _run_client(self):
+ """Run the MQTT client in a separate thread."""
+ logger.debug("MQTT client thread started")
+ try:
+ # Connect and start the MQTT client
+ logger.debug(
+ "MQTT - Attempting to connect to broker with keepalive: %s",
+ self.config['keepalive'])
+
+ self.client.connect(
+ self.config['host'],
+ self.config['port'],
+ self.config['keepalive'])
+
+ logger.debug("MQTT - Starting client loop")
+ self.client.loop_forever()
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ logger.error("MQTT - Error in client thread: %s", exc)
+ self._running = False
+ raise
+
+ def start(self) -> None:
+ """Start the MQTT adapter.
+
+ Connects to the MQTT broker and starts processing messages.
+
+ Raises:
+ Exception: If connection to the broker fails
+ """
+ logger.debug(
+ "MQTT - Starting adapter connection to %s:%s",
+ self.config['host'], self.config['port'])
+
+ try:
+ self._running = True
+ # Start client thread
+ self._client_thread = threading.Thread(
+ target=self._run_client, daemon=True)
+ self._client_thread.start()
+ logger.debug("MQTT client thread started successfully")
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ logger.error(
+ "MQTT - Error connecting to broker at %s:%s: %s",
+ self.config['host'], self.config['port'], exc)
+ self.stop()
+ raise
+
+ def stop(self) -> None:
+ """Stop the MQTT adapter.
+
+ Disconnects from the MQTT broker and stops all processing threads.
+ """
+ logger.debug("MQTT - Stopping adapter")
+ self._running = False
+ try:
+ self.client.disconnect()
+ logger.debug("MQTT - Successfully disconnected from broker")
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ logger.error("MQTT - Error during disconnection: %s", exc)
+
+ def send_result(self, message):
+ try:
+ output_topic = self.mqtt_config['output_topic']
+ self.mqtt_client.publish(
+ topic=output_topic,
+ payload=json.dumps(message),
+ qos=self.mqtt_config['qos']
+ )
+ logger.debug(
+ "Message published to MQTT topic '%s': %s", output_topic, message)
+ except (ConnectionError, TimeoutError) as e:
+ logger.error("Error publishing MQTT message: %s", e)
+
+ def _handle_message(self, message: Dict[str, Any]) -> None:
+ """Handle incoming messages (required by ProtocolAdapter).
+
+ Args:
+ message: The message to handle
+ """
+ self.on_message(None, None, message)
+
+ def publish_result_message_mqtt(self, sender, **kwargs): # pylint: disable=unused-argument
+ """
+ Publish result message to MQTT topic.
+
+ Args:
+ message: Message payload to publish
+ """
+ try:
+ message = kwargs.get('message', {})
+ self.send_result(message)
+ logger.debug(
+ "Succesfully scheduled result message for MQTT client")
+ except (ConnectionError, TimeoutError) as e:
+ logger.error("Error publishing MQTT message: %s", e)
diff --git a/simulation_bridge/src/protocol_adapters/rabbitmq/__init__.py b/simulation_bridge/src/protocol_adapters/rabbitmq/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/protocol_adapters/rabbitmq/rabbitmq_adapter.py b/simulation_bridge/src/protocol_adapters/rabbitmq/rabbitmq_adapter.py
new file mode 100644
index 0000000..6851e4b
--- /dev/null
+++ b/simulation_bridge/src/protocol_adapters/rabbitmq/rabbitmq_adapter.py
@@ -0,0 +1,248 @@
+"""RabbitMQ adapter for message transport between simulation components."""
+import json
+import threading
+import functools
+import ssl
+from typing import Dict, Any
+
+import pika
+import yaml
+from blinker import signal
+
+from ...utils.config_manager import ConfigManager
+from ...utils.logger import get_logger
+from ..base.protocol_adapter import ProtocolAdapter
+
+logger = get_logger()
+
+
+class RabbitMQAdapter(ProtocolAdapter):
+ """
+ Protocol adapter for RabbitMQ message broker.
+
+ Handles connections to RabbitMQ, subscribes to configured queues,
+ and processes incoming/outgoing messages.
+ """
+
+ def _get_config(self) -> Dict[str, Any]:
+ """Retrieve RabbitMQ configuration from config manager."""
+ return self.config_manager.get_rabbitmq_config()
+
+ def __init__(self, config_manager: ConfigManager):
+ """
+ Initialize RabbitMQ adapter with configuration.
+
+ Args:
+ config_manager: Configuration manager providing RabbitMQ settings
+ """
+ super().__init__(config_manager)
+ logger.debug("RabbitMQ adapter initialized")
+
+ try:
+ credentials = pika.PlainCredentials(
+ self.config['username'],
+ self.config['password']
+ )
+ if self.config.get('tls', False):
+ context = ssl.create_default_context()
+ ssl_options = pika.SSLOptions(context, self.config['host'])
+ connection_params = pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials,
+ ssl_options=ssl_options
+ )
+ else:
+ connection_params = pika.ConnectionParameters(
+ host=self.config['host'],
+ port=self.config['port'],
+ virtual_host=self.config['vhost'],
+ credentials=credentials
+ )
+
+ self.connection = pika.BlockingConnection(connection_params)
+
+ except (pika.exceptions.AMQPConnectionError, ssl.SSLError) as e:
+ logger.error(
+ f"Failed to connect to RabbitMQ at {
+ self.config['host']}:{
+ self.config['port']} with TLS={
+ self.config.get(
+ 'tls',
+ False)}")
+ logger.error(f"Error: {e}")
+ raise RuntimeError(
+ f"Connection failed. Check TLS settings and port.") from e
+ except Exception as e:
+ logger.error(f"Unexpected error while connecting to RabbitMQ: {e}")
+ raise
+ self.channel = self.connection.channel()
+ self._consumer_thread = None
+ self._running = False
+
+ # Get all queues from config and register callback for each
+ queues = self.config.get('infrastructure', {}).get('queues', [])
+ for queue in queues:
+ queue_name = queue.get('name')
+ if queue_name:
+ cb = functools.partial(
+ self._process_message, queue_name=queue_name)
+ self.channel.basic_consume(
+ queue=queue_name,
+ on_message_callback=cb,
+ auto_ack=False
+ )
+ logger.debug("Subscribed to queue: %s", queue_name)
+ logger.debug("RabbitMQ adapter initialized and subscribed to queues")
+
+ def _process_message(self, ch, method, properties, body, queue_name):
+ """
+ Process incoming RabbitMQ message.
+
+ Args:
+ ch: Channel object
+ method: Method details
+ properties: Message properties
+ body: Message body
+ queue_name: Source queue name
+ """
+ try:
+ # Try to parse message as YAML first, then JSON, or fall back to
+ # raw string
+ try:
+ message = yaml.safe_load(body)
+ except Exception:
+ try:
+ message = json.loads(body)
+ except Exception:
+ message = {
+ "content": body.decode('utf-8', errors='replace'),
+ "raw_message": True
+ }
+
+ if not isinstance(message, dict):
+ raise ValueError("Message is not a dictionary")
+
+ simulation = message.get('simulation', {})
+ producer = simulation.get('client_id', 'unknown')
+ consumer = simulation.get('simulator', 'unknown')
+
+ signal_name = None
+ kwargs = {
+ "message": message,
+ "producer": producer,
+ "consumer": consumer,
+ }
+
+ if queue_name == 'Q.bridge.input':
+ signal_name = 'message_received_input_rabbitmq'
+ kwargs["protocol"] = 'rabbitmq'
+ elif queue_name == 'Q.bridge.result':
+ bridge_meta = message.get('bridge_meta', {})
+ if isinstance(bridge_meta, str):
+ if bridge_meta.strip().startswith('{'):
+ try:
+ bridge_meta = json.loads(bridge_meta)
+ except Exception:
+ logger.warning("Malformed JSON in bridge_meta: %s",
+ bridge_meta)
+ bridge_meta = {}
+ else:
+ logger.debug("bridge_meta is a non-JSON string: %s",
+ bridge_meta)
+ bridge_meta = {}
+ protocol = bridge_meta.get('protocol', 'unknown')
+ if protocol == 'rest':
+ signal_name = 'message_received_result_rest'
+ elif protocol == 'mqtt':
+ signal_name = 'message_received_result_mqtt'
+ elif protocol == 'rabbitmq':
+ signal_name = 'message_received_result_rabbitmq'
+ elif protocol == 'unknown':
+ signal_name = 'message_received_result_unknown'
+ signal(signal_name).send(self, **kwargs)
+ ch.basic_ack(delivery_tag=method.delivery_tag)
+ logger.debug(
+ "Message processed from queue %s: %s",
+ queue_name, method.routing_key
+ )
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ ch.basic_nack(delivery_tag=method.delivery_tag, requeue=False)
+ logger.error(
+ "Error processing message from %s: %s",
+ queue_name,
+ exc)
+
+ def _run_consumer(self):
+ """Run the RabbitMQ consumer in a separate thread."""
+ logger.debug("RabbitMQ consumer thread started")
+ try:
+ self._running = True
+ self.channel.start_consuming()
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ if self._running:
+ logger.error("RabbitMQ - Error in consumer thread: %s", exc)
+ finally:
+ logger.debug("RabbitMQ consumer thread exiting")
+ self._running = False
+
+ def start(self) -> None:
+ """Start the RabbitMQ consumer in a separate thread."""
+ logger.debug("RabbitMQ adapter starting...")
+ try:
+ self._consumer_thread = threading.Thread(
+ target=self._run_consumer, daemon=True)
+ self._consumer_thread.start()
+ logger.debug("RabbitMQ consumer thread started successfully")
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ logger.error("RabbitMQ - Error starting consumer thread: %s", exc)
+ self.stop()
+ raise
+
+ def stop(self) -> None:
+ """Stop the RabbitMQ adapter and clean up resources."""
+ logger.debug("RabbitMQ - Stopping adapter")
+ self._running = False
+ try:
+ if self.channel and self.channel.is_open:
+ def stop_consuming_from_thread():
+ try:
+ self.channel.stop_consuming()
+ except Exception as e:
+ logger.warning(
+ "RabbitMQ - Error stopping consuming: %s", e)
+ self.connection.add_callback_threadsafe(
+ stop_consuming_from_thread)
+ except Exception as e:
+ logger.error(
+ "RabbitMQ - Unexpected error while scheduling stop_consuming: %s", e)
+ try:
+ if self._consumer_thread and self._consumer_thread.is_alive():
+ self._consumer_thread.join(timeout=5)
+ except Exception as e:
+ logger.warning("RabbitMQ - Error joining consumer thread: %s", e)
+ try:
+ if self.connection and self.connection.is_open:
+ self.connection.close()
+ except Exception as e:
+ logger.warning("RabbitMQ - Error closing connection: %s", e)
+ logger.debug("RabbitMQ - Adapter stopped cleanly")
+
+ def _handle_message(self, message: Dict[str, Any]) -> None:
+ """
+ Handle incoming messages (required by ProtocolAdapter).
+
+ Args:
+ message: The message to process
+ """
+ self._process_message(None, None, None, message, 'Q.bridge.input')
+
+ def _start_adapter(self) -> None:
+ """Start the RabbitMQ consumer."""
+ logger.debug("RabbitMQ adapter started...")
+ try:
+ self.channel.start_consuming()
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ logger.error("RabbitMQ - Error in consumer: %s", exc)
+ raise
diff --git a/simulation_bridge/src/protocol_adapters/rest/__init__.py b/simulation_bridge/src/protocol_adapters/rest/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/protocol_adapters/rest/rest_adapter.py b/simulation_bridge/src/protocol_adapters/rest/rest_adapter.py
new file mode 100644
index 0000000..a84f8fe
--- /dev/null
+++ b/simulation_bridge/src/protocol_adapters/rest/rest_adapter.py
@@ -0,0 +1,218 @@
+from quart import Quart, request, Response
+from hypercorn.config import Config as HyperConfig
+from hypercorn.asyncio import serve
+import asyncio
+import yaml
+import json
+from typing import Dict, Any, Optional, AsyncGenerator
+from ...utils.config_manager import ConfigManager
+from ...utils.logger import get_logger
+from ..base.protocol_adapter import ProtocolAdapter
+from blinker import signal
+
+logger = get_logger()
+
+
class RESTAdapter(ProtocolAdapter):
    """REST protocol adapter implementation using Quart and Hypercorn.

    Accepts simulation requests on a single POST endpoint (JSON or YAML
    body), forwards them to the bridge core through a blinker signal and
    streams results back to the caller as newline-delimited JSON
    (``application/x-ndjson``).
    """

    def _get_config(self) -> Dict[str, Any]:
        """Get REST configuration from config manager."""
        return self.config_manager.get_rest_config()

    def __init__(self, config_manager: ConfigManager):
        """Initialize REST adapter with configuration.

        Args:
            config_manager: Provides the ``rest`` configuration section
                (host, port, endpoint, optional TLS files).
        """
        super().__init__(config_manager)
        # Maps producer/client id -> asyncio.Queue feeding its open stream.
        self._active_streams: Dict[str, asyncio.Queue] = {}
        self._loop: Optional[asyncio.AbstractEventLoop] = None
        self._running = False
        self.app = self._create_app()
        logger.debug("REST - Adapter initialized with config: host=%s, port=%s",
                     self.config['host'], self.config['port'])

    def _create_app(self) -> Quart:
        """Factory method to create and configure the Quart app."""
        app = Quart("simulation_rest_adapter")

        @app.post(self.config['endpoint'])
        async def handle_streaming_message() -> Response:
            """Accept one simulation request; reply with an NDJSON stream."""
            content_type = request.headers.get('content-type', '')
            body = await request.get_data()

            try:
                message = self._parse_message(body, content_type)
            except Exception as e:
                logger.error("REST - Error parsing message: %s", e)
                return Response(
                    response=json.dumps({"error": str(e)}),
                    status=400,
                    content_type='application/json'
                )

            if not isinstance(message, dict):
                return Response(
                    response=json.dumps(
                        {"error": "Message is not a dictionary"}),
                    status=400,
                    content_type='application/json'
                )

            simulation = message.get('simulation', {})
            producer = simulation.get('client_id', 'unknown')
            consumer = simulation.get('simulator', 'unknown')

            # Annotate the message so downstream components can route the
            # result back through this adapter.
            message['bridge_meta'] = {
                'protocol': 'rest',
                'producer': producer,
                'consumer': consumer
            }

            signal('message_received_input_rest').send(
                message=message,
                producer=producer,
                consumer=consumer,
                protocol='rest'
            )

            # Register a queue so results for this producer can be streamed
            # back to the still-open HTTP response.
            queue = asyncio.Queue()
            self._active_streams[producer] = queue

            return Response(
                self._generate_response(producer, queue),
                content_type='application/x-ndjson',
                status=200
            )

        return app

    def _parse_message(self, body: bytes, content_type: str) -> Dict[str, Any]:
        """Parse message body based on content type.

        When the content type names neither YAML nor JSON, the body is
        tried as YAML, then JSON, then wrapped as raw text.
        """
        if 'yaml' in content_type:
            logger.debug("REST - Attempting to parse message as YAML")
            return yaml.safe_load(body)
        elif 'json' in content_type:
            logger.debug("REST - Attempting to parse message as JSON")
            return json.loads(body)

        # Fallback: try YAML, then JSON, then raw text
        try:
            logger.debug(
                "REST - Attempting to parse message as YAML (fallback)")
            return yaml.safe_load(body)
        except Exception:
            try:
                logger.debug(
                    "REST - Attempting to parse message as JSON (fallback)")
                return json.loads(body)
            except Exception:
                logger.debug("REST - Parsing as raw text (fallback)")
                return {
                    "content": body.decode('utf-8', errors='replace'),
                    "raw_message": True
                }

    async def _generate_response(
        self, producer: str, queue: asyncio.Queue
    ) -> AsyncGenerator[str, None]:
        """Generate the NDJSON streaming response for one producer.

        Yields results from *queue* until a final 'completed' message is
        seen, a 600s timeout elapses, or an error occurs; always removes
        the producer's stream registration when the stream closes.
        """
        try:
            yield json.dumps({"status": "processing"}) + "\n"
            while True:
                try:
                    result = await asyncio.wait_for(queue.get(), timeout=600)
                    yield json.dumps(result) + "\n"
                    # Check if the status is 'completed' and the execution time is greater than 1 second
                    # This helps prevent issues caused by executions that are
                    # too short or not properly finished
                    if result.get('status') == 'completed' and result.get(
                            'metadata', {}).get('execution_time', 0) > 1:
                        logger.debug(
                            "REST - Final message sent, closing stream")
                        break

                except asyncio.TimeoutError:
                    yield json.dumps({"status": "timeout", "error": "No response received within timeout"}) + "\n"
                    break
                except Exception as e:
                    logger.error("REST - Error in stream: %s", e)
                    yield json.dumps({"status": "error", "error": str(e)}) + "\n"
                    break
        finally:
            self._active_streams.pop(producer, None)

    async def send_result(self, producer: str, result: Dict[str, Any]) -> None:
        """Send a result message to a specific client."""
        if producer in self._active_streams:
            await self._active_streams[producer].put(result)
        else:
            logger.warning(
                "REST - No active stream found for producer: %s",
                producer)

    async def _start_server(self) -> None:
        """Start the Hypercorn server (runs until the server terminates)."""
        self._loop = asyncio.get_running_loop()
        config = HyperConfig()
        config.errorlog = logger
        config.accesslog = logger
        config.bind = [f"{self.config['host']}:{self.config['port']}"]
        config.use_reloader = False
        config.worker_class = "asyncio"
        config.alpn_protocols = ["h2", "http/1.1"]

        # Enable TLS only when both files are configured; .get() keeps this
        # safe for configs that omit the keys entirely (previously a missing
        # key raised KeyError).
        if self.config.get('certfile') and self.config.get('keyfile'):
            config.certfile = self.config['certfile']
            config.keyfile = self.config['keyfile']

        await serve(self.app, config)

    def start(self) -> None:
        """Start the REST server.

        This call blocks for the server's entire lifetime.
        """
        logger.debug("REST - Starting adapter on %s:%s",
                     self.config['host'], self.config['port'])
        try:
            # Set the flag before entering asyncio.run(): serve() blocks until
            # shutdown, so setting it afterwards (as before) meant _running
            # was never True while the server was actually serving.
            self._running = True
            asyncio.run(self._start_server())
        except Exception as e:
            logger.error("REST - Error starting server: %s", e)
            raise
        finally:
            self._running = False

    def send_result_sync(self, producer: str, result: Dict[str, Any]) -> None:
        """Synchronous wrapper for sending result messages.

        Safe to call from non-async threads; the actual send is scheduled
        on the server's event loop.
        """
        if producer not in self._active_streams:
            logger.warning("REST - No active stream found for producer: %s. Available streams: %s",
                           producer, list(self._active_streams.keys()))
            return

        if self._loop and self._loop.is_running():
            future = asyncio.run_coroutine_threadsafe(
                self.send_result(producer, result),
                self._loop
            )
            try:
                future.result(timeout=5)
            except Exception as e:
                logger.error("REST - Error sending result: %s", e)
        else:
            logger.error("REST - Event loop not running; cannot send result.")

    def stop(self) -> None:
        """Stop the REST server.

        NOTE(review): this only clears the running flag; it does not signal
        Hypercorn to shut down, so start() keeps blocking until the process
        exits — confirm whether a shutdown trigger is needed.
        """
        logger.debug("REST - Stopping adapter")
        self._running = False

    def _handle_message(self, message: Dict[str, Any]) -> None:
        """(Not used in REST; handled via route)."""
        pass

    def publish_result_message_rest(self, sender, **kwargs):
        """Publish result message via REST adapter.

        Args:
            sender: Signal sender (unused).
            **kwargs: Must carry ``message``, a dict whose 'destinations'
                list names the target client in position 0.
        """
        try:
            message = kwargs.get('message', {})
            destinations = message.get('destinations', [])
            if not destinations:
                # Previously an empty list raised an uncaught IndexError.
                logger.warning(
                    "REST - Result message has no destinations; dropping it")
                return
            destination = destinations[0]
            self.send_result_sync(destination, message)
            logger.debug(
                "Successfully scheduled result message for REST client: %s",
                destination)
        except (ConnectionError, TimeoutError) as e:
            logger.error("Error sending result message to REST client: %s", e)
diff --git a/simulation_bridge/src/utils/__init__.py b/simulation_bridge/src/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/src/utils/certs.py b/simulation_bridge/src/utils/certs.py
new file mode 100644
index 0000000..625f10f
--- /dev/null
+++ b/simulation_bridge/src/utils/certs.py
@@ -0,0 +1,350 @@
+"""Certificate generator module with file existence checking."""
+
+import datetime
+from pathlib import Path
+from typing import Optional, Tuple
+
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+from cryptography.x509.oid import NameOID
+
+from ..utils.logger import get_logger
+
+logger = get_logger()
+
+
class CertificateGenerator:
    """Class for generating SSL certificates and private keys."""

    def __init__(
        self,
        key_size: int = 2048,
        public_exponent: int = 65537,
        validity_days: int = 365
    ) -> None:
        """
        Initialize certificate generator.

        Args:
            key_size: RSA key size in bits
            public_exponent: RSA public exponent (65537 is the standard choice)
            validity_days: Certificate validity in days
        """
        self.key_size = key_size
        self.public_exponent = public_exponent
        self.validity_days = validity_days

    def _generate_private_key(self) -> rsa.RSAPrivateKey:
        """
        Generate RSA private key.

        Returns:
            RSA private key instance
        """
        return rsa.generate_private_key(
            public_exponent=self.public_exponent,
            key_size=self.key_size,
        )

    def _build_certificate_name(  # pylint: disable=too-many-arguments
        self,
        country: str = "US",
        state: str = "California",
        locality: str = "San Francisco",
        organization: str = "My Company",
        common_name: str = "localhost"
    ) -> x509.Name:
        """
        Build X.509 certificate name.

        Args:
            country: Country name
            state: State or province name
            locality: Locality name
            organization: Organization name
            common_name: Common name

        Returns:
            X.509 Name object
        """
        return x509.Name([
            x509.NameAttribute(NameOID.COUNTRY_NAME, country),
            x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, state),
            x509.NameAttribute(NameOID.LOCALITY_NAME, locality),
            x509.NameAttribute(NameOID.ORGANIZATION_NAME, organization),
            x509.NameAttribute(NameOID.COMMON_NAME, common_name),
        ])

    def _create_certificate(
        self,
        private_key: rsa.RSAPrivateKey,
        subject_name: x509.Name,
        dns_names: Optional[list] = None
    ) -> x509.Certificate:
        """
        Create a self-signed X.509 certificate (issuer == subject), signed
        with SHA-256.

        Args:
            private_key: Private key for signing
            subject_name: Subject name for certificate
            dns_names: List of DNS names for SAN extension
                (defaults to ["localhost"])

        Returns:
            X.509 certificate
        """
        if dns_names is None:
            dns_names = ["localhost"]

        now = datetime.datetime.now(datetime.timezone.utc)
        cert_builder = (
            x509.CertificateBuilder()
            .subject_name(subject_name)
            .issuer_name(subject_name)  # Self-signed
            .public_key(private_key.public_key())
            .serial_number(x509.random_serial_number())
            .not_valid_before(now)
            .not_valid_after(now + datetime.timedelta(days=self.validity_days))
        )

        # Add Subject Alternative Name extension
        san_list = [x509.DNSName(name) for name in dns_names]
        cert_builder = cert_builder.add_extension(
            x509.SubjectAlternativeName(san_list),
            critical=False,
        )

        return cert_builder.sign(private_key, hashes.SHA256())

    def files_exist(self, cert_path: str, key_path: str) -> bool:
        """
        Check if certificate and key files already exist.

        Args:
            cert_path: Path to certificate file
            key_path: Path to private key file

        Returns:
            True if both files exist, False otherwise
        """
        cert_file = Path(cert_path)
        key_file = Path(key_path)
        return cert_file.exists() and key_file.exists()

    def _validate_certificates(  # pylint: disable=too-many-locals
            self, cert_path: str, key_path: str) -> Tuple[bool, str]:
        """
        Validate existing certificate and key files.

        A pair is considered invalid when the certificate has expired, is
        not yet valid, expires within 30 days, or does not match the
        private key.

        Args:
            cert_path: Path to certificate file
            key_path: Path to private key file

        Returns:
            Tuple of (is_valid: bool, reason: str)
        """
        try:
            # Load certificate
            with open(cert_path, "rb") as cert_file:
                cert_data = cert_file.read()
                certificate = x509.load_pem_x509_certificate(cert_data)

            # Load private key
            with open(key_path, "rb") as key_file:
                key_data = key_file.read()
                private_key = serialization.load_pem_private_key(
                    key_data, password=None)

            # Check if certificate has expired
            # NOTE(review): not_valid_after_utc/not_valid_before_utc require a
            # recent `cryptography` release — confirm the pinned version.
            now = datetime.datetime.now(datetime.timezone.utc)
            if certificate.not_valid_after_utc < now:
                return False, "Certificate has expired"

            # Check if certificate is not yet valid
            if certificate.not_valid_before_utc > now:
                return False, "Certificate is not yet valid"

            # Check if certificate is expiring soon (within 30 days)
            expires_soon = now + datetime.timedelta(days=30)
            if certificate.not_valid_after_utc < expires_soon:
                days_left = (certificate.not_valid_after_utc - now).days
                return False, f"Certificate expires in {days_left} days"

            # Verify that the private key matches the certificate
            cert_public_key = certificate.public_key()
            private_public_key = private_key.public_key()

            # Compare key sizes and public numbers for RSA keys
            # (non-RSA keys without key_size/public_numbers are not compared)
            if (hasattr(cert_public_key, 'key_size') and
                    hasattr(private_public_key, 'key_size')):
                if cert_public_key.key_size != private_public_key.key_size:
                    return False, "Certificate and private key do not match"

                cert_numbers = cert_public_key.public_numbers()
                private_numbers = private_public_key.public_numbers()
                if (cert_numbers.n != private_numbers.n or
                        cert_numbers.e != private_numbers.e):
                    return False, "Certificate and private key do not match"

            return True, "Certificates are valid"

        except FileNotFoundError as exc:
            return False, f"Certificate file not found: {exc}"
        except Exception as exc:  # pylint: disable=broad-except
            return False, f"Certificate validation error: {exc}"

    def _ensure_directory_exists(self, file_path: str) -> None:
        """
        Ensure the directory for the file path exists.

        Args:
            file_path: Full path to file
        """
        directory = Path(file_path).parent
        directory.mkdir(parents=True, exist_ok=True)

    def _write_private_key(
        self,
        private_key: rsa.RSAPrivateKey,
        key_path: str
    ) -> None:
        """
        Write private key to file.

        The key is written unencrypted in PEM (TraditionalOpenSSL) format.

        Args:
            private_key: Private key to write
            key_path: Path to write key file
        """
        self._ensure_directory_exists(key_path)

        key_bytes = private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )

        with open(key_path, "wb") as key_file:
            key_file.write(key_bytes)

    def _write_certificate(
            self, certificate: x509.Certificate, cert_path: str) -> None:
        """
        Write certificate to file in PEM format.

        Args:
            certificate: Certificate to write
            cert_path: Path to write certificate file
        """
        self._ensure_directory_exists(cert_path)

        cert_bytes = certificate.public_bytes(serialization.Encoding.PEM)

        with open(cert_path, "wb") as cert_file:
            cert_file.write(cert_bytes)

    def generate_certificate_pair(
        self,
        cert_path: str = "certs/cert.pem",
        key_path: str = "certs/key.pem",
        force_overwrite: bool = False,
        **name_kwargs
    ) -> Tuple[bool, str]:
        """
        Generate certificate and private key pair.

        Reports failure (rather than raising) both when the files already
        exist without force_overwrite and on any generation error.

        Args:
            cert_path: Path for certificate file
            key_path: Path for private key file
            force_overwrite: Whether to overwrite existing files
            **name_kwargs: Additional arguments for certificate name

        Returns:
            Tuple of (success: bool, message: str)
        """
        try:
            # Check if files already exist
            if self.files_exist(cert_path, key_path) and not force_overwrite:
                return False, f"Files already exist: {cert_path}, {key_path}. Use force_overwrite=True to overwrite."  # pylint: disable=line-too-long

            # Generate private key
            private_key = self._generate_private_key()

            # Build certificate name
            subject_name = self._build_certificate_name(**name_kwargs)

            # Create certificate (SAN defaults to "localhost")
            certificate = self._create_certificate(private_key, subject_name)

            # Write files
            self._write_private_key(private_key, key_path)
            self._write_certificate(certificate, cert_path)

            return True, f"Certificate and key generated successfully: {cert_path}, {key_path}"

        except Exception as exc:  # pylint: disable=broad-except
            return False, f"Error generating certificate: {str(exc)}"
+
+
def ensure_certificates(
    cert_path: str = "certs/cert.pem",
    key_path: str = "certs/key.pem",
    validity_days: int = 365,
    force_overwrite: bool = False,
    **name_kwargs
) -> None:
    """
    Ensure SSL certificates exist, generating them when missing or invalid.

    Existing pairs are validated first; invalid (expired, not yet valid,
    expiring soon, or mismatched) certificates are regenerated in place.

    Args:
        cert_path: Path for certificate file
        key_path: Path for private key file
        validity_days: Certificate validity in days
        force_overwrite: Whether to overwrite existing files
        **name_kwargs: Additional arguments for certificate name
            (country, state, etc.)

    Raises:
        RuntimeError: If certificate generation fails
    """
    generator = CertificateGenerator(validity_days=validity_days)

    if not generator.files_exist(cert_path, key_path):
        logger.debug("SSL certificates not found, generating new ones...")
    else:
        # Files are present: decide whether they can be kept.
        is_valid, reason = generator._validate_certificates(  # pylint: disable=protected-access
            cert_path, key_path)

        if is_valid and not force_overwrite:
            logger.info(
                "SSL certificates are valid")
            logger.debug(
                "Certificate path: %s, Key path: %s", cert_path, key_path)
            return

        if not is_valid:
            logger.error(
                "Existing certificates are invalid (%s), regenerating...",
                reason)
            force_overwrite = True
        if force_overwrite:
            logger.debug(
                "Force overwrite requested, regenerating certificates...")

    success, message = generator.generate_certificate_pair(
        cert_path=cert_path,
        key_path=key_path,
        force_overwrite=force_overwrite,
        **name_kwargs
    )

    if not success:
        logger.error("Failed to generate SSL certificates: %s", message)
        raise RuntimeError(f"Certificate generation failed: {message}")

    logger.info("SSL certificates generated successfully")
    logger.debug("Certificate path: %s, Key path: %s", cert_path, key_path)
diff --git a/simulation_bridge/src/utils/config_loader.py b/simulation_bridge/src/utils/config_loader.py
new file mode 100644
index 0000000..6c11ec3
--- /dev/null
+++ b/simulation_bridge/src/utils/config_loader.py
@@ -0,0 +1,98 @@
+"""
+config_loader.py - Configuration loader utility
+
+This module provides functionality to load configuration from YAML files,
+with support for environment variable substitution and validation.
+"""
+
+import os
+import json
+from typing import Dict, Any, Optional, Union
+from pathlib import Path
+from importlib import resources
+import yaml
+from ..utils.logger import get_logger
+
+# Configure logger
+logger = get_logger()
+
+
def load_config(
        config_path: Optional[Union[str, Path]] = None) -> Dict[str, Any]:
    """
    Load configuration from a YAML file.

    Environment-variable placeholders (``${VAR}`` / ``${VAR:default}``)
    are substituted in the loaded values.

    Args:
        config_path: Path to the configuration file (optional; when omitted,
            the packaged default ``config.yaml`` is used)

    Returns:
        Dictionary containing the configuration

    Raises:
        FileNotFoundError: If the configuration file does not exist
        yaml.YAMLError: If the YAML file is invalid
    """
    if config_path is None:
        try:
            logger.debug("Loading default configuration file")
            # importlib.resources.open_text() is deprecated since Python 3.11;
            # use the files() traversable API instead.
            resource = resources.files(
                "simulation_bridge.config").joinpath("config.yaml")
            with resource.open("r", encoding="utf-8") as f:
                config = yaml.safe_load(f)
        except (FileNotFoundError, ModuleNotFoundError) as exc:
            # ModuleNotFoundError covers a missing config sub-package.
            raise FileNotFoundError(
                "Default configuration file not found inside the package."
            ) from exc
    else:
        logger.debug("Loading configuration file from path: %s", config_path)
        config_file: Path = Path(config_path)
        if not config_file.exists():
            raise FileNotFoundError(
                f"Configuration file not found: {config_file}")
        with open(config_file, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)
    config = _substitute_env_vars(config)

    return config
+
+
+def _substitute_env_vars(
+ config: Union[Dict[str, Any], list, str]
+) -> Union[Dict[str, Any], list, str]:
+ """
+ Recursively substitute environment variables in configuration values.
+ Environment variables should be in the format ${ENV_VAR} or ${ENV_VAR:default_value}
+
+ Args:
+ config: Configuration dictionary
+
+ Returns:
+ Configuration with environment variables substituted
+ """
+ if isinstance(config, dict):
+ return {k: _substitute_env_vars(v) for k, v in config.items()}
+ if isinstance(config, list):
+ return [_substitute_env_vars(item) for item in config]
+ if isinstance(config, str) and "${" in config and "}" in config:
+ start_idx: int = config.find("${")
+ end_idx: int = config.find("}", start_idx)
+ if start_idx != -1 and end_idx != -1:
+ env_var: str = config[start_idx + 2:end_idx]
+
+ if ":" in env_var:
+ env_name, default = env_var.split(":", 1)
+ else:
+ env_name, default = env_var, ""
+
+ env_value: str = os.environ.get(env_name, default)
+ return config[:start_idx] + env_value + config[end_idx + 1:]
+
+ return config
+
+
def load_protocol_config() -> Dict[str, list]:
    """
    Load the protocol configuration from a JSON file.

    Reads ``protocol_adapters/adapters_signal.json`` (relative to the
    package source tree) and returns its "protocols" mapping.
    """
    adapters_file = (Path(__file__).parent.parent
                     / "protocol_adapters" / "adapters_signal.json")
    with adapters_file.open('r', encoding='utf-8') as fh:
        return json.load(fh)["protocols"]
diff --git a/simulation_bridge/src/utils/config_manager.py b/simulation_bridge/src/utils/config_manager.py
new file mode 100644
index 0000000..52b3e4b
--- /dev/null
+++ b/simulation_bridge/src/utils/config_manager.py
@@ -0,0 +1,284 @@
+"""
+config_manager.py - Configuration manager utility
+
+This module provides functionality to load and manage application configuration
+using Pydantic models for validation and nested structure.
+"""
+from enum import Enum
+from typing import Optional, Dict, Any, List
+from pathlib import Path
+from pydantic import BaseModel, ValidationError, ConfigDict
+
+from .logger import get_logger
+from .config_loader import load_config
+
+logger = get_logger()
+
+
class LogLevel(str, Enum):
    """Supported logging levels.

    Subclasses ``str`` so values compare equal to plain strings coming
    from the YAML configuration.
    """
    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
+
+
class ExchangeConfig(BaseModel):
    """Configuration for RabbitMQ exchanges."""
    name: str
    # Exchange type string (e.g. direct/topic/fanout) — passed to RabbitMQ.
    type: str
    durable: bool
    auto_delete: bool
    internal: bool
+
+
class QueueConfig(BaseModel):
    """Configuration for RabbitMQ queues (mirrors queue_declare options)."""
    name: str
    durable: bool
    exclusive: bool
    auto_delete: bool
+
+
class BindingConfig(BaseModel):
    """Configuration for RabbitMQ bindings (queue -> exchange routing)."""
    queue: str
    exchange: str
    routing_key: str
+
+
class RabbitMQInfrastructure(BaseModel):
    """Configuration for RabbitMQ infrastructure (topology to declare)."""
    exchanges: List[ExchangeConfig]
    queues: List[QueueConfig]
    bindings: List[BindingConfig]
+
+
class RabbitMQConfig(BaseModel):
    """Configuration for RabbitMQ connection."""
    host: str
    port: int
    vhost: str
    username: str
    password: str
    # Whether to use TLS for the broker connection.
    tls: bool
    infrastructure: RabbitMQInfrastructure
+
+
class MQTTConfig(BaseModel):
    """Configuration for MQTT connection."""
    host: str
    port: int
    # Keep-alive interval in seconds.
    keepalive: int
    input_topic: str
    output_topic: str
    # MQTT quality-of-service level for publishes/subscriptions.
    qos: int
    username: str
    password: str
    tls: bool
+
+
class RESTConfig(BaseModel):
    """Configuration for REST API."""
    host: str
    port: int
    endpoint: str
    debug: bool
    # TLS material is optional. Default to None so configurations that omit
    # these keys still validate — under pydantic v2, Optional without a
    # default is a *required* (nullable) field.
    certfile: Optional[str] = None
    keyfile: Optional[str] = None
+
+
class LoggingConfig(BaseModel):
    """Configuration for logging."""
    level: LogLevel
    # printf-style logging format string.
    format: str
    # Path of the log file.
    file: str
+
+
class SimulationBridgeConfig(BaseModel):
    """Configuration for simulation bridge."""
    # Identifier of this bridge instance.
    bridge_id: str
+
+
class Config(BaseModel):
    """Main configuration model using Pydantic for validation.

    Unknown top-level keys in the input are ignored rather than rejected.
    """
    model_config = ConfigDict(extra='ignore')

    simulation_bridge: SimulationBridgeConfig
    rabbitmq: RabbitMQConfig
    mqtt: MQTTConfig
    rest: RESTConfig
    logging: LoggingConfig

    def to_dict(self) -> Dict[str, Any]:
        """Convert the model to a dictionary with nested structure."""
        return {
            section: getattr(self, section).model_dump()
            for section in (
                'simulation_bridge', 'rabbitmq', 'mqtt', 'rest', 'logging')
        }

    @classmethod
    def from_dict(cls, config_dict: Dict[str, Any]) -> 'Config':
        """Create a Config instance from a nested dictionary.

        Each sub-model is constructed explicitly so that the nested
        RabbitMQ infrastructure (and its sensible connection defaults)
        is handled correctly before assembling the final Config.
        """
        rabbitmq_section = config_dict.get('rabbitmq', {})
        infra_section = rabbitmq_section.get('infrastructure', {})

        # Build the RabbitMQ topology objects first.
        infrastructure = RabbitMQInfrastructure(
            exchanges=[ExchangeConfig(**item)
                       for item in infra_section.get('exchanges', [])],
            queues=[QueueConfig(**item)
                    for item in infra_section.get('queues', [])],
            bindings=[BindingConfig(**item)
                      for item in infra_section.get('bindings', [])],
        )

        # Connection settings fall back to standard local-broker defaults.
        rabbitmq = RabbitMQConfig(
            host=rabbitmq_section.get('host', 'localhost'),
            port=rabbitmq_section.get('port', 5672),
            vhost=rabbitmq_section.get('vhost', '/'),
            username=rabbitmq_section.get('username', 'guest'),
            password=rabbitmq_section.get('password', 'guest'),
            tls=rabbitmq_section.get('tls', False),
            infrastructure=infrastructure,
        )

        return cls(
            simulation_bridge=SimulationBridgeConfig(
                **config_dict.get('simulation_bridge', {})),
            rabbitmq=rabbitmq,
            mqtt=MQTTConfig(**config_dict.get('mqtt', {})),
            rest=RESTConfig(**config_dict.get('rest', {})),
            logging=LoggingConfig(**config_dict.get('logging', {})),
        )
+
+
class ConfigManager:
    """
    Manager for loading and providing access to application configuration.

    On any load or validation failure the built-in defaults are used, so
    construction never raises.
    """

    def __init__(self, config_path: Optional[str] = None) -> None:
        """
        Initialize the configuration manager.

        Args:
            config_path (Optional[str]): Path to the configuration file.
                If None, uses the default location.
        """
        default_location = (
            Path(__file__).parent.parent.parent.parent / "config.yaml")
        self.config_path: Path = (
            Path(config_path) if config_path else default_location)
        try:
            self.config = self._validate_config(load_config(self.config_path))
        except (FileNotFoundError, ValidationError) as e:
            logger.warning("Using defaults value: %s", str(e))
            self.config = self.get_default_config()
        except (IOError, PermissionError) as e:
            logger.error("File access error: %s, using defaults.", str(e))
            self.config = self.get_default_config()
        except Exception as e:  # pylint: disable=broad-exception-caught
            logger.error("Unexpected error: %s, using defaults.", str(e))
            logger.exception("Full traceback:")
            self.config = self.get_default_config()

    def _validate_config(self, config_data: Dict[str, Any]) -> Dict[str, Any]:
        """Validate configuration using the Pydantic model.

        The data round-trips through Config (dict -> model -> dict) so the
        returned dictionary is normalized and fully validated.
        """
        try:
            validated_config = Config.from_dict(config_data).to_dict()
        except ValidationError as e:
            logger.error("Configuration validation failed: %s", str(e))
            raise
        logger.info("Configuration validated successfully.")
        return validated_config

    def get_default_config(self) -> Dict[str, Any]:
        """Get default configuration as dictionary."""
        # Defaults target local brokers with guest credentials and no TLS.
        rabbitmq_defaults = RabbitMQConfig(
            host="localhost",
            port=5672,
            vhost="/",
            username="guest",
            password="guest",
            tls=False,
            infrastructure=RabbitMQInfrastructure(
                exchanges=[], queues=[], bindings=[]),
        )
        mqtt_defaults = MQTTConfig(
            host="localhost",
            port=1883,
            keepalive=60,
            input_topic="bridge/input",
            output_topic="bridge/output",
            qos=0,
            username="guest",
            password="guest",
            tls=False,
        )
        rest_defaults = RESTConfig(
            host="0.0.0.0",
            port=5000,
            endpoint="/message",
            debug=False,
            certfile=None,
            keyfile=None,
        )
        logging_defaults = LoggingConfig(
            level=LogLevel.INFO,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            file="logs/sim_bridge.log",
        )
        return Config(
            simulation_bridge=SimulationBridgeConfig(
                bridge_id="simulation_bridge"),
            rabbitmq=rabbitmq_defaults,
            mqtt=mqtt_defaults,
            rest=rest_defaults,
            logging=logging_defaults,
        ).to_dict()

    def get_config(self) -> Dict[str, Any]:
        """
        Get the loaded configuration.

        Returns:
            Dict[str, Any]: Configuration parameters
        """
        return self.config

    def _section(self, name: str) -> Dict[str, Any]:
        """Return one top-level configuration section (empty if missing)."""
        return self.config.get(name, {})

    def get_rabbitmq_config(self) -> Dict[str, Any]:
        """Get RabbitMQ configuration."""
        return self._section('rabbitmq')

    def get_mqtt_config(self) -> Dict[str, Any]:
        """Get MQTT configuration."""
        return self._section('mqtt')

    def get_rest_config(self) -> Dict[str, Any]:
        """Get REST configuration."""
        return self._section('rest')

    def get_logging_config(self) -> Dict[str, Any]:
        """Get logging configuration."""
        return self._section('logging')
diff --git a/src/simulation_bridge/utils/logger.py b/simulation_bridge/src/utils/logger.py
similarity index 52%
rename from src/simulation_bridge/utils/logger.py
rename to simulation_bridge/src/utils/logger.py
index 742689f..ea6669c 100644
--- a/src/simulation_bridge/utils/logger.py
+++ b/simulation_bridge/src/utils/logger.py
@@ -1,39 +1,53 @@
-# utils/logger.py - Centralized logging system
+"""
+Logger configuration module for the Simulation Bridge.
+
+This module provides utilities for setting up and configuring logging functionality
+with the following features:
+- File logging with rotation capability
+- Console logging with color-coded output
+- Configurable log levels and formats
+
+The module includes two main functions:
+- setup_logger: Creates and configures a new logger instance
+- get_logger: Retrieves an existing logger instance
+"""
import logging
import sys
from logging.handlers import RotatingFileHandler
from pathlib import Path
-from typing import Optional
+import colorlog
DEFAULT_LOG_FORMAT: str = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
DEFAULT_LOG_LEVEL: int = logging.INFO
MAX_LOG_SIZE: int = 5 * 1024 * 1024 # 5 MB
BACKUP_COUNT: int = 3
+
def setup_logger(
- name: str = 'SIM-BRIDGE',
+ name: str = 'SIMULATION-BRIDGE',
level: int = DEFAULT_LOG_LEVEL,
log_format: str = DEFAULT_LOG_FORMAT,
- log_file: str = 'logs/sim-bridge.log',
+ log_file: str = 'logs/sim_bridge.log',
enable_console: bool = True
) -> logging.Logger:
"""
- Configures a logger with handlers for file and console.
-
+ Configures a logger with handlers for file and console, with
+ optional colorization for console logs.
+
Args:
name: Name of the logger
level: Logging level
log_format: Format of the log messages
log_file: Path to the log file
- enable_console: Enables logging to the console
-
+ enable_console: Enables logging to the console with optional color
+
Returns:
Configured logger instance
"""
logger: logging.Logger = logging.getLogger(name)
logger.setLevel(level)
- # If the logger already has handlers, return it
+ # If the logger already has handlers, return it as is
if logger.handlers:
return logger
@@ -53,25 +67,41 @@ def setup_logger(
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
- # Configure console handler if enabled
+ # Configure console handler with color if enabled
if enable_console:
- console_handler: logging.StreamHandler = logging.StreamHandler(sys.stdout)
+ # Create a ColorFormatter for console logs
+ console_handler: logging.StreamHandler = logging.StreamHandler(
+ sys.stdout)
console_handler.setLevel(level)
- console_formatter: logging.Formatter = logging.Formatter(log_format)
- console_handler.setFormatter(console_formatter)
+
+ # Define a colorized log format for console output
+ console_format = '%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ color_formatter = colorlog.ColoredFormatter(
+ console_format,
+ datefmt='%Y-%m-%d %H:%M:%S',
+ log_colors={
+ 'DEBUG': 'cyan',
+ 'INFO': 'green',
+ 'WARNING': 'yellow',
+ 'ERROR': 'red',
+ 'CRITICAL': 'bold_red',
+ }
+ )
+
+ console_handler.setFormatter(color_formatter)
logger.addHandler(console_handler)
return logger
def get_logger(name: str = 'SIMULATION-BRIDGE') -> logging.Logger:
    """
    Return the logger registered under *name*.

    The logger is expected to have been configured beforehand via
    setup_logger(); otherwise an unconfigured (handler-less) logger
    with that name is returned.

    Args:
        name: Name of the logger

    Returns:
        Logger instance
    """
    return logging.getLogger(name)
-
\ No newline at end of file
diff --git a/simulation_bridge/src/utils/signal_manager.py b/simulation_bridge/src/utils/signal_manager.py
new file mode 100644
index 0000000..939445e
--- /dev/null
+++ b/simulation_bridge/src/utils/signal_manager.py
@@ -0,0 +1,137 @@
+"""
+Signal Manager for the Simulation Bridge.
+This module provides a signal management system that handles the registration and
+automatic subscription of signals for different protocols in the simulation bridge.
+"""
from typing import Callable, Dict, List, Optional

from blinker import signal

from .config_loader import load_protocol_config
from .logger import get_logger
+
+logger = get_logger()
+
+
class SignalManager:
    """Manages signal registration and automatic subscription for protocols.

    Used as a static registry: protocol adapter instances and the BridgeCore
    register themselves here, and the signal names declared in the protocol
    configuration are connected to / disconnected from the matching bound
    methods.
    """

    # Protocol configuration loaded once at class-creation time;
    # maps protocol name -> configuration dict.
    PROTOCOL_CONFIG = load_protocol_config()
    _bridge_core_instance = None
    _adapter_instances: Dict[str, object] = {}

    @classmethod
    def set_bridge_core(cls, bridge_core_instance):
        """Store the BridgeCore instance for use in signals."""
        cls._bridge_core_instance = bridge_core_instance

    @classmethod
    def register_adapter_instance(
            cls, protocol: str, adapter_instance: object):
        """Store adapter instances to bind their methods to signals."""
        cls._adapter_instances[protocol] = adapter_instance

    @classmethod
    def get_available_signals(cls, protocol: str) -> List[str]:
        """Return the list of signals available for a given protocol."""
        protocol_data = cls.PROTOCOL_CONFIG.get(protocol)
        if not protocol_data:
            return []
        return list(protocol_data.get("signals", {}).keys())

    @classmethod
    def get_enabled_protocols(cls) -> List[str]:
        """Return the list of enabled protocols."""
        # Default to True for backward compatibility with configurations
        # that predate the 'enabled' flag.
        return [
            protocol
            for protocol, protocol_data in cls.PROTOCOL_CONFIG.items()
            if protocol_data.get("enabled", True)
        ]

    @classmethod
    def is_protocol_enabled(cls, protocol: str) -> bool:
        """Check if a protocol is enabled (unknown protocols count as disabled)."""
        protocol_data = cls.PROTOCOL_CONFIG.get(protocol)
        if not protocol_data:
            return False
        # Default to True for backward compatibility
        return protocol_data.get("enabled", True)

    @classmethod
    def connect_all_signals(cls):
        """Auto-connect all signals to the appropriate functions for enabled protocols only."""
        for protocol, protocol_data in cls.PROTOCOL_CONFIG.items():
            # Skip disabled protocols
            if not protocol_data.get("enabled", True):
                logger.debug(
                    "Skipping signals for disabled protocol: %s",
                    protocol)
                continue

            for sig_name, func_path in protocol_data.get(
                    "signals", {}).items():
                callback = cls._resolve_callback(func_path, protocol)
                if not callback:
                    logger.warning(
                        "Skipping signal '%s': callback not found", sig_name)
                    continue
                try:
                    signal(sig_name).connect(callback)
                    logger.debug(
                        "Connected signal '%s' to '%s' for protocol '%s'",
                        sig_name,
                        func_path,
                        protocol)
                except Exception as e:  # pylint: disable=broad-exception-caught
                    logger.error(
                        "Failed to connect signal '%s': %s", sig_name, e)

    @classmethod
    def _resolve_callback(
            cls, func_path: str, protocol: str) -> Optional[Callable]:
        """Resolve a callback function given its string path.

        Returns None (instead of raising) when the path is malformed or the
        owning instance has not been registered yet; annotated as
        ``Optional[Callable]`` accordingly.
        """
        if "." not in func_path:
            return None

        class_or_module, func_name = func_path.rsplit(".", 1)

        # 'BridgeCore.*' paths bind to the shared BridgeCore instance.
        if class_or_module == "BridgeCore":
            if not cls._bridge_core_instance:
                logger.error(
                    "BridgeCore instance not set but required for signal binding")
                return None
            return getattr(cls._bridge_core_instance, func_name, None)

        # Any other path binds to the protocol's registered adapter instance.
        adapter_instance = cls._adapter_instances.get(protocol)
        if adapter_instance:
            return getattr(adapter_instance, func_name, None)

        logger.warning(
            "No adapter instance registered for protocol '%s'",
            protocol)
        return None

    @classmethod
    def disconnect_all_signals(cls):
        """Disconnect all signals from their connected callbacks for enabled protocols only."""
        for protocol, protocol_data in cls.PROTOCOL_CONFIG.items():
            # Skip disabled protocols (they shouldn't have connected signals
            # anyway)
            if not protocol_data.get("enabled", True):
                logger.debug(
                    "Skipping signal disconnection for disabled protocol: %s",
                    protocol)
                continue

            for sig_name, func_path in protocol_data.get(
                    "signals", {}).items():
                callback = cls._resolve_callback(func_path, protocol)
                if not callback:
                    logger.warning(
                        "Skipping disconnect of signal '%s': callback not found",
                        sig_name)
                    continue
                try:
                    signal(sig_name).disconnect(callback)
                    logger.debug(
                        "Disconnected signal '%s' from '%s' for protocol '%s'",
                        sig_name, func_path, protocol)
                except Exception as e:  # pylint: disable=broad-exception-caught
                    logger.error(
                        "Failed to disconnect signal '%s': %s", sig_name, e)
diff --git a/simulation_bridge/src/utils/template.py b/simulation_bridge/src/utils/template.py
new file mode 100644
index 0000000..d2cd558
--- /dev/null
+++ b/simulation_bridge/src/utils/template.py
@@ -0,0 +1,213 @@
+"""
+Utility functions for generating default project files and configuration.
+"""
+
+import os
+
+try:
+ from importlib.resources import files # only available in Python 3.9+
+ pkg_resources = None # pylint: disable=invalid-name
+except ImportError:
+ files = None
+ import pkg_resources
+
+CONFIG_FILENAME = 'config.yaml'
+CONFIG_TEMPLATE_FILENAME = 'config.yaml.template'
+CONFIG_PATH = 'simulation_bridge.config'
+
+
def copy_resource(package, resource, target_path):
    """Copy a packaged resource file to *target_path*.

    Returns True on success, False when the resource is missing or the
    target cannot be written.
    """
    try:
        if files:
            # importlib.resources path (Python 3.9+)
            source = files(package).joinpath(resource)
            with open(source, 'rb') as src:
                payload = src.read()
        else:
            # Legacy fallback for interpreters without importlib.resources.files
            payload = pkg_resources.resource_string(package, resource)
        with open(target_path, 'wb') as dst:
            dst.write(payload)
    except (FileNotFoundError, OSError, IOError):
        return False
    return True
+
+
def create_directory(file_path, full_path):
    """Create directory *full_path* (reported as *file_path*).

    Returns a (status, error) tuple where status is one of
    'created', 'skipped', or 'error'.
    """
    # Never touch a directory that is already there.
    if os.path.exists(full_path):
        print(f"Directory already exists: {file_path}")
        return 'skipped', None
    try:
        os.makedirs(full_path, exist_ok=True)
    except OSError as error:
        message = f"Failed to create directory {file_path}: {error}"
        print(f"✗ {message}")
        return 'error', message
    print(f"✓ Created directory: {file_path}")
    return 'created', None
+
+
def create_file(file_path, full_path, package, resource):
    """Materialise one packaged resource at *full_path*.

    Returns a (status, error) tuple where status is one of
    'created', 'skipped', or 'error'.
    """
    # Never overwrite a user's existing file.
    if os.path.exists(full_path):
        print(f"File already exists (skipping): {file_path}")
        return 'skipped', None

    containing_dir = os.path.dirname(full_path)
    if containing_dir and not os.path.exists(containing_dir):
        try:
            os.makedirs(containing_dir, exist_ok=True)
        except OSError as error:
            failure = f"Failed to create parent directory for {file_path}: {error}"
            print(f"✗ {failure}")
            return 'error', failure

    if not copy_resource(package, resource, full_path):
        failure = f"Failed to create {file_path} from {package}/{resource}"
        print(f"✗ {failure}")
        return 'error', failure

    print(f"✓ Created file: {file_path}")
    return 'created', None
+
+
def print_summary(created, skipped, errors, descriptions):
    """Print a human-readable report of the project generation results."""
    print("-" * 50)
    print("Project generation summary:")
    print(f"✓ Created: {len(created)} files/directories")
    print(f"⏭ Skipped: {len(skipped)} files/directories (already exist)")
    print(f"✗ Errors: {len(errors)} files/directories")

    if created:
        print("\nCreated files and directories:")
        for entry in created:
            # Unknown entries get an empty description rather than a KeyError.
            print(f" • {entry} - {descriptions.get(entry, '')}")

    if skipped:
        print("\nSkipped files and directories:")
        for entry in skipped:
            print(f" • {entry} - {descriptions.get(entry, '')}")

    if errors:
        print("\nErrors encountered:")
        for failure in errors:
            print(f" • {failure}")

    print("\nProject structure generated successfully!")
+
+
def get_files_to_generate():
    """Map each target path to its (package, resource) source pair.

    Insertion order is significant: it is the order files are generated
    and reported in.
    """
    resources_root = 'simulation_bridge.resources'
    mapping = {
        CONFIG_FILENAME: (CONFIG_PATH, CONFIG_TEMPLATE_FILENAME),
        'client/simulation.yaml': (resources_root,
                                   'simulation.yaml.template'),
    }
    # Every protocol client ships the same trio of files.
    for proto in ('mqtt', 'rabbitmq', 'rest'):
        proto_pkg = f'{resources_root}.{proto}'
        mapping[f'client/{proto}/{proto}_client.py'] = (
            proto_pkg, f'{proto}_client.py')
        mapping[f'client/{proto}/{proto}_use.yaml'] = (
            proto_pkg, f'{proto}_use.yaml.template')
        mapping[f'client/{proto}/requirements.txt'] = (
            proto_pkg, 'requirements.txt')
    mapping['client/README.md'] = (resources_root, 'README.md')
    return mapping
+
+
def get_file_descriptions():
    """Human-readable descriptions for every generated file, keyed by path."""
    # Display labels differ from the lowercase protocol directory names.
    display = {'mqtt': 'MQTT', 'rabbitmq': 'RabbitMQ', 'rest': 'REST'}
    descriptions = {
        CONFIG_FILENAME: "Main configuration file for the simulation bridge",
        'client/simulation.yaml': "Example payload for simulation requests",
    }
    for proto, label in display.items():
        descriptions[f'client/{proto}/{proto}_client.py'] = (
            f"{label} protocol client implementation")
        descriptions[f'client/{proto}/{proto}_use.yaml'] = (
            f"{label} usage configuration (example)")
        descriptions[f'client/{proto}/requirements.txt'] = (
            f"{label} protocol client requirements")
    descriptions['client/README.md'] = "Documentation for the Clients"
    return descriptions
+
+
def copy_config_template(config_path):
    """Copy the bundled configuration template to *config_path*.

    Raises:
        FileNotFoundError: if the packaged template cannot be located.
    """
    try:
        if files:
            # importlib.resources path (Python 3.9+)
            source = files(CONFIG_PATH).joinpath(CONFIG_TEMPLATE_FILENAME)
            with open(source, 'rb') as src:
                payload = src.read()
        else:
            # Legacy pkg_resources fallback
            payload = pkg_resources.resource_string(
                CONFIG_PATH, CONFIG_TEMPLATE_FILENAME)
        with open(config_path, 'wb') as dst:
            dst.write(payload)
    except (ImportError, AttributeError, FileNotFoundError) as e:
        raise FileNotFoundError(
            f"Template configuration file not found: {e}") from e
+
+
def generate_default_config():
    """Copy the template configuration file into the current directory.

    Existing files are never overwritten; every outcome is reported
    on stdout rather than raised.
    """
    destination = os.path.join(os.getcwd(), CONFIG_FILENAME)
    if os.path.exists(destination):
        print(f"File already exists at path: {destination}")
        return

    try:
        copy_config_template(destination)
    except FileNotFoundError:
        print("Error: Template configuration file not found.")
    except (OSError, IOError) as e:
        print(f"Error generating configuration file: {e}")
    else:
        print(f"Configuration template copied to: {destination}")
+
+
def generate_default_project():
    """Generate default project files and directories in the current directory."""
    targets = get_files_to_generate()
    descriptions = get_file_descriptions()
    root = os.getcwd()

    created, skipped, failures = [], [], []

    print("Generating default project structure...")
    print(f"Target directory: {root}")
    print("-" * 50)

    for rel_path, (package, resource) in targets.items():
        absolute = os.path.join(root, rel_path)

        # Trailing-slash entries denote directories; everything else is a file.
        if rel_path.endswith('/'):
            status, error = create_directory(rel_path, absolute)
        else:
            status, error = create_file(
                rel_path, absolute, package, resource)

        if status == 'created':
            created.append(rel_path)
        elif status == 'skipped':
            skipped.append(rel_path)
        elif status == 'error' and error:
            failures.append(error)

    print_summary(created, skipped, failures, descriptions)
diff --git a/simulation_bridge/test/__init__.py b/simulation_bridge/test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/test/integration/__init__.py b/simulation_bridge/test/integration/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/test/integration/test_integration.py b/simulation_bridge/test/integration/test_integration.py
new file mode 100644
index 0000000..aabfe82
--- /dev/null
+++ b/simulation_bridge/test/integration/test_integration.py
@@ -0,0 +1,797 @@
+
+"""Integration tests for simulation bridge components."""
+import logging
+from unittest.mock import patch, MagicMock
+import datetime
+
+import pytest
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+from cryptography.x509.oid import NameOID
+
+from simulation_bridge.src.core.bridge_orchestrator import BridgeOrchestrator
+from simulation_bridge.src.core.bridge_core import BridgeCore
+from simulation_bridge.src.core.bridge_infrastructure import RabbitMQInfrastructure
+from simulation_bridge.src.utils.signal_manager import SignalManager
+from simulation_bridge.src.utils.certs import CertificateGenerator, ensure_certificates
+
+# pylint: disable=too-many-arguments, unused-argument, protected-access, import-outside-toplevel
+# pylint: disable=redefined-outer-name, attribute-defined-outside-init, reimported
+
+
@pytest.fixture
def mock_config_manager():
    """Mock configuration manager fixture.

    Provides the minimal configuration shape the bridge components read:
    a bridge identity plus RabbitMQ connection and (empty) infrastructure
    sections.
    """
    mock = MagicMock()
    mock.get_config.return_value = {
        'simulation_bridge': {'bridge_id': 'test-bridge'},
        'rabbitmq': {
            'host': 'localhost',
            'port': 5672,
            'username': 'guest',
            'password': 'guest',
            'vhost': '/',
            'infrastructure': {
                'exchanges': [],
                'queues': [],
                'bindings': []
            }
        }
    }
    # Keep get_rabbitmq_config consistent with the nested section above.
    mock.get_rabbitmq_config.return_value = mock.get_config.return_value['rabbitmq']
    return mock
+
+
def test_bridge_infrastructure_setup(mock_config_manager):
    """Test bridge infrastructure setup."""
    # Patch pika so no real AMQP connection is ever attempted.
    with patch("pika.BlockingConnection"):
        infra = RabbitMQInfrastructure(mock_config_manager)
        with patch.object(infra, "_setup_exchanges") as mock_ex, \
                patch.object(infra, "_setup_queues") as mock_qu, \
                patch.object(infra, "_setup_bindings") as mock_bi, \
                patch.object(infra.connection, "close"):
            infra.setup()
            # setup() must drive all three declaration phases exactly once.
            mock_ex.assert_called_once()
            mock_qu.assert_called_once()
            mock_bi.assert_called_once()


def test_bridge_infrastructure_setup_exception(mock_config_manager):
    """Test bridge infrastructure setup with exception."""
    with patch("pika.BlockingConnection"):
        infra = RabbitMQInfrastructure(mock_config_manager)
        # A failure in any setup phase must propagate out of setup().
        with patch.object(infra, "_setup_exchanges", side_effect=Exception("fail")), \
                patch.object(infra.connection, "close"):
            with pytest.raises(Exception):
                infra.setup()


def test_bridge_infrastructure_reconnect(mock_config_manager):
    """Test bridge infrastructure reconnection."""
    with patch("pika.BlockingConnection"):
        infra = RabbitMQInfrastructure(mock_config_manager)
        # Simulate a dropped connection; reconnect() should build a new one.
        infra.connection.is_closed = True
        with patch("pika.BlockingConnection"):
            conn = infra.reconnect()
            assert conn is not None


def test_bridge_core_init(mock_config_manager):
    """Test bridge core initialization."""
    adapters = {}
    with patch("pika.BlockingConnection"):
        core = BridgeCore(mock_config_manager, adapters)
        assert core.adapters == adapters
        assert core.channel is not None
+
+
def test_bridge_core_handle_input_message_valid(
        monkeypatch, mock_config_manager):
    """Test bridge core handling valid input message."""
    adapters = {}
    with patch("pika.BlockingConnection"):
        core = BridgeCore(mock_config_manager, adapters)
        # Patch _publish_message to avoid side effects
        monkeypatch.setattr(core, "_publish_message", lambda *a, **kw: None)
        # A well-formed payload carries the complete 'simulation' section.
        valid_message = {
            'simulation': {
                'request_id': '1',
                'client_id': 'c',
                'simulator': 'sim',
                'type': 't',
                'file': 'f',
                'inputs': {},
                'outputs': {}
            }
        }
        core.handle_input_message(
            None,
            message=valid_message,
            producer='p',
            consumer='c',
            protocol='rabbitmq')


def test_bridge_core_handle_input_message_invalid(
        monkeypatch, mock_config_manager):
    """Test bridge core handling invalid input message.

    The handler is expected to cope gracefully (no exception) with a
    payload that lacks the 'simulation' section.
    """
    adapters = {}
    with patch("pika.BlockingConnection"):
        core = BridgeCore(mock_config_manager, adapters)
        monkeypatch.setattr(core, "_publish_message", lambda *a, **kw: None)
        # message missing 'simulation'
        core.handle_input_message(
            None,
            message={},
            producer='p',
            consumer='c',
            protocol='rabbitmq')


def test_bridge_core_handle_result_rabbitmq_message(
        monkeypatch, mock_config_manager):
    """Test bridge core handling result RabbitMQ message."""
    adapters = {}
    with patch("pika.BlockingConnection"):
        core = BridgeCore(mock_config_manager, adapters)
        monkeypatch.setattr(core, "_publish_message", lambda *a, **kw: None)
        core.handle_result_rabbitmq_message(None, message={'source': 'src'})


def test_bridge_core_handle_result_unknown_message(mock_config_manager):
    """Test bridge core handling unknown result message."""
    adapters = {}
    with patch("pika.BlockingConnection"):
        core = BridgeCore(mock_config_manager, adapters)
        # Pass the 'error' key as well
        core.handle_result_unknown_message(
            None, message={'foo': 'bar', 'error': 'some error'})
+
+
def test_bridge_orchestrator_setup_interfaces_enabled(mock_config_manager):
    """Test bridge orchestrator setup interfaces when enabled."""
    # Patch the names as seen by the orchestrator module, not at their source.
    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    mock_signal_path = "simulation_bridge.src.core.bridge_orchestrator.SignalManager"
    mock_core_path = "simulation_bridge.src.core.bridge_orchestrator.BridgeCore"
    mock_cert_path = "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates"
    mock_proto_path = "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config"

    with patch(mock_infra_path) as mock_infra, \
            patch(mock_signal_path) as mock_signal_manager, \
            patch(mock_core_path) as mock_bridge_core, \
            patch(mock_cert_path), \
            patch(mock_proto_path) as mock_proto_conf:

        mock_proto_conf.return_value = {
            "mqtt": {"class": "mqtt_adapter.MQTTAdapter", "enabled": True, "signals": {}}
        }
        mock_signal_manager.get_enabled_protocols.return_value = ["mqtt"]
        mock_signal_manager.register_adapter_instance.return_value = None
        mock_signal_manager.set_bridge_core.return_value = None
        mock_signal_manager.connect_all_signals.return_value = None

        with patch("importlib.import_module") as mock_import:
            mock_adapter_class = MagicMock()
            mock_import.return_value = MagicMock(MQTTAdapter=mock_adapter_class)
            orchestrator = BridgeOrchestrator()
            orchestrator.config_manager = mock_config_manager
            orchestrator.protocol_config = mock_proto_conf.return_value
            orchestrator.adapter_classes = {"mqtt": mock_adapter_class}
            orchestrator.setup_interfaces()
            # Infrastructure and core must each be constructed exactly once.
            mock_infra.assert_called_once()
            mock_bridge_core.assert_called_once()


def test_bridge_orchestrator_setup_interfaces_no_enabled(mock_config_manager):
    """Test bridge orchestrator setup interfaces when none enabled."""
    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    mock_signal_path = "simulation_bridge.src.core.bridge_orchestrator.SignalManager"
    mock_core_path = "simulation_bridge.src.core.bridge_orchestrator.BridgeCore"
    mock_cert_path = "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates"
    mock_proto_path = "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config"

    with patch(mock_infra_path) as mock_infra, \
            patch(mock_signal_path) as mock_signal_manager, \
            patch(mock_core_path) as mock_bridge_core, \
            patch(mock_cert_path), \
            patch(mock_proto_path) as mock_proto_conf:

        mock_proto_conf.return_value = {
            "mqtt": {"class": "mqtt_adapter.MQTTAdapter", "enabled": False, "signals": {}}
        }
        mock_signal_manager.get_enabled_protocols.return_value = []
        with patch("importlib.import_module"):
            orchestrator = BridgeOrchestrator()
            orchestrator.config_manager = mock_config_manager
            orchestrator.protocol_config = mock_proto_conf.return_value
            orchestrator.adapter_classes = {}
            orchestrator.setup_interfaces()
            mock_infra.assert_called_once()
            # With no enabled protocols the core gets an empty adapter map.
            mock_bridge_core.assert_called_once_with(mock_config_manager, {})


def test_bridge_orchestrator_setup_interfaces_exception(mock_config_manager):
    """Test bridge orchestrator setup interfaces with exception."""
    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    mock_cert_path = "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates"
    mock_proto_path = "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config"

    with patch(mock_infra_path, side_effect=Exception("fail")), \
            patch(mock_cert_path), \
            patch(mock_proto_path) as mock_proto_conf:
        mock_proto_conf.return_value = {}
        orchestrator = BridgeOrchestrator()
        orchestrator.config_manager = mock_config_manager
        orchestrator.protocol_config = {}
        orchestrator.adapter_classes = {}
        # An infrastructure failure must propagate to the caller.
        with pytest.raises(Exception):
            orchestrator.setup_interfaces()
+
+
def test_bridge_orchestrator_signal_manager_integration(mock_config_manager):
    """Test bridge orchestrator signal manager integration."""
    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    mock_signal_path = "simulation_bridge.src.core.bridge_orchestrator.SignalManager"
    mock_core_path = "simulation_bridge.src.core.bridge_orchestrator.BridgeCore"
    mock_cert_path = "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates"
    mock_proto_path = "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config"

    # Patch all external dependencies and protocol adapters
    with patch(mock_infra_path), \
            patch(mock_signal_path) as mock_signal_manager, \
            patch(mock_core_path), \
            patch(mock_cert_path), \
            patch(mock_proto_path) as mock_proto_conf:

        # Simulate protocol configuration with signals
        mock_proto_conf.return_value = {
            "mqtt": {
                "class": "mqtt_adapter.MQTTAdapter",
                "enabled": True,
                "signals": {
                    "on_message": "MQTTAdapter.on_message"
                }
            }
        }
        mock_signal_manager.get_enabled_protocols.return_value = ["mqtt"]
        mock_signal_manager.register_adapter_instance.return_value = None
        mock_signal_manager.set_bridge_core.return_value = None
        mock_signal_manager.connect_all_signals.return_value = None

        # Simulate importlib for the adapter
        with patch("importlib.import_module") as mock_import:
            mock_adapter_class = MagicMock()
            mock_import.return_value = MagicMock(MQTTAdapter=mock_adapter_class)
            orchestrator = BridgeOrchestrator()
            orchestrator.config_manager = mock_config_manager
            orchestrator.protocol_config = mock_proto_conf.return_value
            orchestrator.adapter_classes = {"mqtt": mock_adapter_class}
            orchestrator.setup_interfaces()

            # Verify that SignalManager was used to register and connect signals
            # (MagicMock returns the same instance for repeated calls, so
            # constructing the adapter here matches the registered one).
            mock_signal_manager.register_adapter_instance.assert_called_with(
                "mqtt", mock_adapter_class(mock_config_manager))
            mock_signal_manager.set_bridge_core.assert_called()
            mock_signal_manager.connect_all_signals.assert_called()


def test_signal_manager_connect_and_disconnect(monkeypatch):
    """Test signal manager connect and disconnect."""
    # Use real SignalManager, patch logger and config
    class DummyAdapter:
        """Dummy adapter for testing."""

        def on_message(self):
            """Dummy message handler."""

    dummy_adapter = DummyAdapter()
    protocol = "dummy"
    func_path = "DummyAdapter.on_message"

    # Replace the class-level config so only the dummy protocol exists.
    monkeypatch.setattr(SignalManager, "PROTOCOL_CONFIG", {
        "dummy": {
            "enabled": True,
            "signals": {"on_message": func_path}
        }
    })
    SignalManager.register_adapter_instance(protocol, dummy_adapter)

    # Patch _resolve_callback to return the correct method
    monkeypatch.setattr(
        SignalManager,
        "_resolve_callback",
        lambda func_path, protocol: dummy_adapter.on_message)

    # Connect and disconnect should not raise exceptions
    SignalManager.connect_all_signals()
    SignalManager.disconnect_all_signals()
+
+
def test_bridge_orchestrator_init_calls_ensure_certificates(monkeypatch):
    """Test bridge orchestrator init calls ensure certificates."""
    called = {}

    def fake_ensure_certificates(**kwargs):  # pylint: disable=unused-argument
        called['called'] = True

    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        fake_ensure_certificates)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        MagicMock())
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {})
    BridgeOrchestrator()
    # Construction alone must trigger certificate provisioning.
    assert called.get('called')


def test_bridge_orchestrator_init_ensure_certificates_exception(monkeypatch):
    """Test bridge orchestrator init with ensure certificates exception."""
    def fake_ensure_certificates(**kwargs):  # pylint: disable=unused-argument
        raise RuntimeError("Cert error")

    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        fake_ensure_certificates)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        MagicMock())
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {})
    # Certificate failures are fatal: the constructor must re-raise.
    with pytest.raises(RuntimeError, match="Cert error"):
        BridgeOrchestrator()


def test_bridge_orchestrator_init_certificates_custom_days(monkeypatch):
    """Test bridge orchestrator init with custom certificate validity days."""
    params = {}

    def fake_ensure_certificates(**kwargs):
        # Capture the kwargs so the default validity period can be checked.
        params.update(kwargs)

    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        fake_ensure_certificates)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        MagicMock())
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {})
    BridgeOrchestrator()
    assert params.get("validity_days") == 365
+
+
def test_bridge_orchestrator_start_and_stop(monkeypatch, mock_config_manager):
    """Test that start and stop call SignalManager.disconnect_all_signals and stop on adapters."""
    monkeypatch.setattr(
        "simulation_bridge.src.utils.certs.ensure_certificates",
        lambda **kwargs: None)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        lambda *a, **k: mock_config_manager)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {})
    orchestrator = BridgeOrchestrator()
    orchestrator.adapters = {"mqtt": MagicMock(), "rest": MagicMock()}
    signal_path = "simulation_bridge.src.utils.signal_manager.SignalManager.disconnect_all_signals"
    with patch(signal_path) as disconnect_mock:
        orchestrator.stop()
        disconnect_mock.assert_called_once()
    # Every running adapter must be stopped exactly once.
    for adapter in orchestrator.adapters.values():
        adapter.stop.assert_called_once()


def test_bridge_orchestrator_logs_bridge_id(monkeypatch, caplog):
    """Test that bridge ID is logged during initialization."""
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        lambda **kwargs: None)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        lambda *a, **k: MagicMock(
            get_config=lambda: {
                'simulation_bridge': {'bridge_id': 'test-bridge'},
                'rabbitmq': {}}))
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {})
    with caplog.at_level(logging.INFO):
        BridgeOrchestrator()
    assert "Simulation bridge ID: test-bridge" in caplog.text
+
+
def test_bridge_orchestrator_logs_enabled_protocols(monkeypatch, caplog):
    """Test that enabled protocols are logged."""
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        lambda **kwargs: None)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        lambda *a, **k: MagicMock(
            get_config=lambda: {
                'simulation_bridge': {'bridge_id': 'test-bridge'},
                'rabbitmq': {}}))
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {
            "mqtt": {
                "class": "mqtt_adapter.MQTTAdapter",
                "enabled": True}})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {"mqtt": MagicMock(return_value=MagicMock())})

    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    mock_signal_path = "simulation_bridge.src.core.bridge_orchestrator.SignalManager"
    mock_core_path = "simulation_bridge.src.core.bridge_orchestrator.BridgeCore"

    with patch(mock_infra_path), \
            patch(mock_signal_path) as mock_signal_manager, \
            patch(mock_core_path):
        mock_signal_manager.get_enabled_protocols.return_value = ["mqtt"]
        mock_signal_manager.register_adapter_instance.return_value = None
        mock_signal_manager.set_bridge_core.return_value = None
        mock_signal_manager.connect_all_signals.return_value = None
        orchestrator = BridgeOrchestrator()
        with caplog.at_level(logging.INFO):
            orchestrator.setup_interfaces()
        # Protocol names are upper-cased in the log line.
        assert "Enabled protocols: MQTT" in caplog.text


def test_bridge_orchestrator_logs_no_enabled_protocols(monkeypatch, caplog):
    """Test that warning is logged when no protocols are enabled."""
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        lambda **kwargs: None)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        lambda *a, **k: MagicMock(
            get_config=lambda: {
                'simulation_bridge': {'bridge_id': 'test-bridge'},
                'rabbitmq': {}}))
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {
            "mqtt": {
                "class": "mqtt_adapter.MQTTAdapter",
                "enabled": False}})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {"mqtt": MagicMock(return_value=MagicMock())})

    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    mock_signal_path = "simulation_bridge.src.core.bridge_orchestrator.SignalManager"
    mock_core_path = "simulation_bridge.src.core.bridge_orchestrator.BridgeCore"

    with patch(mock_infra_path), \
            patch(mock_signal_path) as mock_signal_manager, \
            patch(mock_core_path):
        mock_signal_manager.get_enabled_protocols.return_value = []
        orchestrator = BridgeOrchestrator()
        with caplog.at_level(logging.WARNING):
            orchestrator.setup_interfaces()
        assert "No protocol adapters are enabled" in caplog.text


def test_bridge_orchestrator_logs_error_on_exception(monkeypatch, caplog):
    """Test that error is logged when setup_interfaces raises exception."""
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
        lambda **kwargs: None)
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
        lambda *a, **k: MagicMock(
            get_config=lambda: {
                'simulation_bridge': {'bridge_id': 'test-bridge'},
                'rabbitmq': {}}))
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
        lambda: {})
    monkeypatch.setattr(
        "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
        lambda self: {})

    mock_infra_path = "simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure"
    with patch(mock_infra_path, side_effect=Exception("fail")):
        orchestrator = BridgeOrchestrator()
        with caplog.at_level(logging.ERROR):
            # The exception must both be logged and re-raised.
            with pytest.raises(Exception):
                orchestrator.setup_interfaces()
        assert "Error setting up interfaces: fail" in caplog.text
+
+
+def test_bridge_orchestrator_certificates_validation_valid(
+ monkeypatch, tmp_path):
+ """Test that bridge orchestrator works with valid existing certificates."""
+ # Create valid certificates
+ cert_path = tmp_path / "cert.pem"
+ key_path = tmp_path / "key.pem"
+ generator = CertificateGenerator()
+ success, _ = generator.generate_certificate_pair(
+ str(cert_path), str(key_path))
+ assert success
+
+ # Mock ensure_certificates to use our test paths
+ def mock_ensure_certificates(**kwargs):
+ kwargs['cert_path'] = str(cert_path)
+ kwargs['key_path'] = str(key_path)
+ # Call real function with test paths
+ ensure_certificates(**kwargs)
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
+ mock_ensure_certificates)
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
+ lambda *a, **k: MagicMock(
+ get_config=lambda: {
+ 'simulation_bridge': {'bridge_id': 'test-bridge'},
+ 'rabbitmq': {}}))
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
+ lambda: {})
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
+ lambda self: {})
+
+ # Should not raise exception
+ BridgeOrchestrator()
+
+
+def test_bridge_orchestrator_certificates_validation_expired(
+ monkeypatch, tmp_path, caplog):
+ """Test that bridge orchestrator regenerates expired certificates."""
+ # Create expired certificates manually
+ cert_path = tmp_path / "cert.pem"
+ key_path = tmp_path / "key.pem"
+
+ # Generate private key
+ private_key = rsa.generate_private_key(
+ public_exponent=65537,
+ key_size=2048,
+ )
+
+ # Create expired certificate (expired yesterday)
+ now = datetime.datetime.now(datetime.timezone.utc)
+ expired_date = now - datetime.timedelta(days=1)
+
+ subject_name = x509.Name([
+ x509.NameAttribute(NameOID.COUNTRY_NAME, "US"),
+ x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"),
+ x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"),
+ x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test Company"),
+ x509.NameAttribute(NameOID.COMMON_NAME, "localhost"),
+ ])
+
+ cert = (
+ x509.CertificateBuilder()
+ .subject_name(subject_name)
+ .issuer_name(subject_name)
+ .public_key(private_key.public_key())
+ .serial_number(x509.random_serial_number())
+ .not_valid_before(expired_date - datetime.timedelta(days=365))
+ .not_valid_after(expired_date) # Expired
+ .sign(private_key, hashes.SHA256())
+ )
+
+ # Write the expired certificate and key
+ with open(cert_path, "wb") as file:
+ file.write(cert.public_bytes(serialization.Encoding.PEM))
+
+ with open(key_path, "wb") as file:
+ file.write(private_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.TraditionalOpenSSL,
+ encryption_algorithm=serialization.NoEncryption(),
+ ))
+
+ def mock_ensure_certificates(**kwargs):
+ kwargs['cert_path'] = str(cert_path)
+ kwargs['key_path'] = str(key_path)
+ ensure_certificates(**kwargs)
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
+ mock_ensure_certificates)
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
+ lambda *a, **k: MagicMock(
+ get_config=lambda: {
+ 'simulation_bridge': {'bridge_id': 'test-bridge'},
+ 'rabbitmq': {}}))
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
+ lambda: {})
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
+ lambda self: {})
+
+ with caplog.at_level(logging.ERROR):
+ BridgeOrchestrator()
+ assert "Existing certificates are invalid" in caplog.text
+
+
+def test_bridge_orchestrator_certificates_missing_files(
+ monkeypatch, tmp_path, caplog):
+ """Test that bridge orchestrator generates certificates when files are missing."""
+ cert_path = tmp_path / "cert.pem"
+ key_path = tmp_path / "key.pem"
+
+ def mock_ensure_certificates(**kwargs):
+ kwargs['cert_path'] = str(cert_path)
+ kwargs['key_path'] = str(key_path)
+ from simulation_bridge.src.utils.certs import ensure_certificates as real_ensure
+ real_ensure(**kwargs)
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
+ mock_ensure_certificates)
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
+ lambda *a,
+ **k: MagicMock(
+ get_config=lambda: {
+ 'simulation_bridge': {
+ 'bridge_id': 'test-bridge'},
+ 'rabbitmq': {}}))
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
+ lambda: {})
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
+ lambda self: {})
+
+ with caplog.at_level(logging.INFO):
+ BridgeOrchestrator()
+ assert "SSL certificates generated successfully" in caplog.text
+ assert cert_path.exists()
+ assert key_path.exists()
+
+
+def test_bridge_orchestrator_certificates_corrupted_files(
+ monkeypatch, tmp_path, caplog):
+ """Test that bridge orchestrator handles corrupted certificate files."""
+ cert_path = tmp_path / "cert.pem"
+ key_path = tmp_path / "key.pem"
+
+ # Create corrupted files
+ cert_path.write_text("corrupted cert data")
+ key_path.write_text("corrupted key data")
+
+ def mock_ensure_certificates(**kwargs):
+ kwargs['cert_path'] = str(cert_path)
+ kwargs['key_path'] = str(key_path)
+ from simulation_bridge.src.utils.certs import ensure_certificates as real_ensure
+ real_ensure(**kwargs)
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
+ mock_ensure_certificates)
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
+ lambda *a,
+ **k: MagicMock(
+ get_config=lambda: {
+ 'simulation_bridge': {
+ 'bridge_id': 'test-bridge'},
+ 'rabbitmq': {}}))
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
+ lambda: {})
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
+ lambda self: {})
+
+ with caplog.at_level(logging.ERROR):
+ BridgeOrchestrator()
+ assert "Existing certificates are invalid" in caplog.text
+
+
+def test_bridge_orchestrator_certificates_permission_error(
+ monkeypatch, tmp_path):
+ """Test that bridge orchestrator handles permission errors during certificate generation."""
+ def mock_ensure_certificates(**kwargs):
+ # Simulate permission error without actually creating read-only files
+ raise RuntimeError(
+ "Certificate generation failed: [Errno 13] Permission denied: '/test/path/cert.pem'")
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ensure_certificates",
+ mock_ensure_certificates)
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.ConfigManager",
+ lambda *a,
+ **k: MagicMock(
+ get_config=lambda: {
+ 'simulation_bridge': {
+ 'bridge_id': 'test-bridge'},
+ 'rabbitmq': {}}))
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.load_protocol_config",
+ lambda: {})
+ monkeypatch.setattr(
+ "simulation_bridge.src.core.bridge_orchestrator.BridgeOrchestrator._import_adapter_classes",
+ lambda self: {})
+
+ with pytest.raises(RuntimeError, match="Certificate generation failed"):
+ BridgeOrchestrator()
+
+
+def test_logger_setup_and_get_logger(tmp_path, caplog):
+ """Test setup_logger creates file and console handlers, and
+ get_logger returns the same instance."""
+ from simulation_bridge.src.utils import logger as logger_mod
+
+ log_file = tmp_path / "test.log"
+ # Setup logger
+ log = logger_mod.setup_logger(
+ name="TEST-LOGGER",
+ level=logger_mod.logging.DEBUG,
+ log_file=str(log_file),
+ enable_console=True
+ )
+ # Log a message
+ log.info("Logger integration test message")
+ # Check file was created and contains the message
+ log.handlers[0].flush()
+ with open(log_file, "r", encoding="utf-8") as f:
+ content = f.read()
+ assert "Logger integration test message" in content
+
+ # get_logger returns the same logger
+ log2 = logger_mod.get_logger("TEST-LOGGER")
+ assert log is log2
+
+
+def test_logger_console_colored_output(monkeypatch, capsys):
+ """Test that console handler uses colorlog.ColoredFormatter."""
+ from simulation_bridge.src.utils import logger as logger_mod
+
+ # Patch colorlog.ColoredFormatter to track usage
+ called = {}
+ orig_colored_formatter = logger_mod.colorlog.ColoredFormatter
+
+ def fake_colored_formatter(*args, **kwargs):
+ called['used'] = True
+ return orig_colored_formatter(*args, **kwargs)
+ monkeypatch.setattr(
+ logger_mod.colorlog,
+ "ColoredFormatter",
+ fake_colored_formatter)
+
+ log = logger_mod.setup_logger(name="COLOR-LOGGER", enable_console=True)
+ log.info("Color test message")
+ assert called.get('used')
+
+
+def test_logger_no_duplicate_handlers(tmp_path):
+ """Test that setup_logger does not add duplicate handlers if called twice."""
+ from simulation_bridge.src.utils import logger as logger_mod
+
+ log_file = tmp_path / "dup.log"
+ log = logger_mod.setup_logger(name="DUP-LOGGER", log_file=str(log_file))
+ n_handlers = len(log.handlers)
+ # Call again, should not add more handlers
+ log2 = logger_mod.setup_logger(name="DUP-LOGGER", log_file=str(log_file))
+ assert len(log2.handlers) == n_handlers
diff --git a/simulation_bridge/test/unit/__init__.py b/simulation_bridge/test/unit/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/simulation_bridge/test/unit/test_bridge_core.py b/simulation_bridge/test/unit/test_bridge_core.py
new file mode 100644
index 0000000..89afaa0
--- /dev/null
+++ b/simulation_bridge/test/unit/test_bridge_core.py
@@ -0,0 +1,238 @@
+"""
+Test suite for bridge_core.py using pytest and unittest.mock.
+
+This module contains structured and focused tests for BridgeCore class
+and related functions, verifying message handling, connection management,
+and error handling behaviors.
+"""
+
+from unittest.mock import MagicMock, patch
+import pytest
+from pika.exceptions import AMQPConnectionError, AMQPChannelError
+
+from simulation_bridge.src.core import bridge_core
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+
+@pytest.fixture
+def config_manager_mock():
+ """Fixture providing a ConfigManager mock with RabbitMQ config."""
+ cm = MagicMock()
+ cm.get_rabbitmq_config.return_value = {
+ 'host': 'localhost',
+ 'port': 5672,
+ 'username': 'user',
+ 'password': 'pass',
+ 'vhost': '/',
+ 'tls': False,
+ }
+ return cm
+
+
+@pytest.fixture
+def adapters_mock():
+ """Fixture providing dummy adapters dict."""
+ return {'dummy_protocol': MagicMock()}
+
+
+@pytest.fixture
+def bridge_core_instance(config_manager_mock, adapters_mock):
+ """Fixture to create a BridgeCore instance with mocks."""
+ with patch('simulation_bridge.src.core.bridge_core.pika.BlockingConnection') as blocking_conn:
+ connection_mock = MagicMock()
+ channel_mock = MagicMock()
+ connection_mock.channel.return_value = channel_mock
+ connection_mock.is_closed = False
+ blocking_conn.return_value = connection_mock
+ core = bridge_core.BridgeCore(config_manager_mock, adapters_mock)
+ yield core
+
+
+@pytest.fixture
+def mock_logger():
+ """Fixture to patch the logger in bridge_core."""
+ with patch('simulation_bridge.src.core.bridge_core.logger') as log_mock:
+ yield log_mock
+
+
+@pytest.fixture
+def patch_basic_publish(bridge_core_instance):
+ """Fixture to patch channel.basic_publish."""
+ with patch.object(bridge_core_instance.channel, 'basic_publish') as bp:
+ yield bp
+
+
+class TestInitialization:
+ """Tests for BridgeCore initialization and connection setup."""
+
+ def test_initialize_rabbitmq_connection_success(self, config_manager_mock,
+ adapters_mock, mock_logger):
+ """Verify successful RabbitMQ connection initialization."""
+ with patch('simulation_bridge.src.core.bridge_core.pika.BlockingConnection') as blocking_conn: # pylint: disable=line-too-long
+ conn_mock = MagicMock()
+ chan_mock = MagicMock()
+ conn_mock.channel.return_value = chan_mock
+ blocking_conn.return_value = conn_mock
+
+ core = bridge_core.BridgeCore( # pylint: disable=unused-variable
+ config_manager_mock,
+ adapters_mock)
+
+ blocking_conn.assert_called_once()
+ conn_mock.channel.assert_called_once()
+ mock_logger.debug.assert_any_call(
+ "RabbitMQ connection established successfully")
+
+
+class TestEnsureConnection:
+ """Tests for _ensure_connection method ensuring connection health."""
+
+ def test_ensure_connection_active(self, bridge_core_instance):
+ """Return True when connection is active."""
+ bridge_core_instance.connection.is_closed = False
+ result = bridge_core_instance._ensure_connection()
+ assert result is True
+
+ def test_ensure_connection_closed_reconnects(self, bridge_core_instance,
+ mock_logger):
+ """Reconnect when connection is closed and return True."""
+ bridge_core_instance.connection.is_closed = True
+ with patch.object(bridge_core_instance, '_initialize_rabbitmq_connection') as init_conn:
+ init_conn.return_value = None
+ result = bridge_core_instance._ensure_connection()
+ init_conn.assert_called_once()
+ assert result is True
+
+ def test_ensure_connection_fails_returns_false(self, bridge_core_instance,
+ mock_logger):
+ """Return False if reconnection fails with AMQP errors."""
+ bridge_core_instance.connection = None
+ with patch.object(bridge_core_instance, '_initialize_rabbitmq_connection',
+ side_effect=AMQPChannelError("chan error")), \
+ patch('simulation_bridge.src.core.bridge_core.logger') as log_mock:
+ result = bridge_core_instance._ensure_connection()
+ assert result is False
+ log_mock.error.assert_called_once()
+
+
+class TestHandleInputMessage:
+    """Tests for handle_input_message method processing input messages."""
+
+ def test_handle_input_message_valid(self, bridge_core_instance,
+ patch_basic_publish, mock_logger):
+ """Handle valid input message and publish to RabbitMQ."""
+ message = {
+ 'simulation': {
+ 'request_id': '123',
+ 'client_id': 'clientA',
+ 'simulator': 'simX',
+ 'type': 'typeA',
+ 'file': 'file1',
+ 'inputs': {},
+ 'outputs': {}
+ }
+ }
+ kwargs = {
+ 'message': message,
+ 'producer': 'prod',
+ 'consumer': 'cons',
+ 'protocol': 'mqtt'
+ }
+ bridge_core_instance.handle_input_message(None, **kwargs)
+ patch_basic_publish.assert_called_once()
+ call_args = patch_basic_publish.call_args
+ args = call_args[0] # pylint: disable=unused-variable
+ kwargs = call_args[1]
+ assert kwargs.get('exchange') == 'ex.bridge.output'
+ assert kwargs.get('routing_key') == 'prod.cons'
+ body = kwargs.get('body')
+ assert isinstance(body, str)
+ assert '"request_id": "123"' in body
+ assert 'properties' in kwargs
+ mock_logger.info.assert_called_once()
+
+ def test_handle_input_message_missing_simulation(self, bridge_core_instance,
+ patch_basic_publish, mock_logger):
+ """Handle message with missing simulation key gracefully."""
+ message = {'simulation': {
+ 'request_id': 'unknown',
+ 'client_id': '',
+ 'simulator': '',
+ 'type': '',
+ 'file': '',
+ 'inputs': {},
+ 'outputs': {}
+ }}
+ kwargs = {
+ 'message': message,
+ 'producer': 'prod',
+ 'consumer': 'cons',
+ 'protocol': 'rest'
+ }
+ bridge_core_instance.handle_input_message(None, **kwargs)
+ patch_basic_publish.assert_called_once()
+ mock_logger.info.assert_called_once()
+
+
+class TestHandleResultMessages: # pylint: disable=too-few-public-methods
+    """Tests for handling result messages from RabbitMQ and other protocols."""
+
+ def test_handle_result_rabbitmq_message_publishes(self, bridge_core_instance,
+ patch_basic_publish):
+ """Publishes RabbitMQ result message correctly."""
+ message = {
+ 'source': 'src',
+ 'simulation': {},
+ 'data': 'result'
+ }
+ bridge_core_instance.handle_result_rabbitmq_message(
+ None, message=message)
+ patch_basic_publish.assert_called_once()
+ kwargs = patch_basic_publish.call_args[1]
+ assert kwargs['exchange'] == 'ex.bridge.result'
+
+
+class TestPublishMessage:
+    """Tests for _publish_message method publishing messages to RabbitMQ."""
+
+ def test_publish_message_success(self, bridge_core_instance, patch_basic_publish,
+ mock_logger):
+ """Successfully publish a message on RabbitMQ."""
+ message = {'simulation': {'request_id': '1'}}
+ bridge_core_instance._publish_message(
+ 'prod', 'cons', message, exchange='ex.test', protocol='test')
+ patch_basic_publish.assert_called_once()
+
+ calls = mock_logger.debug.call_args_list
+ assert any(
+ call.args[0] == "Message routed to exchange '%s': %s -> %s, protocol=%s" and
+ call.args[1] == 'ex.test' and
+ call.args[2] == 'prod' and
+ call.args[3] == 'cons' and
+ call.args[4] == 'test'
+ for call in calls
+ )
+
+ def test_publish_message_connection_lost_then_retries(self, bridge_core_instance,
+ patch_basic_publish,
+ mock_logger):
+ """Reconnect and retry publish once if first attempt raises AMQP error."""
+ def side_effect(*args, **kwargs):
+ if not hasattr(side_effect, 'called'):
+ side_effect.called = True
+ raise AMQPConnectionError("conn lost")
+ return True
+
+ patch_basic_publish.side_effect = side_effect
+ message = {'simulation': {'request_id': '1'}}
+ with patch.object(bridge_core_instance, '_initialize_rabbitmq_connection') as init_conn:
+ bridge_core_instance._publish_message(
+ 'prod', 'cons', message, exchange='ex.test', protocol='test')
+ assert patch_basic_publish.call_count == 2
+ init_conn.assert_called_once()
+ calls = mock_logger.debug.call_args_list
+ assert any(
+ call.args[0] % call.args[1:] == "Message routed to exchange 'ex.test' after reconnection: prod -> cons" # pylint: disable=line-too-long
+ for call in calls
+ )
diff --git a/simulation_bridge/test/unit/test_bridge_infrastructure.py b/simulation_bridge/test/unit/test_bridge_infrastructure.py
new file mode 100644
index 0000000..63fcf00
--- /dev/null
+++ b/simulation_bridge/test/unit/test_bridge_infrastructure.py
@@ -0,0 +1,195 @@
+"""
+Test module for simulation_bridge.src.core.bridge_infrastructure.
+
+Uses pytest and unittest.mock to validate the behavior of RabbitMQInfrastructure.
+"""
+
+# pylint: disable=too-many-arguments, unused-argument, protected-access
+# pylint: disable=redefined-outer-name, attribute-defined-outside-init
+
+from unittest import mock
+import pytest
+
+from simulation_bridge.src.core import bridge_infrastructure
+
+
+@pytest.fixture
+def config_fixture():
+ """Returns a mock config manager with valid RabbitMQ config."""
+ config = {
+ 'username': 'user',
+ 'password': 'pass',
+ 'host': 'localhost',
+ 'port': 5672,
+ 'vhost': '/',
+ 'infrastructure': {
+ 'exchanges': [
+ {
+ 'name': 'ex1',
+ 'type': 'direct',
+ 'durable': True,
+ 'auto_delete': False,
+ 'internal': False
+ }
+ ],
+ 'queues': [
+ {
+ 'name': 'q1',
+ 'durable': True,
+ 'exclusive': False,
+ 'auto_delete': False
+ }
+ ],
+ 'bindings': [
+ {
+ 'exchange': 'ex1',
+ 'queue': 'q1',
+ 'routing_key': 'rk1'
+ }
+ ]
+ }
+ }
+ manager = mock.Mock()
+ manager.get_rabbitmq_config.return_value = config
+ return manager
+
+
+@pytest.fixture
+def pika_fixture():
+ """Provides a mocked connection and channel for RabbitMQ."""
+ connection = mock.Mock()
+ channel = mock.Mock()
+ connection.channel.return_value = channel
+ return connection, channel
+
+
+class TestSetupMethod:
+ """Tests for setup() method in RabbitMQInfrastructure."""
+
+ @pytest.fixture(autouse=True)
+ def _setup(self, config_fixture, monkeypatch, pika_fixture):
+ """Patches Pika components and initializes the class under test."""
+ conn, chan = pika_fixture
+ monkeypatch.setattr('pika.PlainCredentials', lambda u, p: mock.Mock())
+ monkeypatch.setattr('pika.BlockingConnection', lambda p: conn)
+ monkeypatch.setattr(
+ 'pika.ConnectionParameters',
+ lambda **kwargs: mock.Mock())
+
+ self.conn = conn
+ self.chan = chan
+ self.infra = bridge_infrastructure.RabbitMQInfrastructure(
+ config_fixture)
+
+ def test_setup_successful(self):
+ """Test that all setup steps complete and close is called."""
+ self.infra.setup()
+ self.chan.exchange_declare.assert_called_once()
+ self.chan.queue_declare.assert_called_once()
+ self.chan.queue_bind.assert_called_once()
+ self.conn.close.assert_called_once()
+
+ def test_setup_raises_on_exchange_error(self):
+ """Test setup raises exception and logs when exchange setup fails."""
+ self.chan.exchange_declare.side_effect = Exception("Exchange error")
+ with pytest.raises(Exception, match="Exchange error"):
+ self.infra.setup()
+
+ def test_setup_raises_on_queue_error(self):
+ """Test setup raises exception when queue declaration fails."""
+ self.chan.queue_declare.side_effect = Exception("Queue error")
+ with pytest.raises(Exception, match="Queue error"):
+ self.infra.setup()
+
+ def test_setup_raises_on_binding_error(self):
+ """Test setup raises exception when binding setup fails."""
+ self.chan.queue_bind.side_effect = Exception("Binding error")
+ with pytest.raises(Exception, match="Binding error"):
+ self.infra.setup()
+
+
+class TestReconnectMethod:
+ """Tests for reconnect() method in RabbitMQInfrastructure."""
+
+ @pytest.fixture(autouse=True)
+ def _setup(self, config_fixture, monkeypatch, pika_fixture):
+ """Prepare mocks and monkeypatches for reconnect tests."""
+ conn, chan = pika_fixture
+ monkeypatch.setattr('pika.PlainCredentials', lambda u, p: mock.Mock())
+ monkeypatch.setattr('pika.BlockingConnection', lambda p: conn)
+ monkeypatch.setattr(
+ 'pika.ConnectionParameters',
+ lambda **kwargs: mock.Mock())
+
+ self.conn = conn
+ self.chan = chan
+ self.infra = bridge_infrastructure.RabbitMQInfrastructure(
+ config_fixture)
+
+ def test_reconnect_when_closed(self):
+ """Should reconnect if the connection is closed."""
+ self.conn.is_closed = True
+ new_conn = mock.Mock()
+ new_conn.channel.return_value = mock.Mock()
+
+ with mock.patch('pika.BlockingConnection', return_value=new_conn):
+ result = self.infra.reconnect()
+ assert result == new_conn
+
+ def test_reconnect_when_open(self):
+ """Should not reconnect if the connection is already open."""
+ self.conn.is_closed = False
+ result = self.infra.reconnect()
+ assert result == self.conn
+
+
+class TestPrivateSetupMethods:
+ """Tests for private _setup_exchanges, _setup_queues, _setup_bindings."""
+
+ @pytest.fixture(autouse=True)
+ def _setup(self, config_fixture, monkeypatch, pika_fixture):
+ """Set up mocks and test instance."""
+ conn, chan = pika_fixture
+ monkeypatch.setattr('pika.PlainCredentials', lambda u, p: mock.Mock())
+ monkeypatch.setattr('pika.BlockingConnection', lambda p: conn)
+ monkeypatch.setattr(
+ 'pika.ConnectionParameters',
+ lambda **kwargs: mock.Mock())
+
+ self.conn = conn
+ self.chan = chan
+ self.infra = bridge_infrastructure.RabbitMQInfrastructure(
+ config_fixture)
+
+ def test_exchanges_success(self):
+ """Test _setup_exchanges declares configured exchanges."""
+ self.infra._setup_exchanges()
+ self.chan.exchange_declare.assert_called_once()
+
+ def test_exchanges_exception(self):
+ """Test _setup_exchanges raises and logs on error."""
+ self.chan.exchange_declare.side_effect = Exception("exch error")
+ with pytest.raises(Exception, match="exch error"):
+ self.infra._setup_exchanges()
+
+ def test_queues_success(self):
+ """Test _setup_queues declares configured queues."""
+ self.infra._setup_queues()
+ self.chan.queue_declare.assert_called_once()
+
+ def test_queues_exception(self):
+ """Test _setup_queues raises and logs on error."""
+ self.chan.queue_declare.side_effect = Exception("queue error")
+ with pytest.raises(Exception, match="queue error"):
+ self.infra._setup_queues()
+
+ def test_bindings_success(self):
+ """Test _setup_bindings binds queues correctly."""
+ self.infra._setup_bindings()
+ self.chan.queue_bind.assert_called_once()
+
+ def test_bindings_exception(self):
+ """Test _setup_bindings raises and logs on error."""
+ self.chan.queue_bind.side_effect = Exception("bind error")
+ with pytest.raises(Exception, match="bind error"):
+ self.infra._setup_bindings()
diff --git a/simulation_bridge/test/unit/test_bridge_orchestrator.py b/simulation_bridge/test/unit/test_bridge_orchestrator.py
new file mode 100644
index 0000000..5014673
--- /dev/null
+++ b/simulation_bridge/test/unit/test_bridge_orchestrator.py
@@ -0,0 +1,141 @@
+"""Test suite for the BridgeOrchestrator module."""
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+from unittest.mock import patch, MagicMock
+import pytest
+from simulation_bridge.src.core import bridge_orchestrator
+
+
+@pytest.fixture
+def mock_config():
+ """Mock base configuration with complete RabbitMQ fields."""
+ return {
+ 'simulation_bridge': {'bridge_id': 'bridge-123'},
+ 'rabbitmq': {
+ 'host': 'localhost',
+ 'port': 5672,
+ 'username': 'guest',
+ 'password': 'guest',
+ 'vhost': '/',
+ 'exchange': 'sim_exchange',
+ }
+ }
+
+
+@pytest.fixture
+def config_manager_mock(mock_config):
+ """Fixture for mocking ConfigManager with valid config."""
+ with patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.ConfigManager'
+ ) as mock_cm:
+ instance = mock_cm.return_value
+ instance.get_config.return_value = mock_config
+ instance.get_rabbitmq_config.return_value = mock_config['rabbitmq']
+ yield instance
+
+
+@pytest.fixture
+def orchestrator(config_manager_mock):
+ """Returns BridgeOrchestrator instance with dependencies mocked."""
+ with patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.get_logger'
+ ), patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.ensure_certificates'
+ ), patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.load_protocol_config',
+ return_value={'mqtt': {'class': 'mqtt_adapter.MQTTAdapter'}}
+ ), patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.importlib.import_module'
+ ) as import_mod:
+ mock_class = MagicMock()
+ import_mod.return_value = MagicMock(MQTTAdapter=mock_class)
+ return bridge_orchestrator.BridgeOrchestrator()
+
+
+class TestSetupInterfaces:
+ """Tests for BridgeOrchestrator.setup_interfaces method."""
+
+ def test_setup_success(self, orchestrator):
+ """Test successful setup with enabled protocols."""
+ with patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure'
+ ) as rabbit_mock, patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.SignalManager'
+ ) as signal_mock, patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.BridgeCore'
+ ) as core_mock:
+ rabbit_instance = rabbit_mock.return_value
+ rabbit_instance.setup.return_value = None
+
+ signal_mock.get_enabled_protocols.return_value = ['mqtt']
+ orchestrator.setup_interfaces()
+
+ rabbit_instance.setup.assert_called_once()
+ signal_mock.register_adapter_instance.assert_called_once()
+ signal_mock.set_bridge_core.assert_called_once_with(
+ core_mock.return_value)
+ signal_mock.connect_all_signals.assert_called_once()
+
+ def test_setup_raises_exception(self, orchestrator):
+ """Test setup raises exception if infrastructure fails."""
+ with patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.RabbitMQInfrastructure'
+ ) as rabbit_mock:
+ rabbit_instance = rabbit_mock.return_value
+ rabbit_instance.setup.side_effect = ValueError("fail")
+ with pytest.raises(ValueError):
+ orchestrator.setup_interfaces()
+
+
+class TestStartStop:
+ """Tests for BridgeOrchestrator start/stop behavior."""
+
+ def test_stop_clean_shutdown(self, orchestrator):
+ """Ensure adapters and signals shut down cleanly."""
+ adapter_mock = MagicMock()
+ adapter_mock.thread = MagicMock()
+ orchestrator.adapters = {'mqtt': adapter_mock}
+ with patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.SignalManager'
+ ) as signal:
+ orchestrator.stop()
+ adapter_mock.stop.assert_called_once()
+ adapter_mock.thread.join.assert_called_once()
+ signal.disconnect_all_signals.assert_called_once()
+
+ def test_start_keyboard_interrupt(self, orchestrator):
+ adapter = MagicMock()
+ adapter.is_running = True
+ orchestrator.adapters = {'mqtt': adapter}
+ with patch.object(orchestrator, 'setup_interfaces'), \
+ patch('simulation_bridge.src.core.bridge_orchestrator.time.sleep',
+ side_effect=KeyboardInterrupt), \
+ patch.object(orchestrator, 'stop') as stop_mock:
+ try:
+ orchestrator.start()
+ except SystemExit:
+ pass
+ adapter.start.assert_called_once()
+ stop_mock.assert_called_once()
+
+
+class TestAdapterImport: # pylint: disable=too-few-public-methods
+ """Tests for internal _import_adapter_classes logic."""
+
+ def test_import_adapter_classes(self, config_manager_mock):
+ """Test dynamic loading of adapter classes from config."""
+ with patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.load_protocol_config',
+ return_value={'mqtt': {'class': 'mqtt_adapter.MQTTAdapter'}}
+ ), patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.importlib.import_module'
+ ) as import_mod, patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.get_logger'
+ ), patch(
+ 'simulation_bridge.src.core.bridge_orchestrator.ensure_certificates'
+ ):
+ mock_class = MagicMock()
+ import_mod.return_value = MagicMock(MQTTAdapter=mock_class)
+ orchestrator = bridge_orchestrator.BridgeOrchestrator()
+ assert 'mqtt' in orchestrator.adapter_classes
+ assert orchestrator.adapter_classes['mqtt'] == mock_class
diff --git a/simulation_bridge/test/unit/test_certs.py b/simulation_bridge/test/unit/test_certs.py
new file mode 100644
index 0000000..8e6ddd0
--- /dev/null
+++ b/simulation_bridge/test/unit/test_certs.py
@@ -0,0 +1,273 @@
+"""
+Test suite for simulation_bridge.src.utils.certs module.
+
+Uses pytest and unittest.mock for structured, isolated tests
+of CertificateGenerator and ensure_certificates functionalities.
+"""
+
+import datetime
+from unittest import mock
+import pytest
+from simulation_bridge.src.utils import certs
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+
+@pytest.fixture
+def cert_generator():
+ """Fixture providing a CertificateGenerator instance with default params."""
+ return certs.CertificateGenerator()
+
+
+@pytest.fixture
+def mock_logger(monkeypatch):
+ """Fixture to mock the module-level logger."""
+ mock_log = mock.Mock()
+ monkeypatch.setattr(certs, "logger", mock_log)
+ return mock_log
+
+
+class TestFilesExist:
+ """Tests for CertificateGenerator.files_exist method."""
+
+ def test_files_exist_both_present(self, monkeypatch, cert_generator):
+ """Return True if both cert and key files exist."""
+ monkeypatch.setattr(certs.Path, "exists", lambda self: True)
+ assert cert_generator.files_exist("cert.pem", "key.pem") is True
+
+ def test_files_exist_one_missing(self, monkeypatch, cert_generator):
+ """Return False if either cert or key file is missing."""
+
+ def exists_side_effect(self):
+ return self.name == "cert.pem"
+
+ monkeypatch.setattr(certs.Path, "exists", exists_side_effect)
+ assert cert_generator.files_exist("cert.pem", "key.pem") is False
+
+
+class TestValidateCertificates:
+ """Tests for CertificateGenerator._validate_certificates."""
+
+ def test_validate_certificates_valid(self, monkeypatch):
+ """
+ Test _validate_certificates returns True for valid cert and matching key.
+ """
+ cert_generator = certs.CertificateGenerator()
+
+ mock_cert = mock.Mock()
+ now = datetime.datetime.now(datetime.timezone.utc)
+ mock_cert.not_valid_before_utc = now - datetime.timedelta(days=1)
+ mock_cert.not_valid_after_utc = now + datetime.timedelta(days=365)
+
+ mock_public_key = mock.Mock()
+ mock_public_key.key_size = 2048
+ mock_public_key.public_numbers.return_value = mock.Mock(n=123, e=65537)
+ mock_cert.public_key.return_value = mock_public_key
+
+ mock_private_key = mock.Mock()
+ mock_private_public_key = mock.Mock()
+ mock_private_public_key.key_size = 2048
+ mock_private_public_key.public_numbers.return_value = mock.Mock(
+ n=123, e=65537)
+ mock_private_key.public_key.return_value = mock_private_public_key
+
+ monkeypatch.setattr("builtins.open", mock.mock_open(read_data=b"data"))
+ monkeypatch.setattr(
+ certs.x509,
+ "load_pem_x509_certificate",
+ lambda data: mock_cert)
+ monkeypatch.setattr(
+ certs.serialization, "load_pem_private_key", lambda data,
+ password=None: mock_private_key
+ )
+
+ valid, msg = cert_generator._validate_certificates(
+ "cert.pem", "key.pem")
+ assert valid is True, f"Expected True but got False with msg: {msg}"
+
+ def test_validate_certificates_expired(self, monkeypatch, cert_generator):
+ """Return False with expiration message if cert expired."""
+ mock_cert = mock.Mock()
+ now = datetime.datetime.now(datetime.timezone.utc)
+ mock_cert.not_valid_after_utc = now - datetime.timedelta(days=1)
+ mock_cert.not_valid_before_utc = now - datetime.timedelta(days=10)
+
+ monkeypatch.setattr("builtins.open", mock.mock_open(read_data=b"data"))
+ monkeypatch.setattr(certs.x509, "load_pem_x509_certificate",
+ lambda data: mock_cert)
+ monkeypatch.setattr(certs.serialization, "load_pem_private_key",
+ lambda data, password=None: mock.Mock())
+
+ valid, msg = cert_generator._validate_certificates(
+ "cert.pem", "key.pem")
+ assert valid is False
+ assert "expired" in msg
+
+ def test_validate_certificates_file_not_found(self, cert_generator):
+ """Return False with file not found message if file missing."""
+
+ def open_side_effect(*args, **kwargs):
+ raise FileNotFoundError("No such file")
+
+ with mock.patch("builtins.open", side_effect=open_side_effect):
+ valid, msg = cert_generator._validate_certificates(
+ "cert.pem", "key.pem")
+
+ assert valid is False
+ assert "not found" in msg
+
+ def test_validate_certificates_key_mismatch(
+ self, monkeypatch, cert_generator):
+ """Return False if certificate and private key do not match."""
+ now = datetime.datetime.now(datetime.timezone.utc)
+ mock_cert = mock.Mock()
+ mock_cert.not_valid_after_utc = now + datetime.timedelta(days=60)
+ mock_cert.not_valid_before_utc = now - datetime.timedelta(days=1)
+
+ cert_pub_key = mock.Mock(
+ key_size=2048, public_numbers=mock.Mock(
+ n=123, e=65537))
+ priv_pub_key = mock.Mock(
+ key_size=2048, public_numbers=mock.Mock(
+ n=999, e=65537))
+
+ mock_cert.public_key.return_value = cert_pub_key
+ mock_private_key = mock.Mock()
+ mock_private_key.public_key.return_value = priv_pub_key
+
+ monkeypatch.setattr("builtins.open", mock.mock_open(read_data=b"data"))
+ monkeypatch.setattr(
+ certs.x509,
+ "load_pem_x509_certificate",
+ lambda data: mock_cert)
+ monkeypatch.setattr(certs.serialization, "load_pem_private_key",
+ lambda data, password=None: mock_private_key)
+
+ valid, msg = cert_generator._validate_certificates(
+ "cert.pem", "key.pem")
+ assert valid is False
+ assert "do not match" in msg
+
+
+class TestGenerateCertificatePair:
+ """Tests for CertificateGenerator.generate_certificate_pair."""
+
+ def test_generate_success(self, monkeypatch, cert_generator):
+ """Generate cert/key successfully when no existing files or forced."""
+ monkeypatch.setattr(cert_generator, "files_exist", lambda c, k: False)
+ monkeypatch.setattr(
+ cert_generator,
+ "files_exist",
+ mock.Mock(
+ return_value=False))
+ monkeypatch.setattr(cert_generator, "_build_certificate_name",
+ lambda **kwargs: mock.Mock())
+ monkeypatch.setattr(cert_generator, "_create_certificate",
+ lambda key, name, dns=None: mock.Mock())
+ monkeypatch.setattr(
+ cert_generator,
+ "_write_private_key",
+ lambda key,
+ path: None)
+ monkeypatch.setattr(
+ cert_generator,
+ "_write_certificate",
+ lambda cert,
+ path: None)
+
+ success, msg = cert_generator.generate_certificate_pair()
+ assert success is True
+ assert "successfully" in msg
+
+ def test_generate_existing_files_no_force(
+ self, monkeypatch, cert_generator):
+ """Return False and message if files exist and no force overwrite."""
+ monkeypatch.setattr(cert_generator, "files_exist", lambda c, k: True)
+
+ success, msg = cert_generator.generate_certificate_pair(
+ force_overwrite=False)
+ assert success is False
+ assert "already exist" in msg
+
+ def test_generate_exception_handling(self, monkeypatch, cert_generator):
+ """Return False and error message if exception raised during generation."""
+ monkeypatch.setattr(cert_generator, "files_exist", lambda c, k: False)
+ monkeypatch.setattr(cert_generator, "_generate_private_key",
+ lambda: (_ for _ in ()).throw(ValueError("fail")))
+
+ success, msg = cert_generator.generate_certificate_pair()
+ assert success is False
+ assert "Error generating" in msg
+
+
+class TestEnsureCertificates:
+ """Tests for the ensure_certificates function."""
+
+ def test_ensure_certificates_valid_existing(self, monkeypatch, mock_logger):
+ """Does nothing if valid certs exist and no force overwrite."""
+ gen_mock = mock.Mock()
+ gen_mock.files_exist.return_value = True
+ gen_mock._validate_certificates.return_value = (True, "valid")
+ monkeypatch.setattr(
+ certs,
+ "CertificateGenerator",
+ lambda **kwargs: gen_mock)
+
+ certs.ensure_certificates(force_overwrite=False)
+
+ gen_mock.files_exist.assert_called_once()
+ gen_mock._validate_certificates.assert_called_once()
+ mock_logger.info.assert_called_with("SSL certificates are valid")
+
+ def test_ensure_certificates_invalid_existing_force(
+ self, monkeypatch, mock_logger):
+ """Regenerates certs if existing certs are invalid."""
+ gen_mock = mock.Mock()
+ gen_mock.files_exist.return_value = True
+ gen_mock._validate_certificates.return_value = (False, "expired")
+ gen_mock.generate_certificate_pair.return_value = (True, "generated")
+ monkeypatch.setattr(
+ certs,
+ "CertificateGenerator",
+ lambda **kwargs: gen_mock)
+
+ certs.ensure_certificates(force_overwrite=False)
+
+ gen_mock.generate_certificate_pair.assert_called_once()
+ mock_logger.error.assert_called_with(
+ "Existing certificates are invalid (%s), regenerating...", "expired"
+ )
+ mock_logger.info.assert_called_with(
+ "SSL certificates generated successfully")
+
+ def test_ensure_certificates_not_exist(self, monkeypatch, mock_logger):
+ """Generates certs if files do not exist."""
+ gen_mock = mock.Mock()
+ gen_mock.files_exist.return_value = False
+ gen_mock.generate_certificate_pair.return_value = (True, "generated")
+ monkeypatch.setattr(
+ certs,
+ "CertificateGenerator",
+ lambda **kwargs: gen_mock)
+
+ certs.ensure_certificates()
+
+ gen_mock.generate_certificate_pair.assert_called_once()
+ mock_logger.debug.assert_any_call(
+ "SSL certificates not found, generating new ones...")
+ mock_logger.info.assert_called_with(
+ "SSL certificates generated successfully")
+
+ def test_ensure_certificates_generation_failure(self, monkeypatch):
+ """Raises RuntimeError if generation fails."""
+ gen_mock = mock.Mock()
+ gen_mock.files_exist.return_value = False
+ gen_mock.generate_certificate_pair.return_value = (False, "fail reason")
+ monkeypatch.setattr(
+ certs,
+ "CertificateGenerator",
+ lambda **kwargs: gen_mock)
+
+ with pytest.raises(RuntimeError) as excinfo:
+ certs.ensure_certificates()
+ assert "Certificate generation failed" in str(excinfo.value)
diff --git a/simulation_bridge/test/unit/test_config_loader.py b/simulation_bridge/test/unit/test_config_loader.py
new file mode 100644
index 0000000..df98e25
--- /dev/null
+++ b/simulation_bridge/test/unit/test_config_loader.py
@@ -0,0 +1,183 @@
+# pylint: disable=redefined-outer-name
+"""
+test_config_loader.py - Unit tests for simulation_bridge.src.utils.config_loader
+
+Tests for configuration loading utilities, focusing on file loading, environment
+variable substitution, error handling, and protocol config loading.
+"""
+
+import io
+import json
+from pathlib import Path
+from unittest import mock
+
+import pytest
+
+from simulation_bridge.src.utils import config_loader
+
+
+@pytest.fixture
+def sample_yaml():
+ """Fixture providing sample YAML content."""
+ return """
+ database:
+ host: localhost
+ port: 5432
+ user: ${DB_USER:defaultuser}
+ password: ${DB_PASS:defaultpass}
+ """
+
+
+@pytest.fixture
+def protocol_json():
+ """Fixture providing sample protocol JSON content."""
+ return {
+ "protocols": [
+ {"name": "proto1", "version": "1.0"},
+ {"name": "proto2", "version": "2.0"},
+ ]
+ }
+
+
+@pytest.fixture
+def logger_mock():
+ """Fixture mocking the logger."""
+ with mock.patch("simulation_bridge.src.utils.config_loader.logger") as m_logger:
+ yield m_logger
+
+
+class TestLoadConfig:
+ """Test cases for load_config function."""
+
+ def test_load_default_config_success(
+ self, sample_yaml, logger_mock, monkeypatch):
+ """Test loading default config from package resources successfully."""
+ mock_file = io.StringIO(sample_yaml)
+ monkeypatch.setattr(
+ "simulation_bridge.src.utils.config_loader.resources.open_text",
+ lambda pkg, fname: mock_file
+ )
+ config = config_loader.load_config()
+ assert config["database"]["host"] == "localhost"
+ assert config["database"]["user"] == "defaultuser"
+ logger_mock.debug.assert_called_with(
+ "Loading default configuration file")
+
+ def test_load_default_config_file_not_found(self, monkeypatch):
+ """Test FileNotFoundError when default config file missing in package."""
+ def raise_file_not_found(pkg, fname):
+ raise FileNotFoundError
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.utils.config_loader.resources.open_text",
+ raise_file_not_found
+ )
+ with pytest.raises(FileNotFoundError, match="Default configuration file not found"):
+ config_loader.load_config()
+
+ def test_load_config_from_path_success(
+ self, sample_yaml, tmp_path, logger_mock):
+ """Test loading config from a specified path successfully."""
+ config_file = tmp_path / "config.yaml"
+ config_file.write_text(sample_yaml, encoding="utf-8")
+
+ config = config_loader.load_config(str(config_file))
+ assert config["database"]["port"] == 5432
+ assert config["database"]["password"] == "defaultpass"
+ logger_mock.debug.assert_called_with(
+ "Loading configuration file from path: %s", str(config_file)
+ )
+
+ def test_load_config_file_not_found(self, tmp_path):
+ """Test FileNotFoundError raised if config file path does not exist."""
+ missing_path = tmp_path / "missing.yaml"
+ with pytest.raises(FileNotFoundError, match="Configuration file not found"):
+ config_loader.load_config(str(missing_path))
+
+
+class TestEnvVarSubstitution:
+ """Test cases for _substitute_env_vars function."""
+
+ # pylint: disable=protected-access
+
+ def test_substitute_env_vars_dict_with_defaults(self, monkeypatch):
+ """Test environment variables substitution with default values in dict."""
+ config = {
+ "path": "/data/${DATA_DIR:default_dir}",
+ "nested": {"user": "${USER_NAME:anonymous}"}
+ }
+ monkeypatch.setenv("USER_NAME", "tester")
+ result = config_loader._substitute_env_vars(config)
+ assert result["path"] == "/data/default_dir"
+ assert result["nested"]["user"] == "tester"
+
+ def test_substitute_env_vars_list(self, monkeypatch):
+ """Test substitution in list items."""
+ config = ["${VAR1:def1}", "${VAR2:def2}", "no_substitution"]
+ monkeypatch.setenv("VAR2", "value2")
+ result = config_loader._substitute_env_vars(config)
+ assert result == ["def1", "value2", "no_substitution"]
+
+ def test_substitute_env_vars_string_no_env(self):
+ """Test string without environment variables remains unchanged."""
+ assert config_loader._substitute_env_vars(
+ "plainstring") == "plainstring"
+
+ def test_substitute_env_vars_partial_env_string(self, monkeypatch):
+ """Test string with embedded environment variable replaced correctly."""
+ monkeypatch.setenv("HOME", "/home/user")
+ value = "Path is ${HOME}/folder"
+ expected = "Path is /home/user/folder"
+ assert config_loader._substitute_env_vars(value) == expected
+
+ def test_substitute_env_vars_empty_default(self):
+ """Test substitution with no default and unset env var results in empty string."""
+ config = "${UNSET_ENV:}"
+ assert config_loader._substitute_env_vars(config) == ""
+
+
+class TestLoadProtocolConfig: # pylint: disable=too-few-public-methods
+ """Test cases for load_protocol_config function."""
+
+ def test_load_protocol_config_success(self, protocol_json):
+ """Test successful loading and parsing of protocol config JSON file."""
+ json_str = json.dumps(protocol_json)
+ mock_open = mock.mock_open(read_data=json_str)
+
+ config_file = Path(config_loader.__file__).parent.parent / \
+ "protocol_adapters/adapters_signal.json"
+
+ with mock.patch("builtins.open", mock_open) as m_open:
+ protocols = config_loader.load_protocol_config()
+
+ m_open.assert_called_once_with(config_file, 'r', encoding='utf-8')
+ assert isinstance(protocols, list)
+ assert protocols == protocol_json["protocols"]
+
+
+class TestErrorHandling:
+ """Test cases for exception and error handling."""
+
+ def test_load_config_handles_yaml_error(self, tmp_path):
+ """Test load_config raises yaml.YAMLError on invalid YAML content."""
+ bad_yaml = "key: value: another" # Invalid YAML syntax
+ file_path = tmp_path / "bad.yaml"
+ file_path.write_text(bad_yaml, encoding="utf-8")
+
+ with pytest.raises(Exception) as exc_info:
+ config_loader.load_config(str(file_path))
+ assert "mapping values are not allowed" in str(exc_info.value) or \
+ "could not find expected ':'" in str(exc_info.value)
+
+ def test_load_config_handles_keyboard_interrupt(self, monkeypatch):
+ """Test load_config propagates KeyboardInterrupt exceptions."""
+ def raise_keyboard_interrupt(*args, **kwargs):
+ raise KeyboardInterrupt
+
+ monkeypatch.setattr(
+ "simulation_bridge.src.utils.config_loader.resources.open_text",
+ raise_keyboard_interrupt
+ )
+
+ with pytest.raises(KeyboardInterrupt):
+ config_loader.load_config()
diff --git a/simulation_bridge/test/unit/test_config_manager.py b/simulation_bridge/test/unit/test_config_manager.py
new file mode 100644
index 0000000..f79e655
--- /dev/null
+++ b/simulation_bridge/test/unit/test_config_manager.py
@@ -0,0 +1,198 @@
+"""Unit tests for simulation_bridge.src.utils.config_manager module."""
+
+from unittest import mock
+
+import pytest
+from pydantic import ValidationError
+
+from simulation_bridge.src.utils import config_manager
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name,line-too-long
+
+
+@pytest.fixture
+def sample_valid_config_dict():
+ """Fixture che fornisce un dizionario di configurazione valido di esempio."""
+ return {
+ "simulation_bridge": {"bridge_id": "test_bridge"},
+ "rabbitmq": {
+ "host": "localhost",
+ "port": 5672,
+ "vhost": "/",
+ "username": "guest",
+ "password": "guest",
+ "tls": False,
+ "infrastructure": {
+ "exchanges": [],
+ "queues": [],
+ "bindings": []
+ }
+ },
+ "mqtt": {
+ "host": "mqtt.local",
+ "port": 1883,
+ "keepalive": 60,
+ "input_topic": "input",
+ "output_topic": "output",
+ "qos": 1,
+ "username": "user",
+ "password": "pass",
+ "tls": False
+ },
+ "rest": {
+ "host": "127.0.0.1",
+ "port": 8000,
+ "endpoint": "/api",
+ "debug": False,
+ "certfile": "/path/to/cert.pem",
+ "keyfile": "/path/to/key.pem"
+ },
+ "logging": {
+ "level": "INFO",
+ "format": "%(message)s",
+ "file": "logfile.log"
+ }
+ }
+
+
+@pytest.fixture
+def logger_mock(monkeypatch):
+ """Fixture che sostituisce il logger con un mock."""
+ logger = mock.Mock()
+ monkeypatch.setattr(config_manager, "logger", logger)
+ return logger
+
+
+@pytest.fixture
+def load_config_mock(monkeypatch):
+ """Fixture che sostituisce load_config con un mock."""
+ patcher = mock.patch(
+ "simulation_bridge.src.utils.config_manager.load_config")
+ yield patcher.start()
+ patcher.stop()
+
+
+class TestConfigManagerInit:
+ """Test per il costruttore di ConfigManager e casi di caricamento config."""
+
+ def test_init_loads_valid_config(
+ self, sample_valid_config_dict, load_config_mock):
+ """Verifica che ConfigManager carichi correttamente una config valida."""
+ load_config_mock.return_value = sample_valid_config_dict
+
+ manager = config_manager.ConfigManager("fake_path.yaml")
+
+ assert manager.config == manager._validate_config(
+ sample_valid_config_dict)
+ load_config_mock.assert_called_once_with(manager.config_path)
+
+ def test_init_fallback_default_on_file_not_found(
+ self, load_config_mock, logger_mock):
+ """Se il file non esiste, deve loggare warning e usare config di default."""
+ load_config_mock.side_effect = FileNotFoundError("file missing")
+
+ manager = config_manager.ConfigManager("missing.yaml")
+
+ logger_mock.warning.assert_called_once()
+ assert manager.config == manager.get_default_config()
+
+ def test_init_fallback_default_on_validation_error(
+ self, load_config_mock, logger_mock):
+ """Se la validazione fallisce, logga errore e usa config di default."""
+ load_config_mock.return_value = {"invalid": "data"}
+
+ with pytest.raises(ValidationError):
+ config_manager.ConfigManager._validate_config(
+ config_manager.ConfigManager, {"invalid": "data"}
+ )
+
+ manager = config_manager.ConfigManager("bad.yaml")
+
+ logger_mock.error.assert_called()
+ assert manager.config == manager.get_default_config()
+
+
+class TestConfigManagerValidate:
+ """Test specifici per il metodo di validazione config."""
+
+ def test_validate_config_returns_validated_dict(
+ self, sample_valid_config_dict):
+ """Verifica che _validate_config converta e ritorni dict validato."""
+ validated = config_manager.ConfigManager._validate_config(
+ config_manager.ConfigManager, sample_valid_config_dict
+ )
+ assert isinstance(validated, dict)
+ assert "simulation_bridge" in validated
+ assert validated["simulation_bridge"]["bridge_id"] == "test_bridge"
+
+ def test_validate_config_raises_on_invalid_data(self):
+ """Verifica che _validate_config lanci ValidationError se i dati sono invalidi."""
+ invalid_data = {"rabbitmq": {"port": "not_an_int"}}
+ with pytest.raises(ValidationError):
+ config_manager.ConfigManager._validate_config(
+ config_manager.ConfigManager, invalid_data
+ )
+
+
+class TestConfigManagerGetters:
+ """Test per i metodi getter di ConfigManager."""
+
+ @pytest.fixture
+ def manager_with_config(self, sample_valid_config_dict, load_config_mock):
+ """Istanzia ConfigManager con config valida mockata."""
+ load_config_mock.return_value = sample_valid_config_dict
+ manager = config_manager.ConfigManager("dummy.yaml")
+ return manager
+
+ def test_get_config_returns_full_config(self, manager_with_config):
+ """get_config deve restituire il dict di configurazione completo."""
+ config = manager_with_config.get_config()
+ assert isinstance(config, dict)
+ assert "rabbitmq" in config
+
+ def test_get_rabbitmq_config_returns_rabbitmq_section(
+ self, manager_with_config):
+ """get_rabbitmq_config ritorna la sezione RabbitMQ."""
+ rabbit = manager_with_config.get_rabbitmq_config()
+ assert rabbit.get("host") == "localhost"
+
+ def test_get_mqtt_config_returns_mqtt_section(self, manager_with_config):
+ """get_mqtt_config ritorna la sezione MQTT."""
+ mqtt = manager_with_config.get_mqtt_config()
+ assert mqtt.get("host") == "mqtt.local"
+
+ def test_get_rest_config_returns_rest_section(self, manager_with_config):
+ """get_rest_config ritorna la sezione REST."""
+ rest = manager_with_config.get_rest_config()
+ assert rest.get("host") == "127.0.0.1"
+
+ def test_get_logging_config_returns_logging_section(
+ self, manager_with_config):
+ """get_logging_config ritorna la sezione Logging."""
+ log = manager_with_config.get_logging_config()
+ assert log.get("level") == "INFO"
+
+
+class TestConfigManagerErrorHandling:
+ """Test su gestione errori inattesi durante l'inizializzazione."""
+
+ def test_init_handles_ioerror_and_uses_default(
+ self, load_config_mock, logger_mock):
+ """IOError causa logging error e uso di configurazione default."""
+ load_config_mock.side_effect = IOError("disk error")
+
+ manager = config_manager.ConfigManager("any.yaml")
+
+ logger_mock.error.assert_called()
+ assert manager.config == manager.get_default_config()
+
+ def test_init_handles_generic_exception_and_uses_default(
+ self, load_config_mock, logger_mock):
+ """Eccezione generica viene gestita con logging e config default."""
+ load_config_mock.side_effect = Exception("unexpected")
+
+ manager = config_manager.ConfigManager("any.yaml")
+
+ logger_mock.error.assert_called()
+ logger_mock.exception.assert_called()
+ assert manager.config == manager.get_default_config()
diff --git a/simulation_bridge/test/unit/test_logger.py b/simulation_bridge/test/unit/test_logger.py
new file mode 100644
index 0000000..902746c
--- /dev/null
+++ b/simulation_bridge/test/unit/test_logger.py
@@ -0,0 +1,121 @@
+"""
+Unit tests for the logger module of the simulation bridge.
+"""
+import logging
+import io
+import sys
+from unittest import mock
+
+import pytest
+
+from simulation_bridge.src.utils import logger
+
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+
+@pytest.fixture(autouse=True)
+def clear_logger_handlers():
+ """Clear handlers from the default logger before each test."""
+ log = logging.getLogger('SIMULATION-BRIDGE')
+ handlers = log.handlers[:]
+ for h in handlers:
+ log.removeHandler(h)
+ yield
+ # Clean up after test as well
+ for h in log.handlers[:]:
+ log.removeHandler(h)
+
+
+@pytest.fixture
+def mock_mkdir(monkeypatch):
+ """Mock Path.mkdir to avoid filesystem writes."""
+ m = mock.Mock()
+ monkeypatch.setattr("simulation_bridge.src.utils.logger.Path.mkdir", m)
+ return m
+
+
+@pytest.fixture
+def mock_rotating_file_handler(monkeypatch):
+ """Mock RotatingFileHandler constructor."""
+ m = mock.Mock()
+ # Patch the RotatingFileHandler constructor inside logger module
+ monkeypatch.setattr(
+ "simulation_bridge.src.utils.logger.RotatingFileHandler", m)
+ return m
+
+
+@pytest.fixture
+def mock_colorlog(monkeypatch):
+ """Mock colorlog.ColoredFormatter."""
+ m = mock.Mock()
+ monkeypatch.setattr(
+ "simulation_bridge.src.utils.logger.colorlog.ColoredFormatter", m)
+ return m
+
+
+@pytest.fixture
+def fake_stdout(monkeypatch):
+ """Replace sys.stdout with a StringIO to avoid io.UnsupportedOperation errors."""
+ fake = io.StringIO()
+ monkeypatch.setattr(sys, "stdout", fake)
+ return fake
+
+
+class TestSetupLoggerFileHandler:
+ """Tests for file handler setup in setup_logger."""
+
+ def test_rotating_file_handler_created_with_correct_args(
+ self, mock_rotating_file_handler
+ ):
+ """Verify RotatingFileHandler is instantiated with expected parameters."""
+ logger.setup_logger(log_file="logs/test.log", enable_console=False)
+ mock_rotating_file_handler.assert_called_once_with(
+ filename="logs/test.log",
+ maxBytes=logger.MAX_LOG_SIZE,
+ backupCount=logger.BACKUP_COUNT,
+ encoding="utf-8",
+ )
+
+
+class TestSetupLoggerBehavior:
+ """General behavior tests for setup_logger."""
+
+ def test_returns_same_logger_instance_if_handlers_exist(self):
+ """If logger already has handlers, setup_logger returns it without changes."""
+ name = "TEST-LOGGER"
+ log = logging.getLogger(name)
+ handler = logging.StreamHandler()
+ log.addHandler(handler)
+
+ returned_logger = logger.setup_logger(name=name)
+ assert returned_logger is log
+ # Clean up added handler to maintain test isolation
+ log.removeHandler(handler)
+
+ def test_logger_level_is_set_correctly(self):
+ """Logger level is set to the provided level."""
+ lvl = logging.WARNING
+ log = logger.setup_logger(level=lvl, enable_console=False)
+ assert log.level == lvl
+
+ def test_file_handler_level_set_to_debug(self, mock_rotating_file_handler):
+ """File handler is set to DEBUG level regardless of logger level."""
+ logger.setup_logger(enable_console=False)
+ fh_instance = mock_rotating_file_handler.return_value
+ fh_instance.setLevel.assert_called_once_with(logging.DEBUG)
+
+
+class TestGetLogger:
+ """Tests for get_logger function."""
+
+ def test_get_logger_returns_logger_instance(self):
+ """get_logger returns a logging.Logger instance."""
+ log = logger.get_logger()
+ assert isinstance(log, logging.Logger)
+
+ def test_get_logger_returns_named_logger(self):
+ """get_logger returns a logger with the correct name."""
+ name = "MYLOGGER"
+ log = logger.get_logger(name=name)
+ assert log.name == name
diff --git a/simulation_bridge/test/unit/test_main.py b/simulation_bridge/test/unit/test_main.py
new file mode 100644
index 0000000..d7f6585
--- /dev/null
+++ b/simulation_bridge/test/unit/test_main.py
@@ -0,0 +1,191 @@
+"""
+Unit tests for the main module of the simulation bridge.
+Tests the command line interface and main functionality.
+"""
+
+from unittest import mock
+import pytest
+
+import simulation_bridge.src.main as main_module
+
+
+@pytest.fixture
+def mock_config_file(tmp_path):
+ """Create a temporary config file for testing."""
+ config_path = tmp_path / "config.yaml"
+ config_path.write_text("logging:\n level: INFO\n file: log.txt")
+ return str(config_path)
+
+
+@pytest.fixture
+def mock_logger():
+ """Create a mock logger for testing."""
+ return mock.Mock()
+
+
+@pytest.fixture
+def mock_config_data():
+ """Create mock configuration data for testing."""
+ return {
+ "logging": {"level": "INFO", "file": "log.txt"}
+ }
+
+
+def test_generate_config_flag(monkeypatch):
+ """Test the --generate-config flag functionality."""
+ generate_mock = mock.Mock()
+ monkeypatch.setattr(main_module, "generate_default_config", generate_mock)
+
+ main_module.main.main(
+ ["--generate-config"], standalone_mode=False
+ ) # click command simulates entrypoint
+
+ generate_mock.assert_called_once()
+
+
+def test_generate_project_flag(monkeypatch):
+ """Test the --generate-project flag functionality."""
+ generate_mock = mock.Mock()
+ monkeypatch.setattr(main_module, "generate_default_project", generate_mock)
+
+ main_module.main.main(
+ ["--generate-project"], standalone_mode=False
+ )
+
+ generate_mock.assert_called_once()
+
+
+def test_main_with_config_file(monkeypatch, mock_config_file): # pylint: disable=redefined-outer-name
+ """Test main function with a specific config file."""
+ run_mock = mock.Mock()
+ monkeypatch.setattr(main_module, "run_bridge", run_mock)
+
+ main_module.main.main(
+ ["--config-file", mock_config_file], standalone_mode=False)
+
+ run_mock.assert_called_once_with(mock_config_file)
+
+
+def test_main_no_config_prints(monkeypatch):
+ """Test main function behavior when no config file is found."""
+ monkeypatch.setattr(main_module.os.path, "exists", lambda _: False)
+
+ print_mock = mock.Mock()
+ monkeypatch.setattr("builtins.print", print_mock)
+
+ main_module.main.main([], standalone_mode=False)
+
+ # Flatten all print args and join them into one string for assertion
+ printed_output = " ".join(
+ str(arg)
+ for call in print_mock.call_args_list
+ for arg in call.args
+ )
+
+ assert "Configuration file config.yaml not found." in printed_output
+
+
+def test_main_fallback_config(monkeypatch, tmp_path):
+ """Test main function with fallback to default config."""
+ default_config_path = tmp_path / "config.yaml"
+ default_config_path.write_text("logging:\n level: INFO\n file: log.txt")
+
+ monkeypatch.setattr(main_module.os.path, "exists", lambda _: True)
+ monkeypatch.setattr(
+ main_module,
+ "CONFIG_FILENAME",
+ str(default_config_path))
+
+ run_mock = mock.Mock()
+ monkeypatch.setattr(main_module, "run_bridge", run_mock)
+
+ main_module.main.main([], standalone_mode=False)
+
+ run_mock.assert_called_once_with(str(default_config_path))
+
+
+def test_run_bridge_success(monkeypatch, mock_config_file, mock_config_data, mock_logger): # pylint: disable=redefined-outer-name
+ """Test successful bridge execution."""
+ mock_bridge = mock.Mock()
+
+ monkeypatch.setattr(main_module, "load_config", lambda _: mock_config_data)
+ monkeypatch.setattr(
+ main_module,
+ "setup_logger",
+ lambda **kwargs: mock_logger)
+ monkeypatch.setattr(
+ main_module,
+ "BridgeOrchestrator",
+ lambda config_path: mock_bridge)
+
+ main_module.run_bridge(mock_config_file)
+
+ mock_logger.debug.assert_called_once()
+ mock_bridge.start.assert_called_once()
+
+
+def test_run_bridge_keyboard_interrupt(monkeypatch, mock_config_file, mock_config_data, mock_logger): # pylint: disable=redefined-outer-name,line-too-long
+ """Test bridge execution with keyboard interrupt."""
+ mock_bridge = mock.Mock()
+ mock_bridge.start.side_effect = KeyboardInterrupt
+
+ monkeypatch.setattr(main_module, "load_config", lambda _: mock_config_data)
+ monkeypatch.setattr(
+ main_module,
+ "setup_logger",
+ lambda **kwargs: mock_logger)
+ monkeypatch.setattr(
+ main_module,
+ "BridgeOrchestrator",
+ lambda config_path: mock_bridge)
+
+ main_module.run_bridge(mock_config_file)
+
+ mock_logger.info.assert_called_with("Stopping application via interrupt")
+ mock_bridge.stop.assert_called_once()
+
+
+def test_run_bridge_os_error(monkeypatch, mock_config_file, mock_config_data, mock_logger): # pylint: disable=redefined-outer-name
+ """Test bridge execution with OS error."""
+ mock_bridge = mock.Mock()
+ mock_bridge.start.side_effect = OSError("disk full")
+
+ monkeypatch.setattr(main_module, "load_config", lambda _: mock_config_data)
+ monkeypatch.setattr(
+ main_module,
+ "setup_logger",
+ lambda **kwargs: mock_logger)
+ monkeypatch.setattr(
+ main_module,
+ "BridgeOrchestrator",
+ lambda config_path: mock_bridge)
+
+ main_module.run_bridge(mock_config_file)
+
+ mock_logger.error.assert_called_with(
+ "OS error: %s", "disk full", exc_info=True)
+ mock_bridge.stop.assert_called_once()
+
+
+def test_run_bridge_value_error(monkeypatch, mock_config_file, mock_config_data, mock_logger): # pylint: disable=redefined-outer-name
+ """Test bridge execution with value error."""
+ mock_bridge = mock.Mock()
+ mock_bridge.start.side_effect = ValueError("invalid format")
+
+ monkeypatch.setattr(main_module, "load_config", lambda _: mock_config_data)
+ monkeypatch.setattr(
+ main_module,
+ "setup_logger",
+ lambda **kwargs: mock_logger)
+ monkeypatch.setattr(
+ main_module,
+ "BridgeOrchestrator",
+ lambda config_path: mock_bridge)
+
+ main_module.run_bridge(mock_config_file)
+
+ mock_logger.error.assert_called_with(
+ "Configuration error: %s",
+ "invalid format",
+ exc_info=True)
+ mock_bridge.stop.assert_called_once()
diff --git a/simulation_bridge/test/unit/test_mqtt_adapter.py b/simulation_bridge/test/unit/test_mqtt_adapter.py
new file mode 100644
index 0000000..7edefa7
--- /dev/null
+++ b/simulation_bridge/test/unit/test_mqtt_adapter.py
@@ -0,0 +1,159 @@
+# simulation_bridge/test/unit/test_mqtt_adapter.py
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+"""Unit tests for the MQTTAdapter class using pytest and unittest.mock."""
+
+from unittest.mock import MagicMock, patch
+import pytest
+
+from simulation_bridge.src.protocol_adapters.mqtt import mqtt_adapter
+
+
+@pytest.fixture
+def config_mock():
+ """Fixture for MQTT configuration."""
+ return {
+ 'host': 'localhost',
+ 'port': 1883,
+ 'keepalive': 60,
+ 'input_topic': 'test/input',
+ 'output_topic': 'test/output',
+ 'username': 'user',
+ 'password': 'pass',
+ 'qos': 1
+ }
+
+
+@pytest.fixture
+def config_manager_mock(config_mock):
+ """Fixture for mocked ConfigManager."""
+ mock = MagicMock()
+ mock.get_mqtt_config.return_value = config_mock
+ return mock
+
+
+@pytest.fixture
+def adapter(config_manager_mock):
+ """Fixture for MQTTAdapter instance."""
+ with patch('simulation_bridge.src.protocol_adapters.mqtt.mqtt_adapter.mqtt.Client'):
+ return mqtt_adapter.MQTTAdapter(config_manager_mock)
+
+
+class TestMQTTAdapterInitialization: # pylint: disable=too-few-public-methods
+ """Tests for MQTTAdapter initialization."""
+
+ def test_init_sets_client_config(self, config_manager_mock):
+ """Should set username and password for both MQTT clients."""
+ with patch('simulation_bridge.src.protocol_adapters.mqtt.mqtt_adapter.mqtt.Client') as client_mock: # pylint: disable=line-too-long
+ adapter = mqtt_adapter.MQTTAdapter(config_manager_mock)
+ assert client_mock.return_value.username_pw_set.call_count == 2
+ assert adapter.topic == 'test/input'
+ assert adapter._running is False
+
+
+class TestMQTTAdapterOnConnect:
+ """Tests for on_connect behavior."""
+
+ def test_on_connect_success(self, adapter):
+ """Should subscribe to input topic on successful connect."""
+ client = MagicMock()
+ adapter.client = client # internal client used in on_connect
+ adapter.on_connect(client, None, None, rc=0)
+ client.subscribe.assert_called_once_with('test/input')
+
+ def test_on_connect_failure_logs_error(self, adapter, monkeypatch):
+ """Should log error when connection fails."""
+ logger_mock = MagicMock()
+ monkeypatch.setattr(mqtt_adapter, 'logger', logger_mock)
+ adapter.on_connect(MagicMock(), None, None, rc=1)
+ logger_mock.error.assert_called_once()
+
+
+class TestMQTTAdapterOnDisconnect:
+ """Tests for on_disconnect behavior."""
+
+ def test_on_disconnect_clean(self, adapter, monkeypatch):
+ """Should log clean disconnect."""
+ logger_mock = MagicMock()
+ monkeypatch.setattr(mqtt_adapter, 'logger', logger_mock)
+ adapter.on_disconnect(MagicMock(), None, rc=0)
+ logger_mock.debug.assert_called_once()
+
+ def test_on_disconnect_unexpected(self, adapter, monkeypatch):
+ """Should log unexpected disconnect."""
+ logger_mock = MagicMock()
+ monkeypatch.setattr(mqtt_adapter, 'logger', logger_mock)
+ adapter.on_disconnect(MagicMock(), None, rc=1)
+ logger_mock.warning.assert_called_once()
+
+
+class TestMQTTAdapterOnMessage:
+ """Tests for message handling."""
+
+ def test_on_message_valid_yaml(self, adapter, monkeypatch):
+ """Should handle valid YAML message."""
+ send_mock = MagicMock()
+ signal_mock = MagicMock(return_value=MagicMock(send=send_mock))
+ monkeypatch.setattr(mqtt_adapter, 'signal', signal_mock)
+
+ payload = b"simulation:\n client_id: test_client\n simulator: test_sim"
+ msg = MagicMock(payload=payload)
+
+ adapter.on_message(None, None, msg)
+
+ send_mock.assert_called_once()
+
+ def test_on_message_invalid_payload(self, adapter, monkeypatch):
+ """Should fallback to raw message on invalid JSON/YAML."""
+ send_mock = MagicMock()
+ signal_mock = MagicMock(return_value=MagicMock(send=send_mock))
+ monkeypatch.setattr(mqtt_adapter, 'signal', signal_mock)
+
+ msg = MagicMock(payload=b"not a valid format: }")
+
+ adapter.on_message(None, None, msg)
+
+ send_mock.assert_called_once()
+
+
+class TestMQTTAdapterStartStop:
+ """Tests for adapter start and stop behavior."""
+
+ def test_start_creates_thread(self, adapter, monkeypatch):
+ """Should start a client thread."""
+ thread_mock = MagicMock()
+ monkeypatch.setattr(
+ mqtt_adapter.threading,
+ 'Thread',
+ lambda **kwargs: thread_mock)
+ adapter._run_client = MagicMock()
+
+ adapter.start()
+ assert adapter._running is True
+ thread_mock.start.assert_called_once()
+
+ def test_stop_disconnects_client(self, adapter):
+ """Should disconnect client on stop."""
+ client_mock = MagicMock()
+ adapter.client = client_mock
+ adapter.stop()
+ client_mock.disconnect.assert_called_once()
+
+
+class TestMQTTAdapterPublish:
+ """Tests for sending messages."""
+
+ def test_send_result_success(self, adapter):
+ """Should publish message to output topic."""
+ mqtt_client = MagicMock()
+ adapter.mqtt_client = mqtt_client
+ message = {'test': 'value'}
+
+ adapter.send_result(message)
+ mqtt_client.publish.assert_called_once()
+
+ def test_publish_result_message_mqtt(self, adapter):
+ """Should call send_result with message from kwargs."""
+ adapter.send_result = MagicMock()
+ adapter.publish_result_message_mqtt(None, message={'a': 1})
+ adapter.send_result.assert_called_once_with({'a': 1})
diff --git a/simulation_bridge/test/unit/test_mqtt_client.py b/simulation_bridge/test/unit/test_mqtt_client.py
new file mode 100644
index 0000000..cf320b6
--- /dev/null
+++ b/simulation_bridge/test/unit/test_mqtt_client.py
@@ -0,0 +1,139 @@
+"""Test suite for MQTT Client using pytest and unittest.mock."""
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+import json
+from unittest.mock import MagicMock
+import pytest
+from simulation_bridge.resources.mqtt import mqtt_client
+
+
+@pytest.fixture
+def mock_config():
+ """Fixture to provide mock MQTT configuration."""
+ return {
+ 'mqtt': {
+ 'host': 'localhost',
+ 'port': 1883,
+ 'keepalive': 60,
+ 'input_topic': 'test/input',
+ 'output_topic': 'test/output',
+ 'username': 'user',
+ 'password': 'pass',
+ 'qos': 1
+ },
+ 'payload_file': 'test_payload.yaml'
+ }
+
+
+@pytest.fixture
+def client(mock_config):
+ """Fixture to create an MQTTClient instance."""
+ return mqtt_client.MQTTClient(mock_config)
+
+
+class TestLoadConfig:
+ """Tests for load_config function."""
+
+ def test_load_valid_config(self, tmp_path, monkeypatch):
+ """Test loading a valid YAML config."""
+ config_path = tmp_path / "config.yaml"
+ config_path.write_text("mqtt:\n host: test\n", encoding='utf-8')
+
+ config = mqtt_client.load_config(str(config_path))
+ assert config["mqtt"]["host"] == "test"
+
+ def test_file_not_found_exits(self, capsys):
+ """Test SystemExit is raised when config file is missing."""
+ with pytest.raises(SystemExit):
+ mqtt_client.load_config("missing.yaml")
+ out, _ = capsys.readouterr()
+ assert "Configuration file 'missing.yaml' not found." in out
+
+ def test_yaml_error_exits(self, tmp_path, capsys):
+ """Test SystemExit is raised on invalid YAML content."""
+ config_path = tmp_path / "invalid.yaml"
+ config_path.write_text("::: invalid :::", encoding="utf-8")
+
+ with pytest.raises(SystemExit):
+ mqtt_client.load_config(str(config_path))
+ out, _ = capsys.readouterr()
+ assert "Error parsing YAML file:" in out
+
+
+class TestMQTTClientInit: # pylint: disable=too-few-public-methods
+ """Tests for MQTTClient initialization."""
+
+ def test_initializes_client_attributes(self, mock_config):
+ """Test that MQTTClient sets config and callbacks correctly."""
+ client = mqtt_client.MQTTClient(mock_config)
+ assert client.config['host'] == 'localhost'
+ assert client.payload_file == 'test_payload.yaml'
+ assert client.client.on_message == client.on_message # pylint: disable=comparison-with-callable
+
+
+class TestCreateRequest:
+ """Tests for the create_request method."""
+
+ def test_loads_payload_successfully(
+ self, tmp_path, mock_config, monkeypatch):
+ """Test create_request loads a YAML payload correctly."""
+ payload_content = {'a': 1}
+ payload_path = tmp_path / "test_payload.yaml"
+ payload_path.write_text("a: 1", encoding="utf-8")
+
+ monkeypatch.setattr(mqtt_client, '__file__', str(payload_path))
+ mock_config['payload_file'] = 'test_payload.yaml'
+
+ client = mqtt_client.MQTTClient(mock_config)
+ result = client.create_request()
+ assert result == payload_content
+
+ def test_payload_file_missing_raises_exit(self, monkeypatch, mock_config):
+ """Test SystemExit when payload file is missing or unreadable."""
+ mock_config['payload_file'] = 'nonexistent.yaml'
+ monkeypatch.setattr(mqtt_client, '__file__', '/fake/path/client.py')
+
+ client = mqtt_client.MQTTClient(mock_config)
+ with pytest.raises(SystemExit):
+ client.create_request()
+
+
+class TestConnectAndListen: # pylint: disable=too-few-public-methods
+ """Tests for connect_and_listen method."""
+
+ def test_connects_and_publishes(self, client, monkeypatch):
+ """Test MQTT client connects, subscribes, publishes and loops."""
+ fake_payload = {"data": 123}
+ client.create_request = MagicMock(return_value=fake_payload)
+
+ client.client.connect = MagicMock()
+ client.client.subscribe = MagicMock()
+ client.client.publish = MagicMock()
+ client.client.loop_forever = MagicMock()
+
+ client.connect_and_listen()
+
+ client.client.connect.assert_called_once_with('localhost', 1883, 60)
+ client.client.subscribe.assert_called_once_with('test/output', qos=1)
+ client.client.publish.assert_called_once_with(
+ 'test/input', json.dumps(fake_payload), qos=1
+ )
+ client.client.loop_forever.assert_called_once()
+
+
+class TestOnMessage: # pylint: disable=too-few-public-methods
+ """Tests for on_message callback."""
+
+ def test_prints_received_message(self, capsys, client):
+ """Test that on_message prints topic and decoded payload."""
+ msg = MagicMock()
+ msg.topic = "some/topic"
+ msg.payload = b'{"value": 42}'
+
+ client.on_message(None, None, msg)
+ out, _ = capsys.readouterr()
+
+ assert "Message received:" in out
+ assert "some/topic" in out
+ assert '{"value": 42}' in out
diff --git a/simulation_bridge/test/unit/test_rabbitmq_adapter.py b/simulation_bridge/test/unit/test_rabbitmq_adapter.py
new file mode 100644
index 0000000..065ed69
--- /dev/null
+++ b/simulation_bridge/test/unit/test_rabbitmq_adapter.py
@@ -0,0 +1,270 @@
+"""Test suite for rabbitmq_adapter.py using pytest and unittest.mock."""
+
+# pylint: disable=redefined-outer-name,unused-argument,protected-access
+
+from unittest import mock
+
+import json
+import pytest
+
+from simulation_bridge.src.protocol_adapters.rabbitmq import rabbitmq_adapter
+
+
+@pytest.fixture
+def config_manager_mock():
+ """Mocked ConfigManager providing RabbitMQ configuration."""
+ mock_cfg = mock.MagicMock()
+ mock_cfg.get_rabbitmq_config.return_value = {
+ 'username': 'user',
+ 'password': 'pass',
+ 'host': 'localhost',
+ 'port': 5672,
+ 'vhost': '/',
+ 'infrastructure': {'queues': [{'name': 'Q.bridge.input'}, {'name': 'Q.bridge.result'}]}
+ }
+ return mock_cfg
+
+
+@pytest.fixture
+def pika_connection_mock(monkeypatch):
+ """Patch pika.BlockingConnection and channel for RabbitMQ connection."""
+ mock_channel = mock.MagicMock()
+ mock_conn = mock.MagicMock()
+ mock_conn.channel.return_value = mock_channel
+
+ monkeypatch.setattr(
+ rabbitmq_adapter.pika,
+ "BlockingConnection",
+ lambda params: mock_conn)
+ monkeypatch.setattr(
+ rabbitmq_adapter.pika,
+ "PlainCredentials",
+ lambda u,
+ p: None)
+ monkeypatch.setattr(
+ rabbitmq_adapter.pika,
+ "ConnectionParameters",
+ lambda **kwargs: None)
+ return mock_conn, mock_channel
+
+
+class TestRabbitMQAdapterInit:
+ """Tests for RabbitMQAdapter initialization and queue subscription."""
+
+ def test_init_subscribes_to_configured_queues(
+ self, config_manager_mock, pika_connection_mock):
+ """RabbitMQAdapter should subscribe to queues defined in config."""
+ _conn_mock, chan_mock = pika_connection_mock
+ adapter = rabbitmq_adapter.RabbitMQAdapter( # pylint: disable=unused-variable
+ config_manager_mock)
+ # Should call basic_consume for each queue
+ assert chan_mock.basic_consume.call_count == 2
+ calls = [call.kwargs['queue']
+ for call in chan_mock.basic_consume.call_args_list]
+ assert 'Q.bridge.input' in calls
+ assert 'Q.bridge.result' in calls
+
+ def test_init_logger_debug_called(
+ self, config_manager_mock, pika_connection_mock):
+ """Initialization logs debug messages."""
+ with mock.patch.object(rabbitmq_adapter.logger, "debug") as log_debug:
+ rabbitmq_adapter.RabbitMQAdapter(config_manager_mock)
+ log_debug.assert_any_call("RabbitMQ adapter initialized")
+ log_debug.assert_any_call(
+ "RabbitMQ adapter initialized and subscribed to queues")
+
+
+class TestProcessMessage:
+ """Tests for the _process_message method handling incoming messages."""
+
+ @pytest.fixture
+ def adapter(self, config_manager_mock, pika_connection_mock):
+ """Instantiate RabbitMQAdapter for tests."""
+ return rabbitmq_adapter.RabbitMQAdapter(config_manager_mock)
+
+ # pylint: disable=protected-access
+ def test_process_message_yaml_success(self, adapter):
+ """Process YAML message correctly and ack message."""
+ ch = mock.MagicMock()
+ method = mock.MagicMock()
+ body = b"simulation:\n client_id: test\n simulator: sim"
+ adapter._process_message(ch, method, None, body, 'Q.bridge.input')
+ ch.basic_ack.assert_called_once_with(delivery_tag=method.delivery_tag)
+
+ def test_process_message_json_success(self, adapter):
+ """Process JSON message correctly and ack message."""
+ ch = mock.MagicMock()
+ method = mock.MagicMock()
+ body = json.dumps(
+ {"simulation": {"client_id": "client", "simulator": "sim"}}).encode()
+ adapter._process_message(ch, method, None, body, 'Q.bridge.input')
+ ch.basic_ack.assert_called_once_with(delivery_tag=method.delivery_tag)
+
+ def test_process_message_raw_fallback(self, adapter):
+ """Process raw message fallback and ack message."""
+ ch = mock.MagicMock()
+ method = mock.MagicMock()
+ # Unparsable YAML/JSON
+ body = b"not: valid: yaml"
+ adapter._process_message(ch, method, None, body, 'Q.bridge.input')
+ ch.basic_ack.assert_called_once_with(delivery_tag=method.delivery_tag)
+
+ def test_process_message_non_dict_raises_nack(self, adapter):
+ """Non-dict message triggers nack and error log."""
+ ch = mock.MagicMock()
+ method = mock.MagicMock()
+ body = b"[]"
+ with mock.patch.object(rabbitmq_adapter.logger, "error") as log_error:
+ adapter._process_message(ch, method, None, body, 'Q.bridge.input')
+ ch.basic_nack.assert_called_once_with(
+ delivery_tag=method.delivery_tag, requeue=False)
+ log_error.assert_called_once()
+
+ def test_process_message_bridge_meta_malformed_json_logs_warning(
+ self, adapter):
+ """Malformed JSON in bridge_meta logs warning but does not raise."""
+ ch = mock.MagicMock()
+ method = mock.MagicMock()
+ msg = {
+ "simulation": {},
+ "bridge_meta": "{not:json}"
+ }
+ body = json.dumps(msg).encode()
+ with mock.patch.object(rabbitmq_adapter.logger, "warning") as log_warn:
+ adapter._process_message(ch, method, None, body, 'Q.bridge.result')
+ ch.basic_ack.assert_called_once_with(
+ delivery_tag=method.delivery_tag)
+ log_warn.assert_called_once()
+
+ def test_process_message_unknown_queue_does_not_send_signal(self, adapter):
+ """Messages from unknown queue do not send any signal but ack."""
+ ch = mock.MagicMock()
+ method = mock.MagicMock()
+ body = b'{"simulation": {}}'
+ with mock.patch("blinker.signal") as mock_signal:
+ adapter._process_message(ch, method, None, body, 'unknown_queue')
+            # An unknown queue must not dispatch a routing signal; allow at most one incidental signal lookup
+ assert mock_signal.call_count in (0, 1)
+ ch.basic_ack.assert_called_once_with(
+ delivery_tag=method.delivery_tag)
+
+
+class TestRunConsumer:
+ """Tests for _run_consumer method running the consumer thread."""
+
+ @pytest.fixture
+ def adapter(self, config_manager_mock, pika_connection_mock):
+ """Adapter instance for consumer tests."""
+ return rabbitmq_adapter.RabbitMQAdapter(config_manager_mock)
+
+ # pylint: disable=protected-access
+ def test_run_consumer_starts_and_sets_running_flag(self, adapter):
+ """_run_consumer sets _running True and calls start_consuming."""
+ adapter.channel.start_consuming = mock.MagicMock()
+ adapter._run_consumer()
+ assert not adapter._running # Should be False after finishing
+ adapter.channel.start_consuming.assert_called_once()
+
+ def test_run_consumer_logs_error_on_exception(self, adapter):
+ """Logs error if start_consuming raises exception while running."""
+ adapter._running = True
+ adapter.channel.start_consuming = mock.Mock(
+ side_effect=RuntimeError("fail"))
+ with mock.patch.object(rabbitmq_adapter.logger, "error") as log_error:
+ adapter._run_consumer()
+ log_error.assert_called_once()
+
+
+class TestStartStopAdapter:
+ """Tests for start and stop lifecycle methods of RabbitMQAdapter."""
+
+ @pytest.fixture
+ def adapter(self, config_manager_mock, pika_connection_mock):
+ """Adapter instance for lifecycle tests."""
+ return rabbitmq_adapter.RabbitMQAdapter(config_manager_mock)
+
+ def test_start_creates_and_starts_thread(self, adapter):
+ """Start method creates and starts consumer thread."""
+ with mock.patch("threading.Thread") as thread_mock:
+ thread_inst = mock.Mock()
+ thread_mock.return_value = thread_inst
+ adapter.start()
+ thread_mock.assert_called_once()
+ thread_inst.start.assert_called_once()
+
+ def test_start_logs_and_raises_on_exception(self, adapter):
+ """Start logs error and raises if thread creation fails."""
+ with mock.patch("threading.Thread", side_effect=RuntimeError("fail")), \
+ mock.patch.object(rabbitmq_adapter.logger, "error") as log_error, \
+ mock.patch.object(adapter, "stop") as stop_mock:
+ with pytest.raises(RuntimeError):
+ adapter.start()
+ log_error.assert_called_once()
+ stop_mock.assert_called_once()
+
+ def test_stop_stops_consuming_and_closes_connection(self, adapter):
+ """Stop schedules stop_consuming, joins thread and closes connection."""
+ adapter._running = True
+ adapter.channel.is_open = True
+ adapter.connection.is_open = True
+ adapter._consumer_thread = mock.Mock(
+ is_alive=mock.Mock(return_value=True))
+ adapter.connection.add_callback_threadsafe = mock.Mock()
+ adapter.connection.close = mock.Mock()
+
+ adapter.stop()
+
+ adapter.connection.add_callback_threadsafe.assert_called_once()
+ adapter._consumer_thread.join.assert_called_once_with(timeout=5)
+ adapter.connection.close.assert_called_once()
+
+ def test_stop_handles_exceptions_gracefully(self, adapter):
+ """Stop method handles exceptions without raising."""
+ adapter.channel = mock.Mock(is_open=True)
+ adapter.connection = mock.Mock(is_open=True)
+ adapter.connection.add_callback_threadsafe = mock.Mock(
+ side_effect=Exception("fail"))
+ adapter._consumer_thread = mock.Mock()
+ adapter._consumer_thread.is_alive = mock.Mock(return_value=True)
+ adapter._consumer_thread.join = mock.Mock(side_effect=Exception("fail"))
+ adapter.connection.close = mock.Mock(side_effect=Exception("fail"))
+
+ with mock.patch.object(rabbitmq_adapter.logger, "warning") as log_warn, \
+ mock.patch.object(rabbitmq_adapter.logger, "error") as log_error:
+ adapter.stop()
+ assert log_warn.call_count >= 1
+            # The error log may fire because add_callback_threadsafe raises
+ assert log_error.call_count >= 1
+
+
+class TestHandleMessageAndStartAdapter:
+ """Tests for _handle_message and _start_adapter methods."""
+
+ @pytest.fixture
+ def adapter(self, config_manager_mock, pika_connection_mock):
+ """RabbitMQAdapter instance for handle/start adapter tests."""
+ return rabbitmq_adapter.RabbitMQAdapter(config_manager_mock)
+
+ # pylint: disable=protected-access
+ def test_handle_message_calls_process_message(self, adapter):
+ """_handle_message calls _process_message with expected args."""
+ with mock.patch.object(adapter, "_process_message") as process_mock:
+ msg = {"some": "data"}
+ adapter._handle_message(msg)
+ process_mock.assert_called_once_with(
+ None, None, None, msg, 'Q.bridge.input')
+
+ def test_start_adapter_starts_consuming(self, adapter):
+ """_start_adapter calls channel.start_consuming and handles exceptions."""
+ adapter.channel.start_consuming = mock.Mock()
+ adapter._start_adapter()
+ adapter.channel.start_consuming.assert_called_once()
+
+ def test_start_adapter_logs_error_on_exception(self, adapter):
+ """_start_adapter logs error and raises if start_consuming fails."""
+ adapter.channel.start_consuming = mock.Mock(
+ side_effect=RuntimeError("fail"))
+ with mock.patch.object(rabbitmq_adapter.logger, "error") as log_error:
+ with pytest.raises(RuntimeError):
+ adapter._start_adapter()
+ log_error.assert_called_once()
diff --git a/simulation_bridge/test/unit/test_rabbitmq_client.py b/simulation_bridge/test/unit/test_rabbitmq_client.py
new file mode 100644
index 0000000..6baacdb
--- /dev/null
+++ b/simulation_bridge/test/unit/test_rabbitmq_client.py
@@ -0,0 +1,230 @@
+"""Tests for RabbitMQ client in simulation bridge."""
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+# pylint: disable=too-many-positional-arguments
+from unittest.mock import MagicMock
+from unittest import mock
+import pytest
+
+from simulation_bridge.resources.rabbitmq import rabbitmq_client
+
+
+@pytest.fixture
+def mock_config():
+ """Mock configuration dictionary for RabbitMQClient."""
+ return {
+ 'rabbitmq':
+ {
+ 'host': 'localhost',
+ 'port': 5672,
+ 'vhost': '/',
+ 'username': 'guest',
+ 'password': 'guest'
+ },
+ 'digital_twin': {
+ 'dt_id': 'dt123',
+ 'routing_key_send': 'dt.send'
+ },
+ 'exchanges': {
+ 'input_bridge': {
+ 'name': 'input_ex',
+ 'type': 'direct',
+ 'durable': True
+ },
+ 'bridge_result': {
+ 'name': 'result_ex',
+ 'type': 'fanout',
+ 'durable': True
+ }
+ },
+ 'queue': {
+ 'result_queue_prefix': 'bridge',
+ 'durable': True,
+ 'routing_key': 'dt.result'
+ },
+ 'payload_file': 'payload.yaml'
+ }
+
+
+@pytest.fixture
+def mock_channel():
+ """Mock channel object."""
+ return MagicMock()
+
+
+@pytest.fixture
+def mock_connection(mock_channel):
+ """Mock pika connection with mock channel."""
+ conn = MagicMock()
+ conn.channel.return_value = mock_channel
+ return conn
+
+
+@pytest.fixture
+def mock_pika(monkeypatch, mock_connection):
+ """Patch pika.BlockingConnection to return mock connection and avoid real connection errors."""
+ monkeypatch.setattr(
+ rabbitmq_client.pika,
+ "BlockingConnection",
+ lambda params: mock_connection
+ )
+
+
+class TestRabbitMQClientInitialization: # pylint: disable=too-few-public-methods
+ """Test RabbitMQClient initialization and infrastructure setup."""
+
+ def test_initialization_sets_up_infrastructure(
+ self, mock_config, mock_pika, mock_channel):
+ """Ensure __init__ sets up exchanges, queues, and bindings correctly."""
+ client = rabbitmq_client.RabbitMQClient(mock_config)
+
+ assert client.dt_id == 'dt123'
+ mock_channel.exchange_declare.assert_any_call(
+ exchange='input_ex', exchange_type='direct', durable=True)
+ mock_channel.exchange_declare.assert_any_call(
+ exchange='result_ex', exchange_type='fanout', durable=True)
+ mock_channel.queue_declare.assert_called_once()
+ mock_channel.queue_bind.assert_called_once()
+
+
+class TestSendSimulationRequest: # pylint: disable=too-few-public-methods
+ """Test the send_simulation_request method."""
+
+ def test_send_simulation_request_calls_basic_publish(
+ self, mock_config, mock_pika, mock_channel
+ ):
+ """Check that simulation request triggers a call to basic_publish."""
+ client = rabbitmq_client.RabbitMQClient(mock_config)
+
+ payload = {'temperature': 42}
+ client.send_simulation_request(payload)
+
+ mock_channel.basic_publish.assert_called_once()
+ args, kwargs = mock_channel.basic_publish.call_args # pylint: disable=unused-variable
+ assert kwargs['exchange'] == 'input_ex'
+ assert kwargs['routing_key'] == 'dt.send'
+ assert 'temperature' in kwargs['body']
+
+
+class TestHandleResult:
+ """Test handling of incoming messages."""
+
+ def test_handle_result_acknowledges_valid_yaml(
+ self, mock_config, mock_pika, mock_channel):
+ """Should acknowledge a valid YAML result message."""
+ client = rabbitmq_client.RabbitMQClient(mock_config)
+
+ method = MagicMock()
+ method.routing_key = 'source.service'
+ method.delivery_tag = 10
+ body = b"key: value"
+
+ client.handle_result(mock_channel, method, None, body)
+ mock_channel.basic_ack.assert_called_once_with(10)
+
+ def test_handle_result_handles_yaml_error(
+ self, mock_config, mock_pika, mock_channel):
+ """Should nack message if YAML is invalid."""
+ client = rabbitmq_client.RabbitMQClient(mock_config)
+
+ method = MagicMock()
+ method.routing_key = 'source.service'
+ method.delivery_tag = 11
+ body = b": invalid_yaml"
+
+ client.handle_result(mock_channel, method, None, body)
+ mock_channel.basic_nack.assert_called_once_with(11)
+
+
+class TestStartListening: # pylint: disable=too-few-public-methods
+ """Test listener behavior."""
+
+ def test_start_listening_calls_basic_consume_and_start(self,
+ mock_config,
+ mock_pika,
+ mock_channel):
+ """Ensure basic_consume and start_consuming are called."""
+ client = rabbitmq_client.RabbitMQClient(mock_config)
+ client.start_listening()
+
+ mock_channel.basic_consume.assert_called_once()
+ mock_channel.start_consuming.assert_called_once()
+
+
+class TestYamlLoading: # pylint: disable=too-few-public-methods
+ """Test static YAML loading."""
+
+ def test_load_yaml_file_returns_parsed_data(self, tmp_path):
+ """Ensure load_yaml_file loads and parses YAML content."""
+ file_path = tmp_path / "data.yaml"
+ file_path.write_text("foo: bar", encoding="utf-8")
+
+ result = rabbitmq_client.RabbitMQClient.load_yaml_file(str(file_path))
+ assert result == {"foo": "bar"}
+
+
+class TestLoadConfig:
+ """Test the load_config function."""
+
+ def test_load_config_success(self, tmp_path, monkeypatch):
+ """Should return loaded config when file is valid."""
+ config_path = tmp_path / "rabbitmq_use.yaml"
+ config_path.write_text("a: 1", encoding="utf-8")
+
+ monkeypatch.setattr(rabbitmq_client, "sys", mock.MagicMock())
+ config = rabbitmq_client.load_config(str(config_path))
+ assert config == {"a": 1}
+
+ def test_load_config_file_not_found(self, monkeypatch):
+ """Should exit if config file is missing."""
+ mock_exit = mock.MagicMock()
+ monkeypatch.setattr(
+ rabbitmq_client,
+ "sys",
+ mock.MagicMock(
+ exit=mock_exit))
+
+ rabbitmq_client.load_config("nonexistent.yaml")
+ mock_exit.assert_called_once_with(1)
+
+
+class TestMainFunction: # pylint: disable=too-few-public-methods
+ """Test main function behavior and CLI entry."""
+
+ @mock.patch("simulation_bridge.resources.rabbitmq.rabbitmq_client.RabbitMQClient")
+ def test_main_keyboard_interrupt(
+ self, mock_rmq_client, mock_config, monkeypatch, mock_pika):
+ """Simulate KeyboardInterrupt in main."""
+ monkeypatch.setattr(rabbitmq_client, "load_config", lambda: mock_config)
+ monkeypatch.setattr(
+ rabbitmq_client,
+ "start_dt_listener",
+ lambda config: None)
+ monkeypatch.setattr(
+ rabbitmq_client.time, "sleep", mock.Mock(
+ side_effect=KeyboardInterrupt))
+
+ rabbitmq_client.main()
+
+ mock_rmq_client.assert_called_once_with(mock_config)
+
+ @pytest.mark.parametrize("error_type",
+ [ValueError("fail"), OSError("fail")])
+ @mock.patch("simulation_bridge.resources.rabbitmq.rabbitmq_client.RabbitMQClient")
+ def test_main_unexpected_exceptions(
+ self, mock_rmq_client, mock_config, monkeypatch, mock_pika, error_type):
+ """Ensure main handles unexpected errors gracefully."""
+ instance = mock_rmq_client.return_value
+ instance.load_yaml_file.return_value = {}
+ instance.send_simulation_request.side_effect = error_type
+
+ monkeypatch.setattr(rabbitmq_client, "load_config", lambda: mock_config)
+ monkeypatch.setattr(
+ rabbitmq_client,
+ "start_dt_listener",
+ lambda config: None)
+ monkeypatch.setattr(
+ rabbitmq_client.time, "sleep", mock.Mock(
+ side_effect=KeyboardInterrupt))
+
+ rabbitmq_client.main()
diff --git a/simulation_bridge/test/unit/test_rest_adapter.py b/simulation_bridge/test/unit/test_rest_adapter.py
new file mode 100644
index 0000000..1ad2399
--- /dev/null
+++ b/simulation_bridge/test/unit/test_rest_adapter.py
@@ -0,0 +1,108 @@
+"""
+simulation_bridge/test/unit/test_rest_adapter.py
+"""
+# pylint: disable=protected-access,unused-argument,redefined-outer-name
+import asyncio
+import json
+import warnings
+
+from unittest.mock import MagicMock, AsyncMock
+import pytest
+
+from simulation_bridge.src.protocol_adapters.rest import rest_adapter
+
+warnings.filterwarnings("ignore", category=RuntimeWarning)
+
+
+@pytest.fixture
+def config_mock():
+ """Mock REST config dictionary."""
+ return {
+ 'host': '127.0.0.1',
+ 'port': 5000,
+ 'endpoint': '/stream',
+ 'certfile': None,
+ 'keyfile': None
+ }
+
+
+@pytest.fixture
+def config_manager_mock(config_mock):
+ """Mock config manager returning REST config."""
+ mock = MagicMock()
+ mock.get_rest_config.return_value = config_mock
+ return mock
+
+
+@pytest.fixture
+def adapter(config_manager_mock):
+ """Create RESTAdapter instance with mock config manager."""
+ return rest_adapter.RESTAdapter(config_manager_mock)
+
+
+@pytest.mark.asyncio
+async def test_generate_response_yields_and_cleans_queue(adapter):
+ """Test response generator yields initial status and queued results."""
+
+ queue = asyncio.Queue()
+ producer = "prod_test"
+ adapter._active_streams[producer] = queue
+
+ await queue.put({"result": "ok"})
+
+ gen = adapter._generate_response(producer, queue)
+ first = await gen.asend(None)
+ assert json.loads(first)['status'] == 'processing'
+
+ second = await gen.asend(None)
+ assert json.loads(second)['result'] == 'ok'
+
+ await gen.aclose()
+ assert producer not in adapter._active_streams
+
+
+@pytest.mark.asyncio
+async def test_send_result_puts_message_in_queue(adapter):
+ """Test send_result puts a message into the correct queue."""
+
+ queue = asyncio.Queue()
+ producer = 'client1'
+ adapter._active_streams[producer] = queue
+
+ result = {'data': 123}
+ await adapter.send_result(producer, result)
+ received = await queue.get()
+ assert received == result
+
+
+@pytest.mark.asyncio
+async def test_send_result_warns_when_no_active_stream(adapter, caplog):
+ """Test send_result logs warning if no active stream found."""
+
+ await adapter.send_result('nonexistent', {'x': 1})
+ assert 'No active stream found' in caplog.text
+
+
+def test_start_calls_asyncio_run(monkeypatch, adapter):
+ """Test start calls asyncio.run and sets running flag."""
+
+ async def fake_start():
+ return None
+
+ monkeypatch.setattr(adapter, '_start_server', fake_start)
+ monkeypatch.setattr('asyncio.run', lambda coro: asyncio.get_event_loop(
+ ).run_until_complete(coro)) # pylint: disable=line-too-long
+ adapter._running = False
+ adapter.start()
+ assert adapter._running is True
+
+
+@pytest.mark.asyncio
+async def test_publish_result_message_rest_calls_send_result_sync(
+ monkeypatch, adapter):
+ """Test that publish_result_message_rest calls send_result_sync correctly."""
+
+ monkeypatch.setattr(adapter, 'send_result_sync', AsyncMock())
+ msg = {'destinations': ['dest1']}
+ adapter.publish_result_message_rest(None, message=msg)
+ adapter.send_result_sync.assert_called_once_with('dest1', msg)
diff --git a/simulation_bridge/test/unit/test_signal_manager.py b/simulation_bridge/test/unit/test_signal_manager.py
new file mode 100644
index 0000000..9486434
--- /dev/null
+++ b/simulation_bridge/test/unit/test_signal_manager.py
@@ -0,0 +1,157 @@
+"""
+Unit tests for the SignalManager module.
+"""
+
+from unittest import mock
+import pytest
+import blinker
+
+from simulation_bridge.src.utils import signal_manager
+
+# pylint: disable=too-many-arguments,unused-argument,protected-access,redefined-outer-name
+
+pytestmark = pytest.mark.usefixtures("config_override")
+
+
+@pytest.fixture(autouse=True)
+def config_override(monkeypatch):
+ """Override PROTOCOL_CONFIG for isolated tests"""
+ test_config = {
+ "proto_a": {"enabled": True, "signals": {"sig_x": "BridgeCore.handle_x"}},
+ "proto_b": {"enabled": False, "signals": {"sig_y": "Adapter.do_y"}},
+ }
+ monkeypatch.setattr(
+ signal_manager.SignalManager,
+ 'PROTOCOL_CONFIG',
+ test_config)
+ monkeypatch.setattr(signal_manager.SignalManager, '_adapter_instances', {})
+ monkeypatch.setattr(
+ signal_manager.SignalManager,
+ '_bridge_core_instance',
+ None)
+ yield
+
+
+@pytest.fixture
+def bridge_core(monkeypatch):
+ """Provide and set a mock BridgeCore instance"""
+ core = mock.Mock()
+ monkeypatch.setattr(
+ signal_manager.SignalManager,
+ '_bridge_core_instance',
+ core)
+ return core
+
+
+@pytest.fixture
+def adapter(monkeypatch):
+ """Provide a mock adapter instance and registry"""
+ adapter_inst = mock.Mock()
+ adapter_inst.do_y = mock.Mock()
+ monkeypatch.setattr(signal_manager.SignalManager, '_adapter_instances', {})
+ return adapter_inst
+
+
+class TestSignalQueries:
+ """Tests for querying available and enabled protocols and signals"""
+
+ def test_get_available_signals_returns_list(self):
+ """Validate fetching signals for a valid protocol"""
+ result = signal_manager.SignalManager.get_available_signals('proto_a')
+ assert result == ['sig_x']
+
+ def test_get_available_signals_empty_for_unknown(self):
+ """Validate empty result for unknown protocol"""
+ result = signal_manager.SignalManager.get_available_signals('unknown')
+ assert not result
+
+ def test_get_enabled_protocols_filters_disabled(self):
+ """Ensure only enabled protocols are returned"""
+ result = signal_manager.SignalManager.get_enabled_protocols()
+ assert result == ['proto_a']
+
+ @pytest.mark.parametrize(
+ 'proto, expected', [
+ ('proto_a', True),
+ ('proto_b', False),
+ ('unknown', False),
+ ]
+ )
+ def test_is_protocol_enabled(self, proto, expected):
+ """Check enabled status across various protocols"""
+ assert signal_manager.SignalManager.is_protocol_enabled(
+ proto) is expected
+
+
+class TestResolveCallback:
+ """Tests for internal callback resolution logic"""
+
+ def test_resolve_without_dot_returns_none(self):
+ """Invalid func_path without dot yields None"""
+ assert signal_manager.SignalManager._resolve_callback(
+ 'invalidpath', 'proto_a') is None
+
+ def test_resolve_bridgecore_no_instance_logs_error(self, caplog):
+ """Logging on missing BridgeCore instance"""
+ caplog.set_level('ERROR')
+ cb = signal_manager.SignalManager._resolve_callback(
+ 'BridgeCore.handle_x', 'proto_a')
+ assert cb is None
+ assert 'BridgeCore instance not set' in caplog.text
+
+ def test_resolve_bridgecore_with_instance(self, bridge_core):
+ """Successful resolution on BridgeCore instance"""
+ bridge_core.handle_x = mock.Mock()
+ cb = signal_manager.SignalManager._resolve_callback(
+ 'BridgeCore.handle_x', 'proto_a')
+ assert cb == bridge_core.handle_x
+
+ def test_resolve_adapter_method(self, adapter):
+ """Resolution of registered adapter methods"""
+ signal_manager.SignalManager.register_adapter_instance(
+ 'proto_b', adapter)
+ cb = signal_manager.SignalManager._resolve_callback(
+ 'Adapter.do_y', 'proto_b')
+ assert cb == adapter.do_y
+
+ def test_resolve_no_adapter_logs_warning(self, caplog):
+ """Logging on missing adapter registration"""
+ caplog.set_level('WARNING')
+ cb = signal_manager.SignalManager._resolve_callback(
+ 'Adapter.do_y', 'proto_b')
+ assert cb is None
+ assert 'No adapter instance registered' in caplog.text
+
+
+class TestSignalConnections:
+ """Tests for connecting and disconnecting signals"""
+
+ @pytest.fixture(autouse=True)
+ def setup_instances(self, bridge_core, adapter):
+ """Register instances before testing signal connections"""
+ signal_manager.SignalManager.register_adapter_instance(
+ 'proto_b', adapter)
+ yield
+
+ def test_connect_all_signals_success(self, bridge_core):
+ """Connect and emit signals for enabled protocols"""
+ sig = blinker.signal('sig_x')
+ # Remove any existing receivers
+ for receiver in list(sig.receivers.values()):
+ sig.disconnect(receiver)
+
+ signal_manager.SignalManager.connect_all_signals()
+ sig.send()
+ bridge_core.handle_x.assert_called_once()
+
+ def test_disconnect_all_signals(self, bridge_core):
+ """Disconnect previously connected signals"""
+ sig = blinker.signal('sig_x')
+ for receiver in list(sig.receivers.values()):
+ sig.disconnect(receiver)
+ signal_manager.SignalManager.connect_all_signals()
+
+ signal_manager.SignalManager.disconnect_all_signals()
+ bridge_core.handle_x.reset_mock()
+ sig.send()
+ bridge_core.handle_x.assert_not_called()
diff --git a/simulation_bridge/test/unit/test_template.py b/simulation_bridge/test/unit/test_template.py
new file mode 100644
index 0000000..98d19c2
--- /dev/null
+++ b/simulation_bridge/test/unit/test_template.py
@@ -0,0 +1,220 @@
+"""Unit tests for simulation_bridge.src.utils.template module."""
+
+from unittest import mock
+import pytest
+import simulation_bridge.src.utils.template as template_module
+
+
+@pytest.fixture
+def dummy_path(tmp_path):
+ """Provide a dummy filesystem path for tests."""
+ return tmp_path / "dummy_file"
+
+
+@pytest.fixture
+def mock_copy_resource_fixture():
+ """Mock copy_resource function to control its behavior."""
+ with mock.patch("simulation_bridge.src.utils.template.copy_resource") as m:
+ yield m
+
+
+@pytest.fixture
+def mock_print_fixture(monkeypatch):
+ """Capture print calls to verify output."""
+ mock_print_obj = mock.Mock()
+ monkeypatch.setattr("builtins.print", mock_print_obj)
+ return mock_print_obj
+
+
+class TestCreateDirectory: # pylint: disable=too-few-public-methods
+ """Test cases for create_directory function."""
+
+ def test_create_directory_created_and_skipped_and_error(self, monkeypatch,
+ mock_print_fixture): # pylint: disable=W0621
+ """Test create_directory status for existing, created and error cases."""
+ mock_print_fixture.reset_mock()
+
+ # Existing directory triggers 'skipped'
+ monkeypatch.setattr("os.path.exists", lambda path: True)
+ status, error = template_module.create_directory("dir", "anypath")
+ assert status == "skipped"
+ assert error is None
+ mock_print_fixture.assert_called_once()
+
+ mock_print_fixture.reset_mock()
+
+ # Successful creation triggers 'created'
+ monkeypatch.setattr("os.path.exists", lambda path: False)
+ monkeypatch.setattr("os.makedirs", lambda path, exist_ok: None)
+ status, error = template_module.create_directory("dir", "anypath")
+ assert status == "created"
+ assert error is None
+
+ # OSError triggers 'error'
+ def raise_oserror(*args, **kwargs):
+ raise OSError("fail")
+
+ monkeypatch.setattr("os.makedirs", raise_oserror)
+ status, error = template_module.create_directory("dir", "anypath")
+ assert status == "error"
+ assert "fail" in error
+
+
+class TestCreateFile: # pylint: disable=too-few-public-methods
+ """Test cases for create_file function."""
+
+ def test_create_file_skipped_created_error(self, monkeypatch, tmp_path,
+ mock_copy_resource_fixture, mock_print_fixture): # pylint: disable=W0621
+ """Test create_file for 'skipped', 'created' and 'error' outcomes."""
+ full_path = tmp_path / "file.txt"
+ mock_print_fixture.reset_mock()
+ mock_copy_resource_fixture.reset_mock()
+
+ # File exists -> skipped
+ monkeypatch.setattr("os.path.exists", lambda path: True)
+ status, error = template_module.create_file(
+ "file.txt", str(full_path), "pkg", "res"
+ )
+ assert status == "skipped"
+ assert error is None
+
+ # File does not exist, parent dir created, copy_resource succeeds
+ monkeypatch.setattr("os.path.exists", lambda path: False)
+ monkeypatch.setattr("os.makedirs", lambda path, exist_ok: None)
+ mock_copy_resource_fixture.return_value = True
+ status, error = template_module.create_file(
+ "file.txt", str(full_path), "pkg", "res"
+ )
+ assert status == "created"
+ assert error is None
+ mock_copy_resource_fixture.assert_called_once_with(
+ "pkg", "res", str(full_path))
+
+ # copy_resource fails
+ mock_copy_resource_fixture.reset_mock()
+ mock_copy_resource_fixture.return_value = False
+ status, error = template_module.create_file(
+ "file.txt", str(full_path), "pkg", "res"
+ )
+ assert status == "error"
+ assert "Failed to create" in error
+
+ # makedirs raises OSError
+ monkeypatch.setattr("os.path.exists", lambda path: False)
+ monkeypatch.setattr(
+ "os.makedirs", mock.Mock(
+ side_effect=OSError("fail")))
+ status, error = template_module.create_file(
+ "path/file.txt", "path/file.txt", "pkg", "res"
+ )
+ assert status == "error"
+ assert "Failed to create parent directory" in error
+
+
+def test_print_summary_outputs(mock_print_fixture): # pylint: disable=W0621
+ """Test print_summary outputs the expected summary."""
+ created = ["file1", "file2"]
+ skipped = ["file3"]
+ errors = ["error1"]
+ descriptions = {"file1": "desc1", "file2": "desc2", "file3": "desc3"}
+
+ template_module.print_summary(created, skipped, errors, descriptions)
+ assert mock_print_fixture.call_count > 5
+ mock_print_fixture.assert_any_call("Project generation summary:")
+
+
+class TestFileFunctions: # pylint: disable=too-few-public-methods
+ """Test cases for file-related utility functions."""
+
+ def test_get_files_to_generate_and_descriptions(self):
+ """Test get_files_to_generate and get_file_descriptions return dicts."""
+ files = template_module.get_files_to_generate()
+ desc = template_module.get_file_descriptions()
+ assert isinstance(files, dict)
+ assert isinstance(desc, dict)
+ assert "client/simulation.yaml" in files
+ assert "client/simulation.yaml" in desc
+
+
+class TestGenerateDefaultConfig: # pylint: disable=too-few-public-methods
+ """Test cases for generate_default_config function."""
+
+ def test_generate_default_config_existing_file(self, monkeypatch, mock_print_fixture): # pylint: disable=W0621
+ """Test generate_default_config does nothing if config file exists."""
+ monkeypatch.setattr("os.path.exists", lambda path: True)
+ template_module.generate_default_config()
+ mock_print_fixture.assert_any_call(
+ mock.ANY) # prints file exists message
+
+ def test_generate_default_config_copy_and_errors(self, monkeypatch, mock_print_fixture): # pylint: disable=W0621
+ """Test generate_default_config copies config or prints errors."""
+ mock_print_fixture.reset_mock()
+
+ # File doesn't exist, copy succeeds
+ monkeypatch.setattr("os.path.exists", lambda path: False)
+ monkeypatch.setattr(
+ template_module,
+ "copy_config_template",
+ lambda path: None)
+ template_module.generate_default_config()
+ mock_print_fixture.assert_any_call(mock.ANY) # prints success message
+
+ mock_print_fixture.reset_mock()
+
+ # File doesn't exist, copy raises FileNotFoundError
+ def raise_fnfe(path):
+ raise FileNotFoundError()
+
+ monkeypatch.setattr(template_module, "copy_config_template", raise_fnfe)
+ template_module.generate_default_config()
+ mock_print_fixture.assert_any_call(
+ "Error: Template configuration file not found.")
+
+ mock_print_fixture.reset_mock()
+
+ # File doesn't exist, copy raises OSError
+ def raise_oserror(path):
+ raise OSError("fail")
+
+ monkeypatch.setattr(
+ template_module,
+ "copy_config_template",
+ raise_oserror)
+ template_module.generate_default_config()
+ mock_print_fixture.assert_any_call(mock.ANY) # prints error message
+
+
+class TestGenerateDefaultProject: # pylint: disable=too-few-public-methods
+ """Test cases for generate_default_project function."""
+
+ def test_generate_default_project_flow(self, monkeypatch, mock_print_fixture): # pylint: disable=W0621
+ """Test generate_default_project runs flow with create_file/directory."""
+ monkeypatch.setattr(template_module, "get_files_to_generate", lambda: {
+ "dir/": ("pkg", "res"),
+ "file.txt": ("pkg", "res"),
+ })
+ monkeypatch.setattr(template_module, "get_file_descriptions", lambda: {
+ "dir/": "desc dir",
+ "file.txt": "desc file",
+ })
+ monkeypatch.setattr("os.path.join", lambda *args: "/".join(args))
+
+ statuses = iter([
+ ("created", None), # dir created
+ ("skipped", None), # file skipped
+ ])
+
+ def fake_create_directory(fp, full): # pylint: disable=unused-argument
+ return next(statuses)
+
+ def fake_create_file(fp, full, pkg, res): # pylint: disable=unused-argument
+ return next(statuses)
+
+ monkeypatch.setattr(
+ template_module,
+ "create_directory",
+ fake_create_directory)
+ monkeypatch.setattr(template_module, "create_file", fake_create_file)
+
+ template_module.generate_default_project()
+ assert mock_print_fixture.call_count > 5
diff --git a/src/simulation_bridge/README.md b/src/simulation_bridge/README.md
deleted file mode 100644
index 021d396..0000000
--- a/src/simulation_bridge/README.md
+++ /dev/null
@@ -1,96 +0,0 @@
-# Bidirectional RabbitMQ Messaging Architecture
-
-### System Overview
-
-The system facilitates seamless bidirectional communication between Digital Twin (DT), Mock Physical Twin (MockPT), Physical Twin (PT), and simulators using a RabbitMQ-based messaging bridge.
-
-### Key Components
-
-1. **Data Sources (DT, PT, MockPT)**
-
-- **Sending**: Generate data and publish it to the `ex.input.bridge` exchange with routing keys in the format `` (e.g., `dt`, `pt`).
-- **Receiving**: Receive simulation results via the `ex.bridge.result` exchange on dedicated queues (e.g., `Q.dt.result`).
-
-2. **Simulation Bridge**
-
-- **Input**: Receives messages from the `Q.bridge.input` queue and forwards them to simulators via the `ex.bridge.output` exchange.
-- **Output**: Receives simulation results from the `Q.bridge.result` queue and forwards them to data sources via the `ex.bridge.result` exchange.
-
-3. **Simulators**
-
-- **Input**: Listen on specific queues to receive simulation requests.
-- **Output**: Publish results to the `ex.sim.result` exchange with routing keys in the format `.result.`.
-
-### RabbitMQ Topology
-
-#### Exchanges
-
-| Exchange Name | Type | Description |
-| ------------------ | ----- | ----------------------------------------------- |
-| `ex.input.bridge` | Topic | Entry point for all incoming data. |
-| `ex.bridge.output` | Topic | Exit point for routing messages to simulators. |
-| `ex.sim.result` | Topic | Entry point for simulation results. |
-| `ex.bridge.result` | Topic | Exit point for routing results to data sources. |
-
-#### Queues
-
-| Queue Name | Binding Key | Description |
-| ------------------- | ----------- | ------------------------------------------------------------- |
-| `Q.bridge.input` | `#` | Receives all incoming messages. |
-| `Q.bridge.result` | `#` | Receives all simulation results. |
-| `Q.sim.` | `*.` | Dedicated queues for each simulation. |
-| `Q..result` | `*.result` | Dedicated queues for receiving results (e.g., `Q.dt.result`). |
-
-### Data Flow
-
-#### Request Flow (DT → Simulator)
-
-1. DT publishes to `ex.input.bridge` with routing key `dt`.
-2. The bridge receives the message from the `Q.bridge.input` queue.
-3. The bridge forwards the message to `ex.bridge.output` with routing key `dt.simX`.
-4. The simulator receives the message from its queue `Q.sim.X`.
-
-#### Response Flow (Simulator → DT)
-
-1. The simulator publishes to `ex.sim.result` with routing key `simX.result.dt`.
-2. The bridge receives the message from the `Q.bridge.result` queue.
-3. The bridge forwards the message to `ex.bridge.result` with routing key `simX.result`.
-4. DT receives the message from its queue `Q.dt.result`.
-5.
-
-## Flow Diagram
-
-
-
-
-
-### Instructions for Use
-
-1. Start RabbitMQ.
-2. Launch the bridge: `python bridge.py`.
-3. Start the simulations:
- ```bash
- python simulation.py simA
- python simulation.py simB
- ```
-4. Send messages:
- ```bash
- python dt.py
- ```
-
-## Author
-
-
-
-
-
diff --git a/src/simulation_bridge/config/config.yaml b/src/simulation_bridge/config/config.yaml
deleted file mode 100644
index c7d48e6..0000000
--- a/src/simulation_bridge/config/config.yaml
+++ /dev/null
@@ -1,43 +0,0 @@
-# config/bridge_config.yaml - Configurazione del bridge
-rabbitmq:
- host: localhost
- prefetch_count: 1
-
-infrastructure:
- exchanges:
- - name: ex.input.bridge
- type: topic
- durable: true
- - name: ex.bridge.output
- type: topic
- durable: true
- - name: ex.sim.result
- type: topic
- durable: true
- - name: ex.bridge.result
- type: topic
- durable: true
-
- queues:
- - name: Q.bridge.input
- durable: true
- - name: Q.bridge.result
- durable: true
- - name: Q.dt.result
- durable: true
- - name: Q.pt.result
- durable: true
-
- bindings:
- - queue: Q.bridge.input
- exchange: ex.input.bridge
- routing_key: "#"
- - queue: Q.bridge.result
- exchange: ex.sim.result
- routing_key: "#"
- - queue: Q.dt.result
- exchange: ex.bridge.result
- routing_key: "*.result"
- - queue: Q.pt.result
- exchange: ex.bridge.result
- routing_key: "*.result"
diff --git a/src/simulation_bridge/config_manager.py b/src/simulation_bridge/config_manager.py
deleted file mode 100644
index 6a8acf7..0000000
--- a/src/simulation_bridge/config_manager.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# config_manager.py - Gestione configurazione YAML
-import yaml
-import os
-from pathlib import Path
-
-class ConfigManager:
- def __init__(self, config_file='config/config.yaml'):
- self.config = None
- self.config_file = Path(config_file)
- self.load_config()
-
- def load_config(self):
- try:
- with open(self.config_file, 'r') as f:
- self.config = yaml.safe_load(f)
- except FileNotFoundError:
- raise Exception(f"File di configurazione non trovato: {self.config_file}")
- except yaml.YAMLError as e:
- raise Exception(f"Errore nel parsing del file YAML: {str(e)}")
-
-
- def get_rabbitmq_config(self):
- return self.config.get('rabbitmq', {})
-
- def get_infrastructure_config(self):
- return self.config.get('infrastructure', {})
\ No newline at end of file
diff --git a/src/simulation_bridge/core.py b/src/simulation_bridge/core.py
deleted file mode 100644
index ae9d2de..0000000
--- a/src/simulation_bridge/core.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# core.py - Base components for message routing
-from typing import List, Dict, Any
-import pika
-from .utils.logger import get_logger
-
-class RabbitMQConnection:
- def __init__(self, host: str = 'localhost') -> None:
- self.connection_params: pika.ConnectionParameters = pika.ConnectionParameters(host)
- self.connection: pika.BlockingConnection | None = None
- self.channel: pika.adapters.blocking_connection.BlockingChannel | None = None
-
- def connect(self) -> pika.adapters.blocking_connection.BlockingChannel:
- """Establish a connection to RabbitMQ and return the channel."""
- self.connection = pika.BlockingConnection(self.connection_params)
- self.channel = self.connection.channel()
- return self.channel
-
- def close(self) -> None:
- """Close the RabbitMQ connection if it is open."""
- if self.connection and self.connection.is_open:
- self.connection.close()
-
-class InfrastructureManager:
- def __init__(self, channel: pika.adapters.blocking_connection.BlockingChannel) -> None:
- self.channel: pika.adapters.blocking_connection.BlockingChannel = channel
- self.logger = get_logger()
-
- def setup_exchanges(self, exchanges: List[Dict[str, Any]]) -> None:
- """Declare exchanges based on the provided configuration."""
- for exchange in exchanges:
- self.channel.exchange_declare(
- exchange=exchange['name'],
- exchange_type=exchange['type'],
- durable=exchange['durable']
- )
- self.logger.debug(
- "Declared exchange: %s (type: %s, durable: %s)",
- exchange['name'],
- exchange['type'],
- exchange['durable']
- )
-
- def setup_queues(self, queues: List[Dict[str, Any]]) -> None:
- """Declare queues based on the provided configuration."""
- for queue in queues:
- self.channel.queue_declare(
- queue=queue['name'],
- durable=queue['durable']
- )
- self.logger.debug(
- "Declared queue: %s (durable: %s)",
- queue['name'],
- queue['durable']
- )
-
- def setup_bindings(self, bindings: List[Dict[str, Any]]) -> None:
- """Create bindings between queues and exchanges."""
- for binding in bindings:
- self.channel.queue_bind(
- queue=binding['queue'],
- exchange=binding['exchange'],
- routing_key=binding['routing_key']
- )
- self.logger.debug(
- "Created binding: %s -> %s (%s)",
- binding['queue'],
- binding['exchange'],
- binding['routing_key']
- )
-
-class BaseMessageHandler:
- def __init__(self, channel: pika.adapters.blocking_connection.BlockingChannel) -> None:
- self.channel: pika.adapters.blocking_connection.BlockingChannel = channel
-
- def handle(self, ch: pika.adapters.blocking_connection.BlockingChannel,
- method: pika.spec.Basic.Deliver,
- properties: pika.spec.BasicProperties,
- body: bytes) -> None:
- """Handle incoming messages. Must be implemented by subclasses."""
- raise NotImplementedError("Must be implemented by subclasses")
-
- def ack_message(self, ch: pika.adapters.blocking_connection.BlockingChannel, delivery_tag: int) -> None:
- """Acknowledge the message."""
- ch.basic_ack(delivery_tag)
-
- def nack_message(self, ch: pika.adapters.blocking_connection.BlockingChannel, delivery_tag: int) -> None:
- """Negatively acknowledge the message."""
- ch.basic_nack(delivery_tag)
\ No newline at end of file
diff --git a/src/simulation_bridge/main.py b/src/simulation_bridge/main.py
deleted file mode 100644
index 21a19a6..0000000
--- a/src/simulation_bridge/main.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# main.py
-from .simulation_bridge import SimulationBridge
-from .utils.logger import setup_logger
-import logging
-from typing import Optional
-import os
-from pathlib import Path
-
-os.chdir(Path(__file__).parent)
-
-
-def main() -> None:
- logger: logging.Logger = setup_logger(level=logging.INFO)
- bridge: Optional[SimulationBridge] = None
- try:
- logger.info("Starting Simulation Bridge...")
- bridge = SimulationBridge()
- bridge.start()
- except KeyboardInterrupt:
- logger.info("Stopping application via interrupt")
- if bridge:
- bridge.conn.close()
- except Exception as e:
- logger.critical(f"Critical error: {str(e)}", exc_info=True)
- raise
-
-if __name__ == "__main__":
- main()
diff --git a/src/simulation_bridge/simulation_bridge.py b/src/simulation_bridge/simulation_bridge.py
deleted file mode 100644
index 555f87e..0000000
--- a/src/simulation_bridge/simulation_bridge.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# simulation_bridge.py
-from .core import RabbitMQConnection, InfrastructureManager, BaseMessageHandler
-from .config_manager import ConfigManager
-from .utils.logger import get_logger
-from typing import Any, Dict
-import json
-import yaml
-from pika.adapters.blocking_connection import BlockingChannel
-from pika.spec import Basic, BasicProperties
-
-logger = get_logger()
-
-class SimulationInputMessageHandler(BaseMessageHandler):
- """Handler for incoming messages from DTs to simulators"""
- def handle(
- self,
- ch: BlockingChannel,
- method: Basic.Deliver,
- properties: BasicProperties,
- body: bytes
- ) -> None:
-
- try:
- source: str = method.routing_key
-
- # Load the message body as YAML
- msg: Dict[str, Any] = yaml.safe_load(body)
-
- # Log the received message
- logger.debug(
- "Received input message from %s: %s",
- source,
- msg,
- extra={'message_id': properties.message_id}
- )
-
- # Forward the message to all destinations
- for dest in msg.get('destinations', []):
- routing_key: str = f"{source}.{dest}"
- self.channel.basic_publish(
- exchange='ex.bridge.output',
- routing_key=routing_key,
- body=body, # Message body remains unchanged (in YAML)
- properties=properties
- )
- logger.debug(
- "Input message forwarded to %s",
- routing_key,
- extra={'message_id': properties.message_id}
- )
-
- # Acknowledge the message
- self.ack_message(ch, method.delivery_tag)
- except yaml.YAMLError as e:
- # Handle YAML errors
- logger.error(
- "YAML decoding error: %s",
- str(e),
- extra={'body': body, 'delivery_tag': method.delivery_tag}
- )
- self.nack_message(ch, method.delivery_tag)
- except Exception as e:
- # Handle generic errors
- logger.exception(
- "Error processing message: %s",
- str(e),
- extra={'delivery_tag': method.delivery_tag}
- )
- self.nack_message(ch, method.delivery_tag)
-
-class SimulationResultMessageHandler(BaseMessageHandler):
- """Handler for result messages from simulators to DTs"""
- def handle(
- self,
- ch: BlockingChannel,
- method: Basic.Deliver,
- properties: BasicProperties,
- body: bytes
- ) -> None:
- try:
- # The routing key will be in the format: sim.result.
- parts: list[str] = method.routing_key.split('.')
- if len(parts) < 3:
- logger.error(
- "Invalid routing key format for result message: %s",
- method.routing_key,
- extra={'delivery_tag': method.delivery_tag}
- )
- self.nack_message(ch, method.delivery_tag)
- return
-
- source: str = parts[0] # simulator
- destination: str = parts[2] # recipient (dt, pt, etc)
-
- # Load the message body as YAML
- msg: Dict[str, Any] = yaml.safe_load(body)
-
- # Log the received message
- logger.debug(
- "Received result message from %s to %s: %s",
- source,
- destination,
- msg,
- extra={'message_id': properties.message_id}
- )
-
- # Forward the message to the recipient
- routing_key: str = f"{source}.result"
- self.channel.basic_publish(
- exchange='ex.bridge.result',
- routing_key=routing_key,
- body=body, # Message body remains unchanged (in YAML)
- properties=properties
- )
- logger.debug(
- "Result message forwarded to %s via %s",
- destination,
- routing_key,
- extra={'message_id': properties.message_id}
- )
-
- # Acknowledge the message
- self.ack_message(ch, method.delivery_tag)
- except yaml.YAMLError as e:
- # Handle YAML errors
- logger.error(
- "YAML decoding error in result message: %s",
- str(e),
- extra={'body': body, 'delivery_tag': method.delivery_tag}
- )
- self.nack_message(ch, method.delivery_tag)
- except Exception as e:
- # Handle generic errors
- logger.exception(
- "Error processing result message: %s",
- str(e),
- extra={'delivery_tag': method.delivery_tag}
- )
- self.nack_message(ch, method.delivery_tag)
-
-class SimulationBridge:
- def __init__(self) -> None:
- self.config: ConfigManager = ConfigManager()
- rmq_config: Dict[str, Any] = self.config.get_rabbitmq_config()
-
- logger.debug("Initializing RabbitMQ connection")
- self.conn: RabbitMQConnection = RabbitMQConnection(host=rmq_config.get('host', 'localhost'))
- self.channel: BlockingChannel = self.conn.connect()
-
- # Handler for incoming messages (from DTs to simulators)
- self.input_handler: SimulationInputMessageHandler = SimulationInputMessageHandler(self.channel)
-
- # Handler for result messages (from simulators to DTs)
- self.result_handler: SimulationResultMessageHandler = SimulationResultMessageHandler(self.channel)
-
- self.setup_infrastructure()
-
- def setup_infrastructure(self) -> None:
- logger.debug("Configuring RabbitMQ infrastructure")
- infra_config: Dict[str, Any] = self.config.get_infrastructure_config()
-
- # Configuring exchanges
- try:
- logger.debug("Configuring exchanges...")
- im: InfrastructureManager = InfrastructureManager(self.channel)
- im.setup_exchanges(infra_config.get('exchanges', []))
- logger.debug("Exchanges configured successfully")
- except Exception as e:
- logger.error(f"Error during exchange configuration: {str(e)}")
- raise # Re-raise the exception to stop the process in case of error
-
- # Configuring queues
- try:
- logger.debug("Configuring queues...")
- im.setup_queues(infra_config.get('queues', []))
- logger.debug("Queues configured successfully")
- except Exception as e:
- logger.error(f"Error during queue configuration: {str(e)}")
- raise # Re-raise the exception to stop the process in case of error
-
- # Configuring bindings
- try:
- logger.debug("Configuring bindings...")
- im.setup_bindings(infra_config.get('bindings', []))
- logger.debug("Bindings configured successfully")
- except Exception as e:
- logger.error(f"Error during binding configuration: {str(e)}")
- raise # Re-raise the exception to stop the process in case of error
-
- logger.info("RabbitMQ infrastructure configured successfully")
-
- def start(self) -> None:
- rmq_config: Dict[str, Any] = self.config.get_rabbitmq_config()
- self.channel.basic_qos(prefetch_count=rmq_config.get('prefetch_count', 1))
-
- # Consume input messages (from DTs to simulators)
- self.channel.basic_consume(
- queue='Q.bridge.input',
- on_message_callback=self.input_handler.handle
- )
-
- # Consume result messages (from simulators to DTs)
- self.channel.basic_consume(
- queue='Q.bridge.result',
- on_message_callback=self.result_handler.handle
- )
-
- logger.info("Simulation Bridge Running")
- self.channel.start_consuming()
\ No newline at end of file
diff --git a/tests/dt/dt_rabbitmq.py b/tests/dt/dt_rabbitmq.py
deleted file mode 100644
index 7a54a50..0000000
--- a/tests/dt/dt_rabbitmq.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import pika
-import yaml
-import os
-import threading
-import uuid
-import sys
-
-class DigitalTwin:
- def __init__(self, dt_id="dt"):
- self.dt_id = dt_id
- self.connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
- self.channel = self.connection.channel()
- self.setup_infrastructure()
-
- def setup_infrastructure(self):
- # Exchange to send commands to simulators
- self.channel.exchange_declare(
- exchange='ex.input.bridge',
- exchange_type='topic',
- durable=True
- )
-
- # Exchange to receive results from simulators
- self.channel.exchange_declare(
- exchange='ex.bridge.result',
- exchange_type='topic',
- durable=True
- )
-
- # Queue to receive results
- self.result_queue_name = f'Q.{self.dt_id}.result'
- self.channel.queue_declare(queue=self.result_queue_name, durable=True)
- self.channel.queue_bind(
- exchange='ex.bridge.result',
- queue=self.result_queue_name,
- routing_key=f"*.result" # Receive all results
- )
-
- def send_simulation_request(self, destinations, payload_data):
- """
- Sends a simulation request to the specified simulators.
- """
- # Add the 'destinations' list to the payload
- payload = {
- **payload_data, # Add payload data
- 'destinations': destinations, # Add the list of destinations
- 'request_id': str(uuid.uuid4()) # Unique identifier for the request
- }
-
- # Serialize the payload in YAML format
- payload_yaml = yaml.dump(payload, default_flow_style=False)
-
- # Publish the message to RabbitMQ
- self.channel.basic_publish(
- exchange='ex.input.bridge',
- routing_key=self.dt_id, # e.g., 'dt', 'pt', 'mockpt'
- body=payload_yaml,
- properties=pika.BasicProperties(
- delivery_mode=2,
- content_type='application/x-yaml',
- message_id=str(uuid.uuid4())
- )
- )
- print(f" [{self.dt_id.upper()}] Simulation request sent to {destinations}: {payload}")
-
- def handle_result(self, ch, method, properties, body):
- """
- Handles simulation results.
- """
- try:
- # Extract information from the routing key
- source = method.routing_key.split('.')[0] # Simulator that sent the result
-
- # Load the message body as YAML
- result = yaml.safe_load(body)
-
- print(f"\n[{self.dt_id.upper()}] Received result from {source}:")
- print(f"Result: {result}")
- print("-" * 50)
-
- # Acknowledge the message
- ch.basic_ack(method.delivery_tag)
-
- except yaml.YAMLError as e:
- print(f"Error decoding YAML result: {e}")
- ch.basic_nack(method.delivery_tag)
- except Exception as e:
- print(f"Error processing the result: {e}")
- ch.basic_nack(method.delivery_tag)
-
- def start_listening(self):
- """
- Starts listening for simulation results.
- """
- self.channel.basic_consume(
- queue=self.result_queue_name,
- on_message_callback=self.handle_result
- )
- print(f" [{self.dt_id.upper()}] Listening for simulation results...")
- self.channel.start_consuming()
-
- def load_yaml_file(self, file_path):
- """
- Loads the content of a YAML file.
- """
- with open(file_path, 'r') as file:
- return yaml.safe_load(file)
-
-
-def start_dt_listener(dt_id):
- """
- Function to start listening for results in a separate thread.
- """
- dt = DigitalTwin(dt_id)
- dt.start_listening()
-
-
-if __name__ == "__main__":
- dt_id = "dt"
- if len(sys.argv) > 1:
- dt_id = sys.argv[1]
-
- # Start the listener thread for results
- listener_thread = threading.Thread(target=start_dt_listener, args=(dt_id,))
- listener_thread.daemon = True # The thread will terminate when the main program ends
- listener_thread.start()
-
- # Create a main instance to send requests
- dt = DigitalTwin(dt_id)
-
- # Load the simulation.yaml file from the same folder
- base_dir = os.path.dirname(os.path.abspath(__file__))
- yaml_file_path = os.path.join(base_dir, 'simulation.yaml')
-
- try:
- # Load the payload from the YAML file
- simulation_payload = dt.load_yaml_file(yaml_file_path)
-
- # Send the simulation request
- dt.send_simulation_request(
- destinations=['matlab'], # You can specify multiple destinations: ['simA', 'simB']
- payload_data=simulation_payload
- )
-
- print("\nPress Ctrl+C to terminate the program...")
- # Keep the program running to receive results
- while True:
- pass
-
- except KeyboardInterrupt:
- print("\nProgram terminated by the user.")
- except Exception as e:
- print(f"Error: {e}")
\ No newline at end of file
diff --git a/tests/dt/simulation.yaml b/tests/dt/simulation.yaml
deleted file mode 100644
index 8c31902..0000000
--- a/tests/dt/simulation.yaml
+++ /dev/null
@@ -1,48 +0,0 @@
-simulation:
- simulator: matlab
- type: batch # Type of simulation (batch,streaming)
- file: simulation_batch.m # Name of the simulation file
- inputs:
- x_i: 10
- y_i: 5
- z_i: 0
- v_x: 1
- v_y: 2
- v_z: 3
- t: 10
- outputs:
- x_f: Final x position
- y_f: Final y position
- z_f: Final z position
-# simulation:
-# simulator: matlab
-# type: batch # Type of simulation (batch,streaming)
-# file: simulation_batch_1.m # Name of the simulation file
-# inputs:
-# x0: 10 # Initial x position (m)
-# y0: 5 # Initial y position (m)
-# speed0: 3 # Initial speed (m/s)
-# acceleration: 0.5 # Acceleration (m/s^2)
-# angle_deg: 45 # Initial direction (degrees, 0-360°)
-# simulation_time: 10 # Total simulation time (s)
-# outputs:
-# x_final: "Final x position (m)"
-# y_final: "Final y position (m)"
-# speed_final: "Final speed (m/s)"
-# time_elapsed: "Actual simulation time (s)"
-# simulation:
-# simulator: matlab
-# type: streaming
-# file: simulation_streaming.m
-# inputs:
-# num_agents: 8 # Number of agents
-# max_steps: 200 # Max steps for the simulation
-# avoidance_threshold: 1 # Minimum distance to avoid collision
-# show_agent_index: 1 # Index of the agent to show
-# use_gui: false # GUI flag
-# outputs:
-# time: float # execution time
-# current_step: int # current step of the simulation
-# positions: "[[float, float]]" # positions of the agents
-# velocities: "[[float, float]]" # velocities of the agents
-# running: bool # running flag
diff --git a/tests/simulations/matlab/SimulationWrapper.m b/tests/simulations/matlab/SimulationWrapper.m
deleted file mode 100644
index 32a2ead..0000000
--- a/tests/simulations/matlab/SimulationWrapper.m
+++ /dev/null
@@ -1,61 +0,0 @@
-classdef SimulationWrapper < handle
- properties (Access = private)
- tcp_client % TCP client object for communication with Python
- inputs % Store the inputs received from Python
- end
-
- methods
- % Constructor for the SimulationWrapper class
- function obj = SimulationWrapper()
- % Default port (modifiable)
- port = 5678;
-
- % Max retries for connecting to the server
- max_retries = 5;
- retry_delay = 1; % Delay between retries in seconds
-
- % Try to connect to the server up to 'max_retries' times
- for retry = 1:max_retries
- try
- % Create a TCP client object to connect to Python server
- obj.tcp_client = tcpclient('localhost', port);
- % Configure the TCP client to use LF as a terminator
- configureTerminator(obj.tcp_client, "LF");
- break; % Exit the loop if the connection is successful
- catch ME
- % If connection fails, retry up to 'max_retries' times
- if retry == max_retries
- % If max retries reached, rethrow the exception
- rethrow(ME);
- end
- % Wait before retrying
- pause(retry_delay);
- end
- end
-
- % Receive the initial parameters in JSON format from Python
- data = readline(obj.tcp_client);
- % Decode the received JSON data and store it as 'inputs'
- obj.inputs = jsondecode(data);
- end
-
- % Method to retrieve the input parameters from the Python server
- function inputs = get_inputs(obj)
- inputs = obj.inputs; % Return the stored inputs
- end
-
- % Method to send output data to the Python server
- function send_output(obj, output_data)
- % Convert the output data to JSON format
- json_data = jsonencode(output_data);
- % Send the JSON-encoded data to Python server
- writeline(obj.tcp_client, json_data);
- end
-
- % Destructor to clean up the TCP client object when the wrapper is deleted
- function delete(obj)
- % Close the TCP connection by deleting the client object
- delete(obj.tcp_client);
- end
- end
-end
diff --git a/tests/simulations/matlab/simulation_1.m b/tests/simulations/matlab/simulation_1.m
deleted file mode 100644
index 4a0ae39..0000000
--- a/tests/simulations/matlab/simulation_1.m
+++ /dev/null
@@ -1,50 +0,0 @@
-function [x_final, y_final, speed_final, time_elapsed] = simulation_1(x0, y0, speed0, acceleration, angle_deg, simulation_time)
- % SIMULAZIONE MOVIMENTO AUTO 2D
- % Input:
- % x0, y0: posizione iniziale (m)
- % speed0: velocità iniziale (m/s)
- % acceleration: accelerazione (m/s^2)
- % angle_deg: direzione iniziale in gradi (0-360°)
- % simulation_time: tempo totale simulazione (s)
- %
- % Output:
- % x_final, y_final: posizione finale (m)
- % speed_final: velocità finale (m/s)
- % time_elapsed: tempo effettivo simulato (s)
-
- % Converti angolo in radianti
- angle_rad = deg2rad(angle_deg);
-
- % Componenti vettoriali della velocità iniziale
- vx0 = speed0 * cos(angle_rad);
- vy0 = speed0 * sin(angle_rad);
-
- % Componenti dell'accelerazione
- ax = acceleration * cos(angle_rad);
- ay = acceleration * sin(angle_rad);
-
- % Calcolo tempo effettivo (evita casi con accelerazione negativa e velocità 0)
- if acceleration >= 0
- time_elapsed = simulation_time;
- else
- % Tempo fino a fermarsi (v = v0 + a*t => t = -v0/a)
- stop_time = -speed0/acceleration;
- time_elapsed = min(simulation_time, stop_time);
- end
-
- % Equazioni del moto uniformemente accelerato
- x_final = x0 + vx0 * time_elapsed + 0.5 * ax * time_elapsed^2;
- y_final = y0 + vy0 * time_elapsed + 0.5 * ay * time_elapsed^2;
-
- % Velocità finale
- speed_final = speed0 + acceleration * time_elapsed;
- if speed_final < 0
- speed_final = 0; % L'auto non può andare indietro in questo modello
- end
-
- % Debug info (opzionale)
- fprintf('Simulazione completata:\n');
- fprintf('Tempo simulato: %.2f s\n', time_elapsed);
- fprintf('Posizione finale: (%.2f, %.2f) m\n', x_final, y_final);
- fprintf('Velocità finale: %.2f m/s\n\n', speed_final);
-end
\ No newline at end of file
diff --git a/tests/simulations/matlab/simulation_agent.m b/tests/simulations/matlab/simulation_agent.m
deleted file mode 100644
index efc3684..0000000
--- a/tests/simulations/matlab/simulation_agent.m
+++ /dev/null
@@ -1,93 +0,0 @@
-function simulation_agent(steps, show_agent, use_gui)
- global sim_running agent_data;
- sim_running = true;
-
- % 🔧 Crea la cartella matfile se non esiste
- if ~exist('matfile', 'dir')
- mkdir('matfile');
- end
-
-
- if nargin < 1
- steps = 10;
- end
- if nargin < 2
- show_agent = 1;
- end
- if nargin < 3
- use_gui = false;
- end
-
- num_agents = 3;
- positions = zeros(num_agents, 2);
- velocities = zeros(num_agents, 2);
-
- agent_data = struct(...
- 'positions', positions, ...
- 'velocities', velocities, ...
- 'time', 0, ...
- 'current_step', 0, ...
- 'running', true ...
- );
-
- if use_gui
- fig = figure('Name', 'Simulazione Agenti', 'NumberTitle', 'off');
- hold on;
- axis([-10 10 -10 10]);
- grid on;
- title('Simulazione Movimento Agenti');
- xlabel('X');
- ylabel('Y');
- colors = lines(num_agents);
- h = gobjects(num_agents,1);
- for i = 1:num_agents
- h(i) = plot(positions(i,1), positions(i,2), 'o', ...
- 'MarkerSize', 10, 'MarkerFaceColor', colors(i,:), ...
- 'DisplayName', sprintf('Agente %d', i));
- end
- legend show;
- end
-
- tic;
-
- while sim_running
- for i = 1:num_agents
- velocities(i,:) = 0.9 * velocities(i,:) + 0.1 * randn(1,2);
- positions(i,:) = positions(i,:) + velocities(i,:);
- if use_gui
- set(h(i), 'XData', positions(i,1), 'YData', positions(i,2));
- end
- end
-
- agent_data.positions = positions;
- agent_data.velocities = velocities;
- agent_data.time = toc;
- agent_data.current_step = agent_data.current_step + 1;
- agent_data.running = true;
-
- assignin('base', 'agent_data', agent_data);
- % Salva lo stato su file per Python
- save('matfile/agent_data_tmp.mat', 'agent_data');
- movefile('matfile/agent_data_tmp.mat', 'matfile/agent_data.mat');
-
-
- pause(0.1);
-
- if agent_data.current_step >= steps
- sim_running = false;
- end
- end
-
- agent_data.running = false;
- assignin('base', 'agent_data', agent_data);
-
- if show_agent >= 1 && show_agent <= num_agents
- fprintf('\nStato finale Agente %d:\n', show_agent);
- fprintf('Posizione: [%.2f, %.2f]\n', positions(show_agent,1), positions(show_agent,2));
- fprintf('Velocità: [%.2f, %.2f]\n', velocities(show_agent,1), velocities(show_agent,2));
- end
-
- if use_gui
- close(fig);
- end
-end
diff --git a/tests/simulations/matlab/simulation_agent_2.m b/tests/simulations/matlab/simulation_agent_2.m
deleted file mode 100644
index a4c1da6..0000000
--- a/tests/simulations/matlab/simulation_agent_2.m
+++ /dev/null
@@ -1,106 +0,0 @@
-function simulation_dynamic_agents(num_agents, max_steps, avoidance_threshold, show_agent_index, use_gui)
- global sim_running simulation_data;
- sim_running = true;
-
- % 🔧 Crea la cartella matfile se non esiste
- if ~exist('matfile', 'dir')
- mkdir('matfile');
- end
-
- % Parametri di simulazione
- positions = rand(num_agents, 2) * 20 - 10; % Posizioni casuali in uno spazio 20x20
- velocities = zeros(num_agents, 2); % Velocità iniziali
-
- % Dati della simulazione
- simulation_data = struct(...
- 'positions', positions, ...
- 'velocities', velocities, ...
- 'time', 0, ...
- 'current_step', 0, ...
- 'running', true ...
- );
-
- % Impostazioni GUI (se richiesto)
- if use_gui
- fig = figure('Name', 'Simulazione Agenti Dinamici', 'NumberTitle', 'off');
- hold on;
- axis([-10 10 -10 10]);
- grid on;
- title('Simulazione Agenti Dinamici con Evitamento di Collisioni');
- xlabel('X');
- ylabel('Y');
- colors = lines(num_agents);
- h = gobjects(num_agents,1);
- for i = 1:num_agents
- h(i) = plot(positions(i,1), positions(i,2), 'o', ...
- 'MarkerSize', 10, 'MarkerFaceColor', colors(i,:), ...
- 'DisplayName', sprintf('Agente %d', i));
- end
- legend show;
- end
-
- tic; % Timer per la simulazione
-
- while sim_running
- for i = 1:num_agents
- % Velocità random con piccole variazioni
- velocities(i,:) = 0.9 * velocities(i,:) + 0.1 * randn(1,2);
-
- % Movimento dell'agente
- positions(i,:) = positions(i,:) + velocities(i,:);
-
- % Evita collisioni con altri agenti
- for j = 1:num_agents
- if i ~= j
- distance = norm(positions(i,:) - positions(j,:));
- if distance < avoidance_threshold % Se due agenti sono troppo vicini
- % Modifica la velocità per evitare la collisione
- velocities(i,:) = velocities(i,:) + (positions(i,:) - positions(j,:)) * 0.1;
- end
- end
- end
-
- % Aggiorna la GUI se necessaria
- if use_gui
- set(h(i), 'XData', positions(i,1), 'YData', positions(i,2));
- end
- end
-
- % Aggiorna i dati della simulazione
- simulation_data.positions = positions;
- simulation_data.velocities = velocities;
- simulation_data.time = toc;
- simulation_data.current_step = simulation_data.current_step + 1;
- simulation_data.running = true;
-
- % Salva i dati su file per real-time
- save('matfile/simulation_data_tmp.mat', 'simulation_data');
- movefile('matfile/simulation_data_tmp.mat', 'matfile/simulation_data.mat');
-
- pause(0.1); % Pausa tra gli step della simulazione
-
- % Controlla se raggiungere il numero massimo di step
- if simulation_data.current_step >= max_steps
- sim_running = false; % Termina la simulazione dopo il numero di step
- end
- end
-
- simulation_data.running = false;
- assignin('base', 'simulation_data', simulation_data);
-
- % Mostra lo stato finale dell'agente selezionato
- if show_agent_index >= 1 && show_agent_index <= num_agents
- fprintf('\nStato finale Agente %d:\n', show_agent_index);
- fprintf('Posizione: [%.2f, %.2f]\n', positions(show_agent_index,1), positions(show_agent_index,2));
- fprintf('Velocità: [%.2f, %.2f]\n', velocities(show_agent_index,1), velocities(show_agent_index,2));
- end
-
- % Chiudi la GUI se utilizzata
- if use_gui
- close(fig);
- end
-
- % Aggiungi lo stato finale come output
- status = struct('completed', true, 'final_step', simulation_data.current_step);
- assignin('base', 'status', status);
-end
diff --git a/tests/simulations/matlab/simulation_batch.m b/tests/simulations/matlab/simulation_batch.m
deleted file mode 100644
index 55818d9..0000000
--- a/tests/simulations/matlab/simulation_batch.m
+++ /dev/null
@@ -1,9 +0,0 @@
-% simulation.m
-
-function [x_f, y_f, z_f] = simulation(x_i, y_i, z_i, v_x, v_y, v_z, t)
- % Calcola la posizione futura della pallina
- x_f = x_i + v_x * t;
- y_f = y_i + v_y * t;
- z_f = z_i + v_z * t;
-end
-
diff --git a/tests/simulations/matlab/simulation_batch_1.m b/tests/simulations/matlab/simulation_batch_1.m
deleted file mode 100644
index 8fcd030..0000000
--- a/tests/simulations/matlab/simulation_batch_1.m
+++ /dev/null
@@ -1,50 +0,0 @@
-function [x_final, y_final, speed_final, time_elapsed] = simulation_1(x0, y0, speed0, acceleration, angle_deg, simulation_time)
- % SIMULAZIONE MOVIMENTO AUTO 2D
- % Input:
- % x0, y0: posizione iniziale (m)
- % speed0: velocità iniziale (m/s)
- % acceleration: accelerazione (m/s^2)
- % angle_deg: direzione iniziale in gradi (0-360°)
- % simulation_time: tempo totale simulazione (s)
- %
- % Output:
- % x_final, y_final: posizione finale (m)
- % speed_final: velocità finale (m/s)
- % time_elapsed: tempo effettivo simulato (s)
-
- % Converti angolo in radianti
- angle_rad = deg2rad(angle_deg);
-
- % Componenti vettoriali della velocità iniziale
- vx0 = speed0 * cos(angle_rad);
- vy0 = speed0 * sin(angle_rad);
-
- % Componenti dell'accelerazione
- ax = acceleration * cos(angle_rad);
- ay = acceleration * sin(angle_rad);
-
- % Calcolo tempo effettivo (evita casi con accelerazione negativa e velocità 0)
- if acceleration >= 0
- time_elapsed = simulation_time;
- else
- % Tempo fino a fermarsi (v = v0 + a*t => t = -v0/a)
- stop_time = -speed0/acceleration;
- time_elapsed = min(simulation_time, stop_time);
- end
-
- % Equazioni del moto uniformemente accelerato
- x_final = x0 + vx0 * time_elapsed + 0.5 * ax * time_elapsed^2;
- y_final = y0 + vy0 * time_elapsed + 0.5 * ay * time_elapsed^2;
-
- % Velocità finale
- speed_final = speed0 + acceleration * time_elapsed;
- if speed_final < 0
- speed_final = 0; % L'auto non può andare indietro in questo modello
- end
-
- % Debug info (opzionale)
- fprintf('Simulazione completata:\n');
- fprintf('Tempo simulato: %.2f s\n', time_elapsed);
- fprintf('Posizione finale: (%.2f, %.2f) m\n', x_final, y_final);
- fprintf('Velocità finale: %.2f m/s\n\n', speed_final);
-end
diff --git a/tests/simulations/matlab/simulation_hybrid.m b/tests/simulations/matlab/simulation_hybrid.m
deleted file mode 100644
index 06cf795..0000000
--- a/tests/simulations/matlab/simulation_hybrid.m
+++ /dev/null
@@ -1,212 +0,0 @@
-%% Inizializzazione parametri globali
-dt = 0.1; % Time step in secondi
-roadRadius = 800; % Raggio del circuito urbano in metri
-numIntersections = 6; % Numero di incroci/strisce pedonali
-carSpeedMax = 3; % m/s (~54 km/h)
-carAccel = 0.2; % Accelerazione massima (m/s²)
-carDecel = 0.4; % Decelerazione massima (m/s²)
-safetyMargin = 5; % Margine di sicurezza aggiuntivo (metri)
-
-%% 1. Definizione topologia urbana
-intersectionAngles = linspace(0, 2*pi, numIntersections+1);
-intersectionAngles(end) = [];
-global trafficLights;
-trafficLights = struct(...
- 'position', num2cell(intersectionAngles),...
- 'state', repmat({'green'}, 1, numIntersections),...
- 'timer', num2cell(randi([500, 600], 1, numIntersections)),...
- 'pedestrianRequest', num2cell(false(1, numIntersections)));
-
-
-if ~exist('matfile', 'dir')
- mkdir('matfile');
-end
-
-%% 2. Inizializzazione veicolo autonomo
-car = struct(...
- 'theta', 0, ...
- 'speed', 0, ...
- 'nextLight', 1, ...
- 'stopped', false);
-
-%% 3. Interfaccia grafica unica
-mainFig = figure('Position', [100 100 1200 600]);
-tiledlayout(mainFig,1,2);
-
-% Vista città
-axSim = nexttile;
-hold(axSim, 'on');
-axis(axSim, 'equal');
-xlim(axSim, [-roadRadius*1.2 roadRadius*1.2]);
-ylim(axSim, [-roadRadius*1.2 roadRadius*1.2]);
-title(axSim, 'Simulazione Smart City - Traffico Urbano');
-xlabel(axSim, 'X (m)');
-ylabel(axSim, 'Y (m)');
-
-theta = linspace(0, 2*pi, 100);
-plot(axSim, roadRadius*cos(theta), roadRadius*sin(theta), 'k-', 'LineWidth', 2);
-
-for i = 1:numIntersections
- angle = intersectionAngles(i);
- plot(axSim, [roadRadius*0.9*cos(angle), roadRadius*1.1*cos(angle)],...
- [roadRadius*0.9*sin(angle), roadRadius*1.1*sin(angle)],...
- 'k--', 'LineWidth', 1.5);
-end
-
-carPlot = plot(axSim, 0, 0, 'bo', 'MarkerSize', 12, 'MarkerFaceColor', 'b');
-lightPlots = gobjects(numIntersections,1);
-for i = 1:numIntersections
- [x,y] = pol2cart(trafficLights(i).position, roadRadius);
- lightPlots(i) = plot(axSim, x, y, 's', 'MarkerSize', 18, 'LineWidth', 3,...
- 'MarkerEdgeColor', [0 0.7 0]);
-end
-
-% Pannello di controllo
-controlPanel = nexttile;
-title(controlPanel, 'Controllo Semafori');
-axis(controlPanel, 'off');
-
-lightPopups = gobjects(numIntersections,1);
-for i = 1:numIntersections
- uicontrol(mainFig, 'Style','text','Units','normalized',...
- 'Position',[0.72 0.9 - (i-1)*0.12 0.05 0.04],...
- 'String',['S' num2str(i)], 'FontWeight','bold');
-
- lightPopups(i) = uicontrol(mainFig, 'Style','popupmenu',...
- 'Units','normalized',...
- 'Position',[0.77 0.9 - (i-1)*0.12 0.1 0.04],...
- 'String',{'green','yellow','red'},...
- 'Callback',@(src,~) changeLightState(src,i));
-end
-
-% Callback cambio stato semafori
-function changeLightState(src, lightIndex)
- global trafficLights;
- newState = src.String{src.Value};
- trafficLights(lightIndex).state = newState;
-
- switch newState
- case 'green'
- trafficLights(lightIndex).timer = randi([200,300]);
- case 'yellow'
- trafficLights(lightIndex).timer = 300;
- case 'red'
- trafficLights(lightIndex).timer = 350;
- trafficLights(lightIndex).pedestrianRequest = false;
- end
-end
-
-%% 4. Inizializzazione logging
-log = struct();
-log.time = [];
-log.carPos = [];
-log.carSpeed = [];
-log.lightStates = {};
-logIndex = 1;
-agent_data = struct();
-
-
-%% 5. Loop simulazione
-t = 0;
-while ishandle(mainFig)
- %% A. Aggiorna semafori (solo aggiornamento visivo e sync GUI)
- for i = 1:numIntersections
- trafficLights(i).timer = trafficLights(i).timer - dt;
-
- currentState = trafficLights(i).state;
- set(lightPopups(i), 'Value', find(strcmp(lightPopups(i).String, currentState)));
-
- switch currentState
- case 'green'
- set(lightPlots(i), 'MarkerEdgeColor', [0 0.7 0]);
- case 'yellow'
- set(lightPlots(i), 'MarkerEdgeColor', [1 0.8 0]);
- case 'red'
- set(lightPlots(i), 'MarkerEdgeColor', [1 0 0]);
- end
- end
-
- %% B. Controllo veicolo
- nextLightID = car.nextLight;
- angleToNextLight = trafficLights(nextLightID).position - car.theta;
- if angleToNextLight < 0
- angleToNextLight = angleToNextLight + 2*pi;
- end
- distanceToLight = angleToNextLight * roadRadius;
-
- stoppingDistance = car.speed^2 / (2*carDecel) + safetyMargin;
-
- nextLightState = trafficLights(nextLightID).state;
- mustStop = strcmp(nextLightState, 'red') || (strcmp(nextLightState, 'yellow') && distanceToLight < stoppingDistance);
-
- if mustStop
- targetSpeed = 0;
- car.stopped = true;
- else
- targetSpeed = carSpeedMax;
- car.stopped = false;
- end
-
- if car.speed < targetSpeed
- car.speed = min(car.speed + carAccel*dt, targetSpeed);
- else
- car.speed = max(car.speed - carDecel*dt, targetSpeed);
- end
-
- car.theta = mod(car.theta + car.speed/roadRadius * dt, 2*pi);
- [~, car.nextLight] = min(abs([trafficLights.position] - car.theta));
-
- %% C. Aggiorna visualizzazione
- [x_car, y_car] = pol2cart(car.theta, roadRadius);
- set(carPlot, 'XData', x_car, 'YData', y_car);
- drawnow limitrate;
-
- %% D. Logging dati
- log.time(logIndex) = t;
- log.carPos(:, logIndex) = [x_car; y_car];
- log.carSpeed(logIndex) = car.speed;
-
- currentLightStates = cell(1, numIntersections);
- for i = 1:numIntersections
- currentLightStates{i} = trafficLights(i).state;
- end
- log.lightStates{logIndex} = currentLightStates;
-
- logIndex = logIndex + 1;
- t = t + dt;
- % Salvataggio dei dati del veicolo per ogni frame
- agent_data.time = t;
- agent_data.position = [x_car; y_car]; % Posizione della macchina
- agent_data.speed = car.speed; % Velocità attuale
- agent_data.isStopped = car.stopped; % Stato se fermo o no
- agent_data.currentIntersectionID = car.nextLight; % ID dell'incrocio attuale
-
- % Calcolare la distanza dall'incrocio
- angleToNextLight = trafficLights(car.nextLight).position - car.theta;
- if angleToNextLight < 0
- angleToNextLight = angleToNextLight + 2*pi;
- end
- agent_data.distanceToIntersection = angleToNextLight * roadRadius; % Distanza dall'incrocio
-
- % Stato di tutti i semafori
- trafficLightStates = cell(1, numIntersections);
- for i = 1:numIntersections
- trafficLightStates{i} = trafficLights(i).state;
- end
- agent_data.trafficLightStates = trafficLightStates; % Stato semafori
-
- % Salvataggio dei dati nel file .mat (versione temporanea per ogni frame)
- save('matfile/agent_data_tmp.mat', 'agent_data');
- movefile('matfile/agent_data_tmp.mat', 'matfile/agent_data.mat');
-
-end
-
-%% 6. Salvataggio dati
-fields = fieldnames(log);
-for i = 1:length(fields)
- if iscell(log.(fields{i}))
- log.(fields{i}) = log.(fields{i})(1:logIndex-1);
- else
- log.(fields{i}) = log.(fields{i})(:,1:logIndex-1);
- end
-end
diff --git a/tests/simulations/matlab/simulation_tennis_ball.m b/tests/simulations/matlab/simulation_tennis_ball.m
deleted file mode 100644
index badadcb..0000000
--- a/tests/simulations/matlab/simulation_tennis_ball.m
+++ /dev/null
@@ -1,182 +0,0 @@
-function simulation_tennis_ball(steps, show_info, use_gui)
- % SIMULATION_TENNIS_BALL simulates the physics of a tennis ball
- % Inputs:
- % steps - Maximum number of simulation steps (default: 200)
- % show_info - Display information during simulation (default: true)
- % use_gui - Use graphical interface (default: false)
-
- global sim_running agent_data;
- sim_running = true;
-
- if nargin < 1
- steps = 200;
- end
- if nargin < 2
- show_info = true;
- end
- if nargin < 3
- use_gui = false;
- end
-
- matfile_path = './matfile/';
-
- % Create directory if it doesn't exist
- if ~exist(matfile_path, 'dir')
- mkdir(matfile_path);
- end
-
- % Remove existing files to avoid conflicts
- if exist(fullfile(matfile_path, 'agent_data.mat'), 'file')
- delete(fullfile(matfile_path, 'agent_data.mat'));
- end
- if exist(fullfile(matfile_path, 'agent_data_tmp.mat'), 'file')
- delete(fullfile(matfile_path, 'agent_data_tmp.mat'));
- end
-
- g = 9.81; % Gravity [m/s^2]
- dt = 0.05; % Time interval
- position = [0.6, 0.25, 1.48]; % Initial position (x,y,z) in meters
- velocity = [12, 5, 10]; % Initial velocity (x,y,z) in m/s
- court_length = 24; % Tennis court length in m
- bounce_damping = 0.7; % Energy loss on each bounce
-
- % Initialize data structure
- agent_data = struct(...
- 'position', position, ...
- 'velocity', velocity, ...
- 'time', 0, ...
- 'current_step', 0, ...
- 'running', true ...
- );
-
- % Assign the structure for debugging
- assignin('base', 'agent_data', agent_data);
-
- % Write initial file
- save(fullfile(matfile_path, 'agent_data.mat'), 'agent_data');
-
- % Setup graphical interface if requested
- if use_gui
- fig = figure('Name', 'Tennis Simulation', 'NumberTitle', 'off');
- hold on;
- axis equal;
- axis([-5 25 -5 5 0 5]);
- grid on;
- view(3);
- title('Tennis Ball Simulation');
- xlabel('X (m)');
- ylabel('Y (m)');
- zlabel('Z (m)');
- h = plot3(position(1), position(2), position(3), 'ro', 'MarkerSize', 8, 'MarkerFaceColor', 'g');
-
- % Draw tennis court
- % Court outline
- rectangle('Position', [0 -5 23.77 10], 'EdgeColor', 'g');
- % Net
- line([11.885 11.885], [-5 5], [0 1.07], 'Color', 'k', 'LineWidth', 2);
- end
-
- % Start measuring time
- tic;
- step_count = 0;
-
- % Main simulation loop
- while sim_running && step_count < steps
- % Update physics
- velocity(3) = velocity(3) - g * dt; % Update Z velocity (gravity)
- position = position + velocity * dt; % Update position
-
- % Bounce on ground
- if position(3) < 0
- position(3) = 0;
- velocity(3) = -velocity(3) * bounce_damping;
- % Apply some friction on ground contact
- velocity(1) = velocity(1) * 0.95;
- velocity(2) = velocity(2) * 0.95;
- end
-
- % Add some air resistance
- velocity = velocity * 0.99;
-
- % Bounce on court boundaries (optional)
- if position(2) < -5 || position(2) > 5
- velocity(2) = -velocity(2) * 0.9;
- position(2) = max(min(position(2), 5), -5);
- end
-
- % Tennis net collision
- if position(1) > 11.885-0.1 && position(1) < 11.885+0.1 && position(3) < 1.07
- velocity(1) = -velocity(1) * 0.5;
- position(1) = (position(1) < 11.885) ? 11.885-0.1 : 11.885+0.1;
- end
-
- % Increment step counter
- step_count = step_count + 1;
-
- % Update data structure
- agent_data.position = position;
- agent_data.velocity = velocity;
- agent_data.time = toc;
- agent_data.current_step = step_count;
- agent_data.running = true; % Always true during simulation
-
- % Assign to workspace for debugging
- assignin('base', 'agent_data', agent_data);
-
- % Save in the format used in the reference example
- save(fullfile(matfile_path, 'agent_data_tmp.mat'), 'agent_data');
- movefile(fullfile(matfile_path, 'agent_data_tmp.mat'), fullfile(matfile_path, 'agent_data.mat'), 'f');
-
- % Update visualization if GUI is active
- if use_gui
- set(h, 'XData', position(1), 'YData', position(2), 'ZData', position(3));
- drawnow;
- end
-
- % Show real-time information
- if show_info && mod(step_count, 5) == 0
- fprintf('\n🎾 Real-time ball state (step %d):\n', step_count);
- fprintf('Position: [%.2f, %.2f, %.2f] m\n', position);
- fprintf('Velocity: [%.2f, %.2f, %.2f] m/s\n', velocity);
- end
-
- % Pause to simulate real-time
- pause(0.05);
-
- % Debug: always show that we're executing
- if step_count < 10 || mod(step_count, 20) == 0
- fprintf('Executing step: %d, sim_running: %d\n', step_count, sim_running);
- end
-
- % Stop simulation if steps reached or ball exceeds court
- if position(1) > court_length
- fprintf('Simulation terminated at step %d (ball left court)\n', step_count);
- break;
- end
-
- % Stop if ball velocity is very low (ball at rest)
- if norm(velocity) < 0.5 && position(3) < 0.01
- fprintf('Simulation terminated at step %d (ball stopped)\n', step_count);
- break;
- end
- end
-
- % Mark simulation as completed
- agent_data.running = false;
- assignin('base', 'agent_data', agent_data);
- save(fullfile(matfile_path, 'agent_data_tmp.mat'), 'agent_data');
- movefile(fullfile(matfile_path, 'agent_data_tmp.mat'), fullfile(matfile_path, 'agent_data.mat'), 'f');
-
- % Show final information
- if show_info
- fprintf('\n🎾 Final ball state:\n');
- fprintf('Position: [%.2f, %.2f, %.2f] m\n', position);
- fprintf('Velocity: [%.2f, %.2f, %.2f] m/s\n', velocity);
- fprintf('Simulation completed in %d steps (%.2f seconds)\n', step_count, agent_data.time);
- end
-
- % Close GUI window if present
- if use_gui && exist('fig', 'var') && ishandle(fig)
- close(fig);
- end
-end
\ No newline at end of file
diff --git a/tests/simulations/matlab/start_simulation.m b/tests/simulations/matlab/start_simulation.m
deleted file mode 100644
index d75c340..0000000
--- a/tests/simulations/matlab/start_simulation.m
+++ /dev/null
@@ -1,10 +0,0 @@
-global sim_running;
-sim_running = true;
-
-global agent_data;
-agent_data = [];
-
-% Esegui simulation_agent in modo asincrono
-f = parfeval(@simulation_agent, 0, 300, 2, false); % nessun output, 300 step, show_agent=2, no grafica
-
-disp("Simulazione avviata in background con parfeval.");
diff --git a/tests/simulations/python/config.yml b/tests/simulations/python/config.yml
deleted file mode 100644
index eb4930a..0000000
--- a/tests/simulations/python/config.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-mqtt:
- broker: "test.mosquitto.org"
- port: 1883
-
-topics:
- input_topic: "sim/python/input"
- output_topic: "sim/python/output"
diff --git a/tests/simulations/python/simulator.py b/tests/simulations/python/simulator.py
deleted file mode 100644
index c5f4654..0000000
--- a/tests/simulations/python/simulator.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import yaml
-import json
-import paho.mqtt.client as mqtt
-import logging
-
-# Configure logging
-logging.basicConfig(
- level=logging.INFO,
- format='%(asctime)s - %(levelname)s - %(message)s'
-)
-logger = logging.getLogger(__name__)
-
-def load_config(config_path='config.yml'):
- """
- Load configuration from YAML file.
-
- Args:
- config_path (str): Path to the configuration file
-
- Returns:
- dict: Configuration parameters
- """
- try:
- with open(config_path, 'r') as file:
- return yaml.safe_load(file)
- except Exception as e:
- logger.error(f"Failed to load configuration: {e}")
- raise
-
-class MQTTCalculator:
- """MQTT service that calculates the product of received values"""
-
- def __init__(self, config_path='config.yml'):
- """
- Initialize the MQTT calculator service.
-
- Args:
- config_path (str): Path to the configuration file
- """
- self.config = load_config(config_path)
- self.broker = self.config['mqtt']['broker']
- self.port = self.config['mqtt']['port']
- self.input_topic = self.config['topics']['input_topic']
- self.output_topic = self.config['topics']['output_topic']
-
- # Setup MQTT client
- self.client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION1)
- self.client.on_connect = self.on_connect
- self.client.on_message = self.on_message
-
- # Set optional authentication if provided in config
- if 'username' in self.config['mqtt'] and 'password' in self.config['mqtt']:
- self.client.username_pw_set(
- self.config['mqtt']['username'],
- self.config['mqtt']['password']
- )
-
- def on_connect(self, client, userdata, flags, rc, properties=None):
- """
- Callback for successful connection to the MQTT broker.
-
- Args:
- client: MQTT client instance
- userdata: User data
- flags: Connection flags
- rc: Connection result code
- properties: Connection properties (MQTT v5)
- """
- if rc == 0:
- logger.info(f"Connected to {self.broker} with status code {rc}")
- client.subscribe(self.input_topic)
- logger.info(f"Subscribed to {self.input_topic}")
- else:
- logger.error(f"Failed to connect to {self.broker}, return code {rc}")
-
- def on_message(self, client, userdata, msg):
- """
- Process incoming MQTT messages.
-
- Args:
- client: MQTT client instance
- userdata: User data
- msg: Received message
- """
- try:
- # Decode and parse the JSON payload
- payload = json.loads(msg.payload.decode())
- logger.debug(f"Received message: {payload}")
-
- # Extract values with defaults
- v1 = float(payload.get("value1", 1))
- v2 = float(payload.get("value2", 1))
- v3 = float(payload.get("value3", 1))
-
- # Calculate product
- product = v1 * v2 * v3
- logger.info(f"Received: {payload} → Product: {product}")
-
- # Prepare and publish result
- result = {"product": product}
- self.client.publish(self.output_topic, json.dumps(result))
- logger.info(f"Published to {self.output_topic}: {result}")
-
- except json.JSONDecodeError:
- logger.error("Invalid JSON format in the message")
- except Exception as e:
- logger.error(f"Error processing message: {e}")
-
- def run(self):
- """Start the MQTT calculator service"""
- try:
- # Connect to the broker
- logger.info(f"Connecting to MQTT broker {self.broker}:{self.port}")
- self.client.connect(self.broker, self.port)
-
- # Start processing messages
- logger.info("Starting MQTT processing loop")
- self.client.loop_forever()
- except KeyboardInterrupt:
- logger.info("Service stopped by user")
- except Exception as e:
- logger.error(f"Service error: {e}")
- finally:
- # Clean up resources
- self.client.disconnect()
- logger.info("Disconnected from MQTT broker")
-
-if __name__ == "__main__":
- # Create and start the calculator service
- calculator = MQTTCalculator()
- calculator.run()
\ No newline at end of file
diff --git a/tests/simulations/simulation_client_rabbitmq.py b/tests/simulations/simulation_client_rabbitmq.py
deleted file mode 100644
index 79287ff..0000000
--- a/tests/simulations/simulation_client_rabbitmq.py
+++ /dev/null
@@ -1,157 +0,0 @@
-import pika
-import yaml
-import sys
-import uuid
-import time
-
-class Simulation:
- def __init__(self, sim_id):
- self.sim_id = sim_id
- self.connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
- self.channel = self.connection.channel()
- self.setup_infrastructure()
-
- def setup_infrastructure(self):
- # To receive input messages
- self.channel.exchange_declare(
- exchange='ex.bridge.output',
- exchange_type='topic',
- durable=True
- )
-
- # To send simulation result messages
- self.channel.exchange_declare(
- exchange='ex.sim.result',
- exchange_type='topic',
- durable=True
- )
-
- # Queue to receive input commands
- self.input_queue_name = f'Q.sim.{self.sim_id}'
- self.channel.queue_declare(queue=self.input_queue_name, durable=True)
- self.channel.queue_bind(
- exchange='ex.bridge.output',
- queue=self.input_queue_name,
- routing_key=f"*.{self.sim_id}" # Accept messages from any source
- )
-
- def handle_message(self, ch, method, properties, body):
- try:
- # Load the message body as YAML
- msg = yaml.safe_load(body)
- print(f" [SIM {self.sim_id}] Received: {msg}")
-
- # Extract simulation information
- sim_type = msg.get('simulation', {}).get('type', 'batch')
- source = method.routing_key.split('.')[0] # Extract the message source
-
- print(f" [SIM {self.sim_id}] Simulation type: {sim_type}")
- print(f" [SIM {self.sim_id}] Source: {source}")
-
- # Perform the simulation (in a real-world scenario, this would be a complex computation)
- result = self.perform_simulation(sim_type, msg)
-
- # Send the simulation result
- self.send_result(source, result)
-
- # Acknowledge the receipt of the original message
- ch.basic_ack(method.delivery_tag)
-
- except yaml.YAMLError as e:
- print(f"Error decoding YAML message: {e}")
- ch.basic_nack(method.delivery_tag)
- except Exception as e:
- print(f"Error processing the message: {e}")
- ch.basic_nack(method.delivery_tag)
-
- def perform_simulation(self, sim_type, input_data):
- """
- Executes the simulation and returns the result.
- In a real-world case, this would perform the simulation computation.
- """
- print(f" [SIM {self.sim_id}] Executing simulation of type: {sim_type}")
-
- # Simulate processing time
- if sim_type == 'realtime':
- # For real-time simulations, send multiple results sequentially
- return {
- 'simulation_id': str(uuid.uuid4()),
- 'sim_type': sim_type,
- 'timestamp': time.time(),
- 'status': 'completed',
- 'data': {
- 'result_type': 'realtime',
- 'metrics': {
- 'accuracy': 0.95,
- 'precision': 0.92,
- 'recall': 0.94
- },
- 'values': [1.2, 3.4, 5.6, 7.8]
- }
- }
- else: # batch or other types
- # For batch simulations, send a single complete result
- return {
- 'simulation_id': str(uuid.uuid4()),
- 'sim_type': sim_type,
- 'timestamp': time.time(),
- 'status': 'completed',
- 'data': {
- 'result_type': 'batch',
- 'summary': {
- 'total_iterations': 100,
- 'convergence_rate': 0.001,
- 'execution_time_ms': 345
- },
- 'output': {
- 'prediction': [10.5, 20.3, 15.7],
- 'confidence': 0.89
- }
- }
- }
-
- def send_result(self, destination, result):
- """
- Sends the simulation result to the specified destination.
- """
- # Prepare the payload with the destination
- payload = {
- **result, # Result data
- 'source': self.sim_id, # Simulation identifier
- 'destinations': [destination] # Recipient (e.g., 'dt', 'pt')
- }
-
- # Serialize to YAML
- payload_yaml = yaml.dump(payload, default_flow_style=False)
-
-        # Routing key: <sim_id>.result.<destination>
- routing_key = f"{self.sim_id}.result.{destination}"
-
- # Publish the message
- self.channel.basic_publish(
- exchange='ex.sim.result',
- routing_key=routing_key,
- body=payload_yaml,
- properties=pika.BasicProperties(
- delivery_mode=2, # Persistent message
- content_type='application/x-yaml',
- message_id=str(uuid.uuid4())
- )
- )
- print(f" [SIM {self.sim_id}] Result sent to {destination}: {payload}")
-
- def start(self):
- self.channel.basic_consume(
- queue=self.input_queue_name,
- on_message_callback=self.handle_message
- )
- print(f" [SIM {self.sim_id}] Listening for simulation requests...")
- self.channel.start_consuming()
-
-
-if __name__ == "__main__":
- if len(sys.argv) != 2:
-        print("Usage: simulation.py <sim_id>")
- sys.exit(1)
-
- Simulation(sys.argv[1]).start()
\ No newline at end of file