From ce642c1d5d3f0253b34b27200d237e82e3cf397f Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 25 Mar 2025 13:07:02 +0100 Subject: [PATCH 01/52] =?UTF-8?q?F=C3=BCge=20experimentelle=20=C3=84nderun?= =?UTF-8?q?gen=20f=C3=BCr=20Registry=20und=20Discovery=20hinzu?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Discovery Server/Dockerfile | 45 +++ Discovery Server/README.md | 97 +++++ Discovery Server/app/main.py | 46 +++ Discovery Server/compose.yml | 7 + Discovery Server/entrypoint.sh | 71 ++++ Discovery Server/stop-supervisor.sh | 8 + Discovery Server/supervisord.ini | 27 ++ Discovery Server/uwsgi.ini | 9 + Registry Server/Dockerfile | 45 +++ Registry Server/README.md | 97 +++++ Registry Server/app/main.py | 46 +++ Registry Server/compose.yml | 7 + Registry Server/entrypoint.sh | 71 ++++ Registry Server/stop-supervisor.sh | 8 + Registry Server/supervisord.ini | 27 ++ Registry Server/uwsgi.ini | 9 + sdk/basyx/aas/adapter/http.py | 46 ++- .../aas/adapter/json/json_deserialization.py | 147 ++++++- .../aas/adapter/json/json_serialization.py | 114 ++++++ sdk/basyx/aas/adapter/registry.py | 367 ++++++++++++++++++ sdk/basyx/aas/adapter/resolver.py | 253 ++++++++++++ .../aas/adapter/xml/xml_deserialization.py | 58 +++ sdk/basyx/aas/model/__init__.py | 6 +- sdk/basyx/aas/model/aas.py | 11 + sdk/basyx/aas/model/aas_descriptor.py | 67 ++++ sdk/basyx/aas/model/descriptor.py | 133 +++++++ sdk/basyx/aas/model/submodel_descriptor.py | 29 ++ test.py | 67 ++++ 28 files changed, 1913 insertions(+), 5 deletions(-) create mode 100644 Discovery Server/Dockerfile create mode 100644 Discovery Server/README.md create mode 100644 Discovery Server/app/main.py create mode 100644 Discovery Server/compose.yml create mode 100644 Discovery Server/entrypoint.sh create mode 100644 Discovery Server/stop-supervisor.sh create mode 100644 Discovery Server/supervisord.ini create mode 100644 Discovery Server/uwsgi.ini create mode 100644 Registry 
Server/Dockerfile create mode 100644 Registry Server/README.md create mode 100644 Registry Server/app/main.py create mode 100644 Registry Server/compose.yml create mode 100644 Registry Server/entrypoint.sh create mode 100644 Registry Server/stop-supervisor.sh create mode 100644 Registry Server/supervisord.ini create mode 100644 Registry Server/uwsgi.ini create mode 100644 sdk/basyx/aas/adapter/registry.py create mode 100644 sdk/basyx/aas/adapter/resolver.py create mode 100644 sdk/basyx/aas/model/aas_descriptor.py create mode 100644 sdk/basyx/aas/model/descriptor.py create mode 100644 sdk/basyx/aas/model/submodel_descriptor.py create mode 100644 test.py diff --git a/Discovery Server/Dockerfile b/Discovery Server/Dockerfile new file mode 100644 index 000000000..6dc3c4cac --- /dev/null +++ b/Discovery Server/Dockerfile @@ -0,0 +1,45 @@ +FROM python:3.11-alpine + +LABEL org.label-schema.name="Eclipse BaSyx" \ + org.label-schema.version="1.0" \ + org.label-schema.description="Docker image for the basyx-python-sdk server application" \ + org.label-schema.maintainer="Eclipse BaSyx" + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# If we have more dependencies for the server it would make sense +# to refactor uswgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ + apk del git bash + + +COPY uwsgi.ini /etc/uwsgi/ +COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV 
NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +COPY ./app /app +WORKDIR /app + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Discovery Server/README.md b/Discovery Server/README.md new file mode 100644 index 000000000..339226c53 --- /dev/null +++ b/Discovery Server/README.md @@ -0,0 +1,97 @@ +# Eclipse BaSyx Python SDK - HTTP Server + +This package contains a Dockerfile to spin up an exemplary HTTP/REST server following the [Specification of the AAS Part 2 API][6] with ease. +The server currently implements the following interfaces: + +- [Asset Administration Shell Repository Service][4] +- [Submodel Repository Service][5] + +It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory. +The files are only read, changes won't persist. + +Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements). +See [below](#options) on how to configure this. + +## Building +The container image can be built via: +``` +$ docker buildx build -t basyx-python-sdk-http-server . +``` + +## Running + +### Storage +The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. + +This directory can be mapped via the `-v` option from another image or a local directory. +To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. +The directory `storage` will be created in the current working directory, if it doesn't already exist. 
+ +### Port +The HTTP server inside the container listens on port 80 by default. +To expose it on the host on port 8080, use the option `-p 8080:80` when running it. + +### Options +The container can be configured via environment variables: +- `API_BASE_PATH` determines the base path under which all other API paths are made available. + Default: `/api/v3.0` +- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: + - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. + The files are not modified, all changes done via the API are only stored in memory. + - When instead set to `LOCAL_FILE`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. + Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. +- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. + +### Running Examples + +Putting it all together, the container can be started via the following command: +``` +$ docker run -p 8080:80 -v ./storage:/storage basyx-python-sdk-http-server +``` + +Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there: +``` +> docker run -p 8080:80 -v .\storage:/storage basyx-python-sdk-http-server +``` + +Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. 
If you want to change this, you can do so like this: +``` +$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-sdk-http-server +``` + +## Building and running the image with docker-compose + +The container image can also be built and run via: +``` +$ docker compose up +``` + +This is the exemplary `docker-compose` file for the server: +````yaml +services: + app: + build: . + ports: + - "8080:80" + volumes: + - ./storage:/storage + +```` + +Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system. +To get a different setup this compose.yaml file can be adapted and expanded. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. + +[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 +[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html +[3]: https://github.com/eclipse-basyx/basyx-python-sdk +[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 +[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 +[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces +[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx +[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html +[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html +[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py new file mode 100644 index 000000000..f3eed03ec --- /dev/null +++ b/Discovery Server/app/main.py @@ -0,0 +1,46 @@ +import os +import pathlib +import sys + +from basyx.aas import model, adapter +from basyx.aas.adapter import aasx + +from 
basyx.aas.backend.local_file import LocalFileObjectStore +from basyx.aas.adapter.registry import RegistryAPI + +storage_path = os.getenv("STORAGE_PATH", "/storage") +storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") +base_path = os.getenv("API_BASE_PATH") + +wsgi_optparams = {} + +if base_path is not None: + wsgi_optparams["base_path"] = base_path + +if storage_type == "LOCAL_FILE_BACKEND": + application = RegistryAPI(LocalFileObjectStore(storage_path), aasx.DictSupplementaryFileContainer(), **wsgi_optparams) + +elif storage_type in "LOCAL_FILE_READ_ONLY": + object_store: model.DictObjectStore = model.DictObjectStore() + file_store: aasx.DictSupplementaryFileContainer = aasx.DictSupplementaryFileContainer() + + for file in pathlib.Path(storage_path).iterdir(): + if not file.is_file(): + continue + print(f"Loading {file}") + + if file.suffix.lower() == ".json": + with open(file) as f: + adapter.json.read_aas_json_file_into(object_store, f) + elif file.suffix.lower() == ".xml": + with open(file) as f: + adapter.xml.read_aas_xml_file_into(object_store, file) + elif file.suffix.lower() == ".aasx": + with aasx.AASXReader(file) as reader: + reader.read_into(object_store=object_store, file_store=file_store) + + application = RegistryAPI(object_store, file_store, **wsgi_optparams) + +else: + print(f"STORAGE_TYPE must be either LOCAL_FILE or LOCAL_FILE_READ_ONLY! Current value: {storage_type}", + file=sys.stderr) diff --git a/Discovery Server/compose.yml b/Discovery Server/compose.yml new file mode 100644 index 000000000..90840a09b --- /dev/null +++ b/Discovery Server/compose.yml @@ -0,0 +1,7 @@ +services: + app: + build: . 
+ ports: + - "8084:80" + volumes: + - ./storage:/storage diff --git a/Discovery Server/entrypoint.sh b/Discovery Server/entrypoint.sh new file mode 100644 index 000000000..722394409 --- /dev/null +++ b/Discovery Server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! -d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' 
sendfile on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf "client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" diff --git a/Discovery Server/stop-supervisor.sh b/Discovery Server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/Discovery Server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/Discovery Server/supervisord.ini b/Discovery Server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/Discovery Server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 
+stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/Discovery Server/uwsgi.ini b/Discovery Server/uwsgi.ini new file mode 100644 index 000000000..9c54ae1cc --- /dev/null +++ b/Discovery Server/uwsgi.ini @@ -0,0 +1,9 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false diff --git a/Registry Server/Dockerfile b/Registry Server/Dockerfile new file mode 100644 index 000000000..6dc3c4cac --- /dev/null +++ b/Registry Server/Dockerfile @@ -0,0 +1,45 @@ +FROM python:3.11-alpine + +LABEL org.label-schema.name="Eclipse BaSyx" \ + org.label-schema.version="1.0" \ + org.label-schema.description="Docker image for the basyx-python-sdk server application" \ + org.label-schema.maintainer="Eclipse BaSyx" + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# If we have more dependencies for the server it would make sense +# to refactor uswgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ + apk del git bash + + +COPY uwsgi.ini /etc/uwsgi/ +COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY stop-supervisor.sh 
/etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +COPY ./app /app +WORKDIR /app + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Registry Server/README.md b/Registry Server/README.md new file mode 100644 index 000000000..339226c53 --- /dev/null +++ b/Registry Server/README.md @@ -0,0 +1,97 @@ +# Eclipse BaSyx Python SDK - HTTP Server + +This package contains a Dockerfile to spin up an exemplary HTTP/REST server following the [Specification of the AAS Part 2 API][6] with ease. +The server currently implements the following interfaces: + +- [Asset Administration Shell Repository Service][4] +- [Submodel Repository Service][5] + +It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory. +The files are only read, changes won't persist. + +Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements). +See [below](#options) on how to configure this. + +## Building +The container image can be built via: +``` +$ docker buildx build -t basyx-python-sdk-http-server . 
+``` + +## Running + +### Storage +The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. + +This directory can be mapped via the `-v` option from another image or a local directory. +To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. +The directory `storage` will be created in the current working directory, if it doesn't already exist. + +### Port +The HTTP server inside the container listens on port 80 by default. +To expose it on the host on port 8080, use the option `-p 8080:80` when running it. + +### Options +The container can be configured via environment variables: +- `API_BASE_PATH` determines the base path under which all other API paths are made available. + Default: `/api/v3.0` +- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: + - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. + The files are not modified, all changes done via the API are only stored in memory. + - When instead set to `LOCAL_FILE`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. + Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. +- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. 
+ +### Running Examples + +Putting it all together, the container can be started via the following command: +``` +$ docker run -p 8080:80 -v ./storage:/storage basyx-python-sdk-http-server +``` + +Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there: +``` +> docker run -p 8080:80 -v .\storage:/storage basyx-python-sdk-http-server +``` + +Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. If you want to change this, you can do so like this: +``` +$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-sdk-http-server +``` + +## Building and running the image with docker-compose + +The container image can also be built and run via: +``` +$ docker compose up +``` + +This is the exemplary `docker-compose` file for the server: +````yaml +services: + app: + build: . + ports: + - "8080:80" + volumes: + - ./storage:/storage + +```` + +Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system. +To get a different setup this compose.yaml file can be adapted and expanded. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. 
+ +[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 +[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html +[3]: https://github.com/eclipse-basyx/basyx-python-sdk +[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 +[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 +[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces +[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx +[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html +[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html +[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Registry Server/app/main.py b/Registry Server/app/main.py new file mode 100644 index 000000000..f3eed03ec --- /dev/null +++ b/Registry Server/app/main.py @@ -0,0 +1,46 @@ +import os +import pathlib +import sys + +from basyx.aas import model, adapter +from basyx.aas.adapter import aasx + +from basyx.aas.backend.local_file import LocalFileObjectStore +from basyx.aas.adapter.registry import RegistryAPI + +storage_path = os.getenv("STORAGE_PATH", "/storage") +storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") +base_path = os.getenv("API_BASE_PATH") + +wsgi_optparams = {} + +if base_path is not None: + wsgi_optparams["base_path"] = base_path + +if storage_type == "LOCAL_FILE_BACKEND": + application = RegistryAPI(LocalFileObjectStore(storage_path), aasx.DictSupplementaryFileContainer(), **wsgi_optparams) + +elif storage_type in "LOCAL_FILE_READ_ONLY": + object_store: model.DictObjectStore = model.DictObjectStore() + file_store: aasx.DictSupplementaryFileContainer = aasx.DictSupplementaryFileContainer() + + for file in pathlib.Path(storage_path).iterdir(): + if not file.is_file(): + continue + print(f"Loading {file}") + + 
if file.suffix.lower() == ".json": + with open(file) as f: + adapter.json.read_aas_json_file_into(object_store, f) + elif file.suffix.lower() == ".xml": + with open(file) as f: + adapter.xml.read_aas_xml_file_into(object_store, file) + elif file.suffix.lower() == ".aasx": + with aasx.AASXReader(file) as reader: + reader.read_into(object_store=object_store, file_store=file_store) + + application = RegistryAPI(object_store, file_store, **wsgi_optparams) + +else: + print(f"STORAGE_TYPE must be either LOCAL_FILE or LOCAL_FILE_READ_ONLY! Current value: {storage_type}", + file=sys.stderr) diff --git a/Registry Server/compose.yml b/Registry Server/compose.yml new file mode 100644 index 000000000..932422dbc --- /dev/null +++ b/Registry Server/compose.yml @@ -0,0 +1,7 @@ +services: + app: + build: . + ports: + - "8083:80" + volumes: + - ./storage:/storage diff --git a/Registry Server/entrypoint.sh b/Registry Server/entrypoint.sh new file mode 100644 index 000000000..722394409 --- /dev/null +++ b/Registry Server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! 
-d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' sendfile on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" diff --git a/Registry Server/stop-supervisor.sh b/Registry Server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/Registry Server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/Registry Server/supervisord.ini b/Registry Server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/Registry Server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/Registry Server/uwsgi.ini b/Registry Server/uwsgi.ini new file mode 100644 index 000000000..9c54ae1cc --- /dev/null +++ b/Registry Server/uwsgi.ini @@ -0,0 +1,9 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false diff --git 
a/sdk/basyx/aas/adapter/http.py b/sdk/basyx/aas/adapter/http.py index a4d7ab289..30e26e419 100644 --- a/sdk/basyx/aas/adapter/http.py +++ b/sdk/basyx/aas/adapter/http.py @@ -289,7 +289,10 @@ class HTTPApiDecoder: model.Qualifier: XMLConstructables.QUALIFIER, model.Submodel: XMLConstructables.SUBMODEL, model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, - model.Reference: XMLConstructables.REFERENCE + model.Reference: XMLConstructables.REFERENCE, + model.AssetAdministrationShellDescriptor: XMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR, + model.SubmodelDescriptor: XMLConstructables.SUBMODEL_DESCRIPTOR, + model.AssetLink: XMLConstructables.ASSET_LINK, } @classmethod @@ -333,6 +336,12 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool constructor = decoder._construct_reference # type: ignore[assignment] elif expect_type is model.Qualifier: constructor = decoder._construct_qualifier # type: ignore[assignment] + elif expect_type is model.AssetAdministrationShellDescriptor: + constructor = decoder._construct_asset_administration_shell_descriptor + elif expect_type is model.SubmodelDescriptor: + constructor = decoder._construct_submodel_descriptor + elif expect_type is model.AssetLink: + constructor = decoder._construct_asset_link if constructor is not None: # construct elements that aren't self-identified @@ -394,7 +403,42 @@ def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> if request.mimetype == "application/json": return cls.json(request.get_data(), expect_type, stripped) return cls.xml(request.get_data(), expect_type, stripped) + @classmethod + def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: + """ + Deserializes the request body to an instance (or list of instances) + of the expected type. 
+ """ + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! Supported types: " + ", ".join(valid_content_types) + ) + if request.mimetype == "application/json": + raw_data = request.get_data() + try: + parsed = json.loads(raw_data) + except Exception as e: + raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") + # Prüfe, ob parsed ein Array ist: + if isinstance(parsed, list): + # Für jedes Element wird die Konvertierung angewandt. + return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed] # type: ignore + else: + return cls._convert_single_json_item(parsed, expect_type, stripped) + else: + return cls.xml(request.get_data(), expect_type, stripped) + + @classmethod + def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bool) -> T: + """ + Konvertiert ein einzelnes JSON-Objekt (als Python-Dict) in ein Objekt vom Typ expect_type. + Hierbei wird das Dictionary zuerst wieder in einen JSON-String serialisiert und als Bytes übergeben. 
+ """ + json_bytes = json.dumps(data).encode("utf-8") + return cls.json(json_bytes, expect_type, stripped) class Base64URLConverter(werkzeug.routing.UnicodeConverter): diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index c1ce35fef..3a50975a9 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,6 +34,7 @@ import json import logging import pprint +from abc import abstractmethod from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args from basyx.aas import model @@ -188,6 +189,9 @@ def object_hook(cls, dct: Dict[str, object]) -> object: 'Range': cls._construct_range, 'ReferenceElement': cls._construct_reference_element, 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, + 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, + 'SubmodelDescriptor': cls._construct_submodel_descriptor, + 'AssetLink': cls._construct_asset_link, } # Get modelType and constructor function @@ -275,7 +279,16 @@ def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None if 'extensions' in dct: for extension in _get_ts(dct, 'extensions', list): obj.extension.add(cls._construct_extension(extension)) - + if isinstance(obj, model.Descriptor): + if 'description' in dct: + obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), + model.MultiLanguageTextType) + if 'displayName' in dct: + obj.display_name = cls._construct_lang_string_set(_get_ts(dct, 'displayName', list), + model.MultiLanguageNameType) + if 'extensions' in dct: + for extension in _get_ts(dct, 'extensions', list): + obj.extension.add(cls._construct_extension(extension)) @classmethod def _get_kind(cls, dct: Dict[str, object]) -> model.ModellingKind: """ @@ -747,6 +760,134 @@ def _construct_reference_element( ret.value = 
cls._construct_reference(_get_ts(dct, 'value', dict)) return ret + @classmethod + def _construct_asset_administration_shell_descriptor( + cls, dct: Dict[str, object], object_class=model.AssetAdministrationShellDescriptor) -> model.AssetAdministrationShellDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str)) + cls._amend_abstract_attributes(ret, dct) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) + if 'assetKind' in dct: + ret.asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] + if 'assetType' in dct: + ret.asset_type = _get_ts(dct, 'assetType', str) + global_asset_id = None + if 'globalAssetId' in dct: + ret.global_asset_id = _get_ts(dct, 'globalAssetId', str) + specific_asset_id = set() + if 'specificAssetIds' in dct: + for desc_data in _get_ts(dct, "specificAssetIds", list): + ret.specific_asset_id.add(cls._construct_specific_asset_id(desc_data, model.SpecificAssetId))  # NOTE(review): assumes the descriptor initializes specific_asset_id as a mutable set — confirm + if 'endpoints' in dct: + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + endpoint_dct['href'] else None + ) + ret.endpoints.append(model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'submodelDescriptors' in dct: + ret.submodel_descriptors = [cls._construct_submodel_descriptor(desc, model.SubmodelDescriptor) for desc in _get_ts(dct, 'submodelDescriptors', list)] + return
ret + + @classmethod + def _construct_protocol_information(cls, dct: Dict[str, object], + object_class=model.ProtocolInformation) -> model.ProtocolInformation: + ret = object_class( + href=_get_ts(dct, 'href', str), + endpoint_protocol=_get_ts(dct, 'endpointProtocol', + str) if 'endpointProtocol' in dct else None, + endpoint_protocol_version=_get_ts(dct, + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in dct else None, + subprotocol=_get_ts(dct, 'subprotocol', + str) if 'subprotocol' in dct else None, + subprotocol_body=_get_ts(dct, 'subprotocolBody', + str) if 'subprotocolBody' in dct else None, + subprotocol_body_encoding=_get_ts(dct, + 'subprotocolBodyEncoding', + str) if 'subprotocolBodyEncoding' in dct else None + ) + return ret + + @classmethod + def _construct_endpoint(cls, dct: Dict[str, object], + object_class=model.Endpoint) -> model.Endpoint: + ret = object_class( + protocol_information=cls._construct_protocol_information( + _get_ts(dct, 'protocolInformation', dict), + model.ProtocolInformation + ), + interface=_get_ts(dct, 'interface', + str) + ) + cls._amend_abstract_attributes(ret, dct) + return ret + + @classmethod + def _construct_submodel_descriptor( + cls, dct: Dict[str, object], object_class=model.SubmodelDescriptor) -> model.SubmodelDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str), + endpoints=[]) + cls._amend_abstract_attributes(ret, dct) + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + 
endpoint_dct['href'] else None + ) + ret.endpoints.append(model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information( + _get_ts(dct, 'administration', dict)) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'semanticId' in dct: + ret.semantic_id = cls._construct_reference(_get_ts(dct, 'semanticId', dict)) + if 'supplementalSemanticIds' in dct: + for ref in _get_ts(dct, 'supplementalSemanticIds', list): + ret.supplemental_semantic_id.append(cls._construct_reference(ref)) + return ret + @classmethod + def _construct_asset_link ( + cls, dct: Dict[str, object], object_class=model.AssetLink) -> model.AssetLink: + ret = object_class(name=_get_ts(dct, 'name', str), + value=_get_ts(dct, 'value', str)) + return ret class StrictAASFromJsonDecoder(AASFromJsonDecoder): """ @@ -845,7 +986,9 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription)): + ('conceptDescriptions', model.ConceptDescription), + ('assetAdministrationShellDescriptors', model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', model.SubmodelDescriptor)): try: lst = _get_ts(data, name, list) except (KeyError, TypeError): diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index 8c6a671f1..29d614fa8 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -93,6 +93,11 @@ def default(self, obj: object) -> object: model.SubmodelElementCollection: self._submodel_element_collection_to_json, model.SubmodelElementList: self._submodel_element_list_to_json, model.ValueReferencePair: self._value_reference_pair_to_json, 
+ model.AssetAdministrationShellDescriptor: self._asset_administration_shell_descriptor_to_json, + model.SubmodelDescriptor: self._submodel_descriptor_to_json, + model.Endpoint: self._endpoint_to_json, + model.ProtocolInformation: self._protocol_information_to_json, + model.AssetLink: self._asset_link_to_json } for typ in mapping: if isinstance(obj, typ): @@ -150,6 +155,14 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: if isinstance(obj, model.Qualifiable) and not cls.stripped: if obj.qualifier: data['qualifiers'] = list(obj.qualifier) + if isinstance(obj, model.Descriptor): + if obj.description: + data['description'] = obj.description + if obj.display_name: + data['displayName'] = obj.display_name + if obj.extension: + data['extensions'] = list(obj.extension) + return data # ############################################################# @@ -670,6 +683,92 @@ def _basic_event_element_to_json(cls, obj: model.BasicEventElement) -> Dict[str, data['maxInterval'] = model.datatypes.xsd_repr(obj.max_interval) return data + @classmethod + def _asset_administration_shell_descriptor_to_json(cls, obj: model.AssetAdministrationShellDescriptor) -> Dict[str, object]: + """ + serialization of an object from class AssetAdministrationShell to json + + :param obj: object of class AssetAdministrationShell + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data.update(cls._namespace_to_json(obj)) + data['id'] = obj.id + if obj.administration: + data['administration'] = obj.administration + if obj.asset_kind: + data['assetKind'] = _generic.ASSET_KIND[obj.asset_kind] + if obj.asset_type: + data['assetType'] = obj.asset_type + if obj.global_asset_id: + data['globalAssetId'] = obj.global_asset_id + if obj.specific_asset_id: + data['specificAssetIds'] = list(obj.specific_asset_id) + if obj.endpoints: + data['endpoints'] = list(obj.endpoints) + if obj.id_short: + data['idShort'] = obj.id_short + if 
obj.submodel_descriptors: + data['submodelDescriptors'] = list(obj.submodel_descriptors) + return data + + @classmethod + def _protocol_information_to_json(cls, + obj: model.ProtocolInformation) -> \ + Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + + data['href'] = obj.href + if obj.endpoint_protocol: + data['endpointProtocol'] = obj.endpoint_protocol + if obj.endpoint_protocol_version: + data['endpointProtocolVersion'] = obj.endpoint_protocol_version + if obj.subprotocol: + data['subprotocol'] = obj.subprotocol + if obj.subprotocol_body: + data['subprotocolBody'] = obj.subprotocol_body + if obj.subprotocol_body_encoding: + data['subprotocolBodyEncoding'] = obj.subprotocol_body_encoding + + return data + + + @classmethod + def _endpoint_to_json(cls, obj: model.Endpoint) -> Dict[ + str, object]: + data = cls._abstract_classes_to_json(obj) + data['protocolInformation'] = cls._protocol_information_to_json( + obj.protocol_information) + data['interface'] = obj.interface + return data + + @classmethod + def _submodel_descriptor_to_json(cls, obj: model.SubmodelDescriptor) -> Dict[str, object]: + """ + serialization of an object from class Submodel to json + + :param obj: object of class Submodel + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data['id'] = obj.id + data['endpoints'] = [cls._endpoint_to_json(ep) for ep in + obj.endpoints] + if obj.id_short: + data['idShort'] = obj.id_short + if obj.administration: + data['administration'] = obj.administration + if obj.semantic_id: + data['semanticId'] = obj.semantic_id + if obj.supplemental_semantic_id: + data['supplementalSemanticIds'] = list(obj.supplemental_semantic_id) + return data + @classmethod + def _asset_link_to_json(cls, obj: model.AssetLink) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['name'] = obj.name + data['value'] = obj.value + return data class StrippedAASToJsonEncoder(AASToJsonEncoder): """ @@ 
-698,6 +797,9 @@ def _create_dict(data: model.AbstractObjectStore) -> dict: asset_administration_shells: List[model.AssetAdministrationShell] = [] submodels: List[model.Submodel] = [] concept_descriptions: List[model.ConceptDescription] = [] + asset_administration_shell_descriptors: List[model.AssetAdministrationShellDescriptor] = [] + submodel_descriptors: List[model.SubmodelDescriptor] = [] + assets_links: List[model.AssetLink] = [] for obj in data: if isinstance(obj, model.AssetAdministrationShell): asset_administration_shells.append(obj) @@ -705,6 +807,12 @@ def _create_dict(data: model.AbstractObjectStore) -> dict: submodels.append(obj) elif isinstance(obj, model.ConceptDescription): concept_descriptions.append(obj) + elif isinstance(obj, model.AssetAdministrationShellDescriptor): + asset_administration_shell_descriptors.append(obj) + elif isinstance(obj, model.SubmodelDescriptor): + submodel_descriptors.append(obj) + elif isinstance(obj, model.AssetLink): + assets_links.append(obj) dict_: Dict[str, List] = {} if asset_administration_shells: dict_['assetAdministrationShells'] = asset_administration_shells @@ -712,6 +820,12 @@ def _create_dict(data: model.AbstractObjectStore) -> dict: dict_['submodels'] = submodels if concept_descriptions: dict_['conceptDescriptions'] = concept_descriptions + if asset_administration_shell_descriptors: + dict_['assetAdministrationShellDescriptors'] = asset_administration_shell_descriptors + if submodel_descriptors: + dict_['submodelDescriptors'] = submodel_descriptors + if assets_links: + dict_['assetLinks'] = assets_links return dict_ diff --git a/sdk/basyx/aas/adapter/registry.py b/sdk/basyx/aas/adapter/registry.py new file mode 100644 index 000000000..c3871c612 --- /dev/null +++ b/sdk/basyx/aas/adapter/registry.py @@ -0,0 +1,367 @@ +# Copyright (c) 2024 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this 
project. +# +# SPDX-License-Identifier: MIT +""" +This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". +""" + +import abc +import base64 +import binascii +import datetime +import enum +import io +import json +import itertools + +from lxml import etree +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.urls +import werkzeug.utils +from werkzeug.exceptions import BadRequest, Conflict, NotFound, UnprocessableEntity +from werkzeug.routing import MapAdapter, Rule, Submount +from werkzeug.wrappers import Request, Response +from werkzeug.datastructures import FileStorage + +from basyx.aas import model +from ._generic import XML_NS_MAP +from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element +from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder +from . import aasx +from .http import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder +from .http import Base64URLConverter + +from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple + +def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] + +def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = 
exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 400: + message = Message(type(exception).__name__, exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) + +def is_stripped_request(request: Request) -> bool: + return request.args.get("level") == "core" + +T = TypeVar("T") + +BASE64URL_ENCODING = "utf-8" +class RegistryAPI: + def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/api/v3.0"): + self.object_store: model.AbstractObjectStore = object_store + self.url_map = werkzeug.routing.Map([ + Submount(base_path, [ + Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_aas_descriptors_all), + Rule("/shell-descriptors", methods=["POST"], endpoint=self.post_aas_descriptor), + Submount("/shell-descriptors", [ + Rule("/", methods=["GET"], endpoint=self.get_aas_descriptor), + Rule("/", methods=["PUT"], endpoint=self.put_aas_descriptor), + Rule("/", methods=["DELETE"], endpoint=self.delete_aas_descriptor), + Submount("/", [ + Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors_through_superpath), + Rule("/submodel-descriptors", methods=["POST"], endpoint=self.post_submodel_descriptor_through_superpath), + Submount("/submodel-descriptors", [ + Rule("/", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_descriptor_by_id_through_superpath), + ]) + ]) + ]), + Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors), + Rule("/submodel-descriptors", methods=["POST"], endpoint=self.post_submodel_descriptor), + 
Submount("/submodel-descriptors", [ + Rule("/", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id), + Rule("/", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id), + Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_descriptor_by_id), + ]) + ]) + ], converters={ + "base64url": Base64URLConverter + }, strict_slashes=False) + + def __call__(self, environ, start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: + identifiable = self.object_store.get(identifier) + if not isinstance(identifiable, type_): + raise NotFound(f"No {type_.__name__} with {identifier} found!") + identifiable.update() + return identifiable + + def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: + for obj in self.object_store: + if isinstance(obj, type_): + obj.update() + yield obj + @classmethod + def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="0") + try: + limit, cursor = int(limit_str), int(cursor_str) + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise BadRequest("Cursor and limit must be positive integers!") + start_index = cursor + end_index = cursor + limit + paginated_slice = itertools.islice(iterator, start_index, end_index) + return paginated_slice, end_index + + def _get_descriptors(self, request: "Request") -> Tuple[Iterator[model.AssetAdministrationShellDescriptor], int]: + """ + Returns all Asset Administration Shell Descriptors + """ + descriptors: Iterator[model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( + model.AssetAdministrationShellDescriptor + ) + + id_short = request.args.get("idShort") + if id_short is not None: + 
descriptors = filter(lambda desc: desc.id_short == id_short, descriptors) + + asset_ids = request.args.getlist("assetIds") + if asset_ids: + # Decode and instantiate the SpecificAssetIds + specific_asset_ids: List[model.SpecificAssetId] = list( + map(lambda asset_id: HTTPApiDecoder.base64urljson(asset_id, model.SpecificAssetId, False), asset_ids) + ) + # Filter by the given SpecificAssetIds + descriptors = filter( + lambda desc: all(specific_asset_id in desc.specific_asset_id  # NOTE(review): descriptors carry specific_asset_id directly, not via asset_information — confirm + for specific_asset_id in specific_asset_ids), + descriptors + ) + + paginated_descriptors, end_index = self._get_slice(request, descriptors) + return paginated_descriptors, end_index + + def _get_descriptor(self, url_args: Dict) -> model.AssetAdministrationShellDescriptor: + return self._get_obj_ts(url_args["aas_id"], model.AssetAdministrationShellDescriptor) + + def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[model.SubmodelDescriptor], int]: + submodel_descriptors: Iterator[model.SubmodelDescriptor] = self._get_all_obj_of_type(model.SubmodelDescriptor) + id_short = request.args.get("idShort") + if id_short is not None: + submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) + semantic_id = request.args.get("semanticId") + if semantic_id is not None: + spec_semantic_id = HTTPApiDecoder.base64urljson( + semantic_id, model.Reference, False) # type: ignore[type-abstract] + submodel_descriptors = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodel_descriptors) + paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) + return paginated_submodel_descriptors, end_index + + def _get_submodel_descriptor(self, url_args: Dict) -> model.SubmodelDescriptor: + return self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor) + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) + try: + response_t =
get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) + + # any raised error that leaves this function will cause a 500 internal server error + # so catch raised http exceptions and return them + except werkzeug.exceptions.HTTPException as e: + return http_exception_to_response(e, response_t) + + # ------ AAS REGISTRY ROUTES ------- + def get_aas_descriptors_all(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + aasdescriptors, cursor = self._get_descriptors(request) + return response_t(list(aasdescriptors), cursor=cursor) + + def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + map_adapter: MapAdapter) -> Response: + descriptor = HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, False) + try: + self.object_store.add(descriptor) + except KeyError as e: + raise Conflict(f"AssetAdministrationShellDescriptor with Identifier {descriptor.id} already exists!") from e + descriptor.commit() + created_resource_url = map_adapter.build(self.get_aas_descriptor, { + "aas_id": descriptor.id + }, force_external=True) + return response_t(descriptor, status=201, headers={"Location": created_resource_url}) + + def get_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + descriptor = self._get_descriptor(url_args) + return response_t(descriptor) + + def put_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + descriptor = self._get_descriptor(url_args) + descriptor.update_from(HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, + is_stripped_request(request))) + descriptor.commit() + return response_t() + + def delete_aas_descriptor(self, request: Request, 
url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + descriptor = self._get_descriptor(url_args) + self.object_store.remove(descriptor) + return response_t() + + def get_all_submodel_descriptors_through_superpath(self, + request: Request, + url_args: Dict, + response_t: Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_descriptor(url_args) + submodel_descriptors, cursor = self._get_slice(request, + aas_descriptor.submodel_descriptors) + return response_t(list(submodel_descriptors), cursor=cursor) + + def get_submodel_descriptor_by_id_through_superpath(self, + request: Request, + url_args: Dict, + response_t: + Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_descriptor(url_args) + submodel_id = url_args["submodel_id"] + submodel_descriptor = next( + (sd for sd in aas_descriptor.submodel_descriptors if + sd.id == submodel_id), None) + if submodel_descriptor is None: + raise NotFound( + f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") + return response_t(submodel_descriptor) + + def post_submodel_descriptor_through_superpath(self, + request: Request, + url_args: Dict, + response_t: Type[ + APIResponse], + map_adapter: MapAdapter) -> Response: + aas_descriptor = self._get_descriptor(url_args) + submodel_descriptor = HTTPApiDecoder.request_body(request, + model.SubmodelDescriptor, + is_stripped_request( + request)) + if any(sd.id == submodel_descriptor.id for sd in + aas_descriptor.submodel_descriptors): + raise Conflict( + f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") + aas_descriptor.submodel_descriptors.append(submodel_descriptor) + aas_descriptor.commit() + created_resource_url = map_adapter.build( + self.get_submodel_descriptor_by_id_through_superpath, { + "aas_id": aas_descriptor.id, + "submodel_id": submodel_descriptor.id + }, force_external=True) + return response_t(submodel_descriptor, status=201, + 
headers={"Location": created_resource_url}) + + def put_submodel_descriptor_by_id_through_superpath(self, + request: Request, + url_args: Dict, + response_t: + Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_descriptor(url_args) + submodel_id = url_args["submodel_id"] + submodel_descriptor = next( + (sd for sd in aas_descriptor.submodel_descriptors if + sd.id == submodel_id), None) + if submodel_descriptor is None: + raise NotFound( + f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") + submodel_descriptor.update_from( + HTTPApiDecoder.request_body(request, + model.SubmodelDescriptor, + is_stripped_request(request))) + aas_descriptor.commit() + return response_t() + + def delete_submodel_descriptor_by_id_through_superpath(self, + request: Request, + url_args: Dict, + response_t: + Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_descriptor(url_args) + submodel_id = url_args["submodel_id"] + submodel_descriptor = next( + (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None) + if submodel_descriptor is None: + raise NotFound(f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") + aas_descriptor.submodel_descriptors.remove(submodel_descriptor) + aas_descriptor.commit() + return response_t() + + # ------ Submodel REGISTRY ROUTES ------- + def get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + submodel_descriptors, cursor = self._get_submodel_descriptors(request) + return response_t(list(submodel_descriptors), cursor=cursor, stripped=is_stripped_request(request)) + + + def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + submodel_descriptor = self._get_submodel_descriptor(url_args) + return response_t(submodel_descriptor, 
stripped=is_stripped_request(request)) + + + def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + map_adapter: MapAdapter) -> Response: + submodel_descriptor = HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request)) + try: + self.object_store.add(submodel_descriptor) + except KeyError as e: + raise Conflict(f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") from e + submodel_descriptor.commit() + created_resource_url = map_adapter.build(self.get_submodel_descriptor_by_id, { + "submodel_id": submodel_descriptor.id + }, force_external=True) + return response_t(submodel_descriptor, status=201, headers={"Location": created_resource_url}) + + + def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + submodel_descriptor = self._get_submodel_descriptor(url_args) + submodel_descriptor.update_from(HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request))) + submodel_descriptor.commit() + return response_t() + + def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor)) + return response_t() + + +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8083, RegistryAPI(create_full_example()), + use_debugger=True, use_reloader=True) diff --git a/sdk/basyx/aas/adapter/resolver.py b/sdk/basyx/aas/adapter/resolver.py new file mode 100644 index 000000000..6e3261c74 --- /dev/null +++ b/sdk/basyx/aas/adapter/resolver.py @@ -0,0 +1,253 @@ +# Copyright (c) 2024 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT 
License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +""" +This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". +""" + +import abc +import base64 +import binascii +import datetime +import enum +import io +import json +import itertools + +from lxml import etree +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.urls +import werkzeug.utils +from werkzeug.exceptions import BadRequest, Conflict, NotFound, UnprocessableEntity +from werkzeug.routing import MapAdapter, Rule, Submount +from werkzeug.wrappers import Request, Response +from werkzeug.datastructures import FileStorage + +from basyx.aas import model +from ._generic import XML_NS_MAP +from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element +from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder +from . import aasx +from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder + +from .http import get_response_type, http_exception_to_response, is_stripped_request + +from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple + + +T = TypeVar("T") + +BASE64URL_ENCODING = "utf-8" + +# Klasse, die das externe Mapping verwaltet + + +class ResolverAPI: + def __init__(self, object_store: model.AbstractObjectStore, + base_path: str = "/api/v3.0"): + self.object_store: model.AbstractObjectStore = object_store + self.url_map = werkzeug.routing.Map([ + Submount(base_path, [ + Rule("/lookup/shellsByAssetLink", methods=["POST"], + endpoint=self.search_all_aas_ids_by_asset_link), + Submount("/lookup/shells", [ + Rule("/", methods=["GET"], + endpoint=self.get_all_asset_links_by_id), + Rule("/", methods=["POST"], + endpoint=self.post_all_asset_links_by_id), + Rule("/", methods=["DELETE"], + 
endpoint=self.delete_all_asset_links_by_id), + ]), + ]) + ], converters={ + "base64url": Base64URLConverter + }, strict_slashes=False) + + def __call__(self, environ, start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + + def _get_obj_ts(self, identifier: model.Identifier, type_: Type[ + model.provider._IT]) -> model.provider._IT: + identifiable = self.object_store.get(identifier) + if not isinstance(identifiable, type_): + raise NotFound( + f"No {type_.__name__} with {identifier} found!") + identifiable.update() + return identifiable + + def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> \ + Iterator[model.provider._IT]: + for obj in self.object_store: + if isinstance(obj, type_): + obj.update() + yield obj + + @classmethod + def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="0") + try: + limit, cursor = int(limit_str), int(cursor_str) + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise BadRequest("Cursor and limit must be positive integers!") + start_index = cursor + end_index = cursor + limit + paginated_slice = itertools.islice(iterator, start_index, end_index) + return paginated_slice, end_index + + def _get_assets(self, request: Request) -> Tuple[ + Iterator[model.SpecificAssetId], int]: + specific_asset_ids: Iterator[ + model.SpecificAssetId] = self._get_all_obj_of_type( + model.SpecificAssetId) + + asset_name = request.args.get("name") + if asset_name is not None: + specific_asset_ids = filter( + lambda asset: asset.name == asset_name, + specific_asset_ids) + + paginated_assets, end_index = self._get_slice(request, + specific_asset_ids) + return paginated_assets, end_index + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ( + 
request.environ) + try: + response_t = get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, + map_adapter=map_adapter) + + # any raised error that leaves this function will cause a 500 internal server error + # so catch raised http exceptions and return them + except werkzeug.exceptions.HTTPException as e: + return http_exception_to_response(e, response_t) + + # ------ Discovery ROUTES ------- + def search_all_aas_ids_by_asset_link(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + """ + Returns a list of Asset Administration Shell IDs linked to specific asset identifiers or the global asset ID + """ + asset_links = HTTPApiDecoder.request_body_list(request, model.AssetLink, + True) + + matching_aas_ids = [] + for asset_link in asset_links: + if asset_link.name == "globalAssetId": + for aas in self._get_all_obj_of_type( + model.AssetAdministrationShell): + if aas.asset_information.global_asset_id == asset_link.value: + matching_aas_ids.append(aas.id_) + else: + for aas in self._get_all_obj_of_type( + model.AssetAdministrationShell): + for specific_asset_id in aas.asset_information.specific_asset_id: + if specific_asset_id.name == asset_link.name and specific_asset_id.value == asset_link.value: + matching_aas_ids.append(aas.id) + + paginated_ids, end_index = self._get_slice(request, iter( + matching_aas_ids)) + return response_t(matching_aas_ids, cursor=end_index) + + def get_all_asset_links_by_id(self, request: Request, + url_args: Dict, + response_t: Type[APIResponse], + **_kwargs) -> Response: + """ + Returns a list of specific asset identifiers based on an Asset Administration Shell ID to edit discoverable content. + The global asset ID is returned as specific asset ID with "name" equal to "globalAssetId" (see Constraint AASd-116). 
+ """ + aas_identifier = url_args.get("aas_id") + try: + aas = self._get_obj_ts(aas_identifier, + model.AssetAdministrationShell) + except NotFound: + raise NotFound( + f"Asset Administration Shell with ID '{aas_identifier}' not found.") + + specific_asset_ids = list(aas.asset_information.specific_asset_id) + + if aas.asset_information.global_asset_id: + specific_asset_ids.append(model.SpecificAssetId( + name="globalAssetId", + value=aas.asset_information.global_asset_id + )) + + return response_t(specific_asset_ids) + + def post_all_asset_links_by_id(self, request: Request, + url_args: Dict, + response_t: Type[APIResponse], + **_kwargs) -> Response: + """ + Creates specific asset identifiers linked to an Asset Administration Shell to edit discoverable content. + """ + aas_identifier = url_args.get("aas_id") + + # Try to retrieve the Asset Administration Shell by its identifier + try: + aas = self._get_obj_ts(aas_identifier, + model.AssetAdministrationShell) + except NotFound: + raise NotFound( + f"Asset Administration Shell with ID '{aas_identifier}' not found.") + + # Decode the request body to retrieve specific asset identifiers + specific_asset_ids = HTTPApiDecoder.request_body_list( + request, model.SpecificAssetId, False) + + # Check for conflicts with existing specific asset identifiers + existing_ids = {id.value for id in + aas.asset_information.specific_asset_id} + for specific_asset_id in specific_asset_ids: + if specific_asset_id.value in existing_ids: + raise Conflict( + f"Specific asset identifier with value '{specific_asset_id.value}' already exists.") + else: + aas.asset_information.specific_asset_id.add(specific_asset_id) + + return response_t(specific_asset_ids) + + def delete_all_asset_links_by_id(self, request: Request, + url_args: Dict, + response_t: Type[APIResponse], + **_kwargs) -> Response: + """ + Deletes all specific asset identifiers linked to an Asset Administration Shell to edit discoverable content. 
+ """ + aas_identifier = url_args.get("aas_id") + + # Try to retrieve the Asset Administration Shell by its identifier + try: + aas = self._get_obj_ts(aas_identifier, + model.AssetAdministrationShell) + except NotFound: + raise NotFound( + f"Asset Administration Shell with ID '{aas_identifier}' not found.") + + # Clear all specific asset identifiers from the AAS + aas.asset_information.specific_asset_id.clear() + + # Return 204 No Content response + return Response(status=204) + + +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8084, ResolverAPI(create_full_example()), + use_debugger=True, use_reloader=True) diff --git a/sdk/basyx/aas/adapter/xml/xml_deserialization.py b/sdk/basyx/aas/adapter/xml/xml_deserialization.py index ab78d3c2e..591d82107 100644 --- a/sdk/basyx/aas/adapter/xml/xml_deserialization.py +++ b/sdk/basyx/aas/adapter/xml/xml_deserialization.py @@ -1181,7 +1181,58 @@ def construct_data_specification_iec61360(cls, element: etree._Element, ds_iec.level_types.add(IEC61360_LEVEL_TYPES_INVERSE[tag]) cls._amend_abstract_attributes(ds_iec, element) return ds_iec + @classmethod + def construct_asset_administration_shell_descriptor(cls, element: etree._Element, object_class=model.AssetAdministrationShellDescriptor, + **_kwargs: Any) -> model.AssetAdministrationShellDescriptor: + id_value = _child_text_mandatory(element, NS_AAS + "id") + id_short = _child_text_mandatory(element, NS_AAS + "idShort") + endpoints_elem = element.find(NS_AAS + "endpoints") + endpoints: List[str] = [] + if endpoints_elem is not None: + endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] + + asset_kind = _child_text_mandatory(element, NS_AAS + "assetKind") + + specific_asset_ids_elem = element.find(NS_AAS + "specificAssetIds") + specific_asset_ids: List[Dict[str, Any]] = [] + if specific_asset_ids_elem is not 
None: + for sid_elem in specific_asset_ids_elem.findall(NS_AAS + "specificAssetId"): + name = sid_elem.findtext(NS_AAS + "name") + value = sid_elem.findtext(NS_AAS + "value") + if name is not None and value is not None: + specific_asset_ids.append({"name": name.strip(), "value": value.strip()}) + + descriptor = object_class( + id=id_value, + id_short=id_short, + endpoints=endpoints, + asset_kind=asset_kind, + specific_asset_ids=specific_asset_ids + ) + + cls._amend_abstract_attributes(descriptor, element) + return descriptor + @classmethod + def construct_submodel_descriptor(cls, element: etree._Element, object_class=model.SubmodelDescriptor, + **_kwargs: Any) -> model.SubmodelDescriptor: + submodel_id = _child_text_mandatory(element, NS_AAS + "id") + id_short = _child_text_mandatory(element, NS_AAS + "idShort") + + endpoints_elem = element.find(NS_AAS + "endpoints") + endpoints: List[str] = [] + if endpoints_elem is not None: + endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] + + # Hier können weitere optionale Felder verarbeitet werden, z.B. semanticId, etc. 
+ + submodel_descriptor = object_class( + id=submodel_id, + id_short=id_short, + endpoints=endpoints + ) + cls._amend_abstract_attributes(submodel_descriptor, element) + return submodel_descriptor class StrictAASFromXmlDecoder(AASFromXmlDecoder): """ @@ -1307,6 +1358,9 @@ class XMLConstructables(enum.Enum): EMBEDDED_DATA_SPECIFICATION = enum.auto() DATA_SPECIFICATION_CONTENT = enum.auto() DATA_SPECIFICATION_IEC61360 = enum.auto() + ASSET_ADMINISTRATION_SHELL_DESCRIPTOR = enum.auto() + SUBMODEL_DESCRIPTOR = enum.auto() + ASSET_LINK = enum.auto() def read_aas_xml_element(file: PathOrIO, construct: XMLConstructables, failsafe: bool = True, stripped: bool = False, @@ -1414,6 +1468,10 @@ def read_aas_xml_element(file: PathOrIO, construct: XMLConstructables, failsafe: # type aliases elif construct == XMLConstructables.VALUE_LIST: constructor = decoder_.construct_value_list + elif construct == XMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR: + constructor = decoder_.construct_asset_administration_shell_descriptor + elif construct == XMLConstructables.SUBMODEL_DESCRIPTOR: + constructor = decoder_.construct_submodel_descriptor else: raise ValueError(f"{construct.name} cannot be constructed!") diff --git a/sdk/basyx/aas/model/__init__.py b/sdk/basyx/aas/model/__init__.py index e541968b5..1dcc29966 100644 --- a/sdk/basyx/aas/model/__init__.py +++ b/sdk/basyx/aas/model/__init__.py @@ -7,14 +7,16 @@ from basyx.aas.model import AssetAdministrationShell, Submodel, Property """ - +from __future__ import absolute_import from .aas import * from .base import * from .submodel import * from .provider import * from .concept import ConceptDescription from . import datatypes - +from .aas_descriptor import AssetAdministrationShellDescriptor +from .descriptor import * +from .submodel_descriptor import SubmodelDescriptor # A mapping of BaSyx Python SDK implementation classes to the corresponding `KeyTypes` enum members for all classes # that are covered by this enum. 
KEY_TYPES_CLASSES: Dict[Type[Referable], KeyTypes] = { diff --git a/sdk/basyx/aas/model/aas.py b/sdk/basyx/aas/model/aas.py index 684a1ff06..dd3a12c23 100644 --- a/sdk/basyx/aas/model/aas.py +++ b/sdk/basyx/aas/model/aas.py @@ -172,3 +172,14 @@ def __init__(self, self.submodel: Set[base.ModelReference[Submodel]] = set() if submodel is None else submodel self.embedded_data_specifications: List[base.EmbeddedDataSpecification] = list(embedded_data_specifications) self.extension = base.NamespaceSet(self, [("name", True)], extension) + +class AssetLink: + + def __init__(self, name: base.LabelType, value: base.Identifier): + if not name: + raise ValueError("AssetLink 'name' must be a non-empty string.") + if not value: + raise ValueError("AssetLink 'value' must be a non-empty string.") + self.name = name + self.value = value + \ No newline at end of file diff --git a/sdk/basyx/aas/model/aas_descriptor.py b/sdk/basyx/aas/model/aas_descriptor.py new file mode 100644 index 000000000..a4051abaf --- /dev/null +++ b/sdk/basyx/aas/model/aas_descriptor.py @@ -0,0 +1,67 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime + +from typing import List, Dict, Optional,Iterable, Set + +from . import base, _string_constraints, aas +from . 
import descriptor +from .submodel_descriptor import SubmodelDescriptor +import re + +class AssetAdministrationShellDescriptor(descriptor.Descriptor): + + def __init__(self, + id_: base.Identifier, + administration: Optional[base.AdministrativeInformation] = None, + asset_kind: Optional[base.AssetKind] = None, + asset_type: Optional[base.Identifier] = None, + endpoints: Optional[List[descriptor.Endpoint]] = None, + global_asset_id: Optional[base.Identifier] = None, + id_short: Optional[base.NameType] = None, + specific_asset_id: Iterable[base.SpecificAssetId] = (), + submodel_descriptors: Optional[List[SubmodelDescriptor]] = None, + description: Optional[base.MultiLanguageTextType] = None, + display_name: Optional[base.MultiLanguageNameType] = None, + extension: Iterable[base.Extension] = ()): + """AssetAdministrationShellDescriptor - + + Nur das 'id'-Feld (id_) ist zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. + """ + super().__init__() + self.administration: Optional[base.AdministrativeInformation] = administration + self.asset_kind: Optional[base.AssetKind] = asset_kind + self.asset_type: Optional[base.Identifier] = asset_type + self.endpoints: Optional[List[descriptor.Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt + self.global_asset_id: Optional[base.Identifier] = global_asset_id + self.id_short: Optional[base.NameType] = id_short + self.id: base.Identifier = id_ + self._specific_asset_id: base.ConstrainedList[base.SpecificAssetId] = base.ConstrainedList( + specific_asset_id, + item_set_hook=self._check_constraint_set_spec_asset_id, + item_del_hook=self._check_constraint_del_spec_asset_id + ) + self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] + self.description: Optional[base.MultiLanguageTextType] = description + self.display_name: Optional[base.MultiLanguageNameType] = display_name + self.extension = base.NamespaceSet(self, [("name", True)], extension) 
+ + @property + def specific_asset_id(self) -> base.ConstrainedList[base.SpecificAssetId]: + return self._specific_asset_id + + @specific_asset_id.setter + def specific_asset_id(self, specific_asset_id: Iterable[base.SpecificAssetId]) -> None: + # constraints are checked via _check_constraint_set_spec_asset_id() in this case + self._specific_asset_id[:] = specific_asset_id + + def _check_constraint_set_spec_asset_id(self, items_to_replace: List[base.SpecificAssetId], + new_items: List[base.SpecificAssetId], + old_list: List[base.SpecificAssetId]) -> None: + self._validate_aasd_131(self.global_asset_id, + len(old_list) - len(items_to_replace) + len(new_items) > 0) + + def _check_constraint_del_spec_asset_id(self, _item_to_del: base.SpecificAssetId, + old_list: List[base.SpecificAssetId]) -> None: + self._validate_aasd_131(self.global_asset_id, len(old_list) > 1) diff --git a/sdk/basyx/aas/model/descriptor.py b/sdk/basyx/aas/model/descriptor.py new file mode 100644 index 000000000..d9071fc84 --- /dev/null +++ b/sdk/basyx/aas/model/descriptor.py @@ -0,0 +1,133 @@ +# coding: utf-8 + +from __future__ import absolute_import + +import abc +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict, Optional,Iterable, Set + +from enum import Enum +import re + +from . 
import base, _string_constraints + +class Descriptor(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __init__(self, description: Optional[base.MultiLanguageTextType] = None, display_name: Optional[base.MultiLanguageNameType] = None, extension: Iterable[base.Extension] = ()): + + super().__init__() + self.namespace_element_sets: List[NamespaceSet] = [] + self.description: Optional[base.MultiLanguageTextType] = description + self.display_name: Optional[base.MultiLanguageNameType] = display_name + self.extension = base.NamespaceSet(self, [("name", True)], extension) + + @property + def description(self) -> Optional[base.MultiLanguageTextType]: + return self._description + + @description.setter + def description(self, value: Optional[base.MultiLanguageTextType]): + self._description = value + + @property + def display_name(self) -> Optional[base.MultiLanguageNameType]: + return self._display_name + + @display_name.setter + def display_name(self, value: Optional[base.MultiLanguageNameType]): + self._display_name = value + + def commit(self): + pass + def update(self): + pass + def update_from(self, other: "Referable", update_source: bool = False): + pass + +class SecurityTypeEnum(Enum): + NONE = "NONE" + RFC_TLSA = "RFC_TLSA" + W3C_DID = "W3C_DID" + +class SecurityAttributeObject: + def __init__(self, type_: SecurityTypeEnum, key: str, value: str): + + if not isinstance(type_, SecurityTypeEnum): + raise ValueError(f"Invalid security type: {type_}. 
Must be one of {list(SecurityTypeEnum)}") + if not key or not isinstance(key, str): + raise ValueError("Key must be a non-empty string.") + if not value or not isinstance(value, str): + raise ValueError("Value must be a non-empty string.") + self.type = type_ + self.key = key + self.value = value + + +class ProtocolInformation: + + def __init__( + self, + href: str, + endpoint_protocol: Optional[str] = None, + endpoint_protocol_version: Optional[List[str]] = None, + subprotocol: Optional[str] = None, + subprotocol_body: Optional[str] = None, + subprotocol_body_encoding: Optional[str] = None, + security_attributes: Optional[List[SecurityAttributeObject]] = None + ): + if not href or not isinstance(href, str): + raise ValueError("href must be a non-empty string representing a valid URL.") + + self.href = href + self.endpoint_protocol = endpoint_protocol + self.endpoint_protocol_version = endpoint_protocol_version or [] + self.subprotocol = subprotocol + self.subprotocol_body = subprotocol_body + self.subprotocol_body_encoding = subprotocol_body_encoding + self.security_attributes = security_attributes or [] +class Endpoint: + + INTERFACE_SHORTNAMES = { + "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", + "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", + "CD-REPOSITORY", "AAS-DISCOVERY" + } + VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") + + def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 + + self.interface = interface + self.protocol_information = protocol_information + + @property + def interface(self) -> str: + return self._interface + + @interface.setter + def interface(self, interface: base.NameType): + if interface is None: + raise ValueError("Invalid value for `interface`, must not be `None`") + if not self.is_valid_interface(interface): + raise ValueError(f"Invalid interface format: {interface}. 
Expected format: '-', ") + + self._interface = interface + + @classmethod + def is_valid_interface(cls, interface: base.NameType) -> bool: + parts = interface.split("-", 1) + if len(parts) != 2: + return False + short_name, version = parts + return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) + + @property + def protocol_information(self) -> ProtocolInformation: + return self._protocol_information + + @protocol_information.setter + def protocol_information(self, protocol_information: ProtocolInformation): + if protocol_information is None: + raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 + + self._protocol_information = protocol_information \ No newline at end of file diff --git a/sdk/basyx/aas/model/submodel_descriptor.py b/sdk/basyx/aas/model/submodel_descriptor.py new file mode 100644 index 000000000..62081d6a6 --- /dev/null +++ b/sdk/basyx/aas/model/submodel_descriptor.py @@ -0,0 +1,29 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime + +from typing import List, Dict, Optional,Iterable, Set + +from .base import AdministrativeInformation +from . import descriptor +from . 
import base +from .base import Reference +import re + + +class SubmodelDescriptor(descriptor.Descriptor): + + def __init__(self, id_: base.Identifier, endpoints: List[descriptor.Endpoint], + administration: Optional[base.AdministrativeInformation] = None, + id_short: Optional[base.NameType]=None, semantic_id: Optional[base.Reference]=None, + supplemental_semantic_id: Iterable[base.Reference] = ()): + + super().__init__() + self.id: base.Identifier = id_ + self.endpoints: List[descriptor.Endpoint] = endpoints + self.administration: Optional[base.AdministrativeInformation] = administration + self.id_short: Optional[base.NameType] = id_short + self.semantic_id: Optional[base.Reference] = semantic_id + self.supplemental_semantic_id: base.ConstrainedList[base.Reference] = \ + base.ConstrainedList(supplemental_semantic_id) diff --git a/test.py b/test.py new file mode 100644 index 000000000..02fa48042 --- /dev/null +++ b/test.py @@ -0,0 +1,67 @@ +""" +from basyx.aas.adapter.resolver import * +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8084, ResolverAPI(create_full_example()), + use_debugger=True, use_reloader=True) + +from basyx.aas.adapter.registry import * +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8083, RegistryAPI(create_full_example()), + use_debugger=True, use_reloader=True) + + +from basyx.aas.adapter.http import * +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), + use_debugger=True, use_reloader=True) +""" + +import multiprocessing +from werkzeug.serving import run_simple +from basyx.aas.examples.data.example_aas import 
create_full_example +from basyx.aas.adapter.resolver import ResolverAPI +from basyx.aas.adapter.registry import RegistryAPI +from basyx.aas.adapter.http import * +import basyx.aas.adapter.aasx as aasx + +def run_resolver_api(): + run_simple("localhost", 8084, ResolverAPI(create_full_example()), + use_debugger=True, + use_reloader=False) + +def run_server_api(): + run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), + use_debugger=True, + use_reloader=False) + +def run_registry_api(): + run_simple("localhost", 8083, RegistryAPI(create_full_example()), + use_debugger=True, + use_reloader=False) + +if __name__ == "__main__": + resolver_process = multiprocessing.Process(target=run_resolver_api) + registry_process = multiprocessing.Process(target=run_registry_api) + server_process = multiprocessing.Process(target=run_server_api) + + # Starten der Prozesse + resolver_process.start() + registry_process.start() + server_process.start() + + # Warten auf Prozesse + resolver_process.join() + registry_process.join() + server_process.join() + + + From 7204ae281b1e6f02f21385e7c2e7cb69b99cc015 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Mon, 31 Mar 2025 12:31:17 +0200 Subject: [PATCH 02/52] Remove test.py from repository and add it to .gitignore --- Discovery Server/app/main.py | 46 ------------------------- Registry Server/app/main.py | 46 ------------------------- test.py | 67 ------------------------------------ 3 files changed, 159 deletions(-) delete mode 100644 Discovery Server/app/main.py delete mode 100644 Registry Server/app/main.py delete mode 100644 test.py diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py deleted file mode 100644 index f3eed03ec..000000000 --- a/Discovery Server/app/main.py +++ /dev/null @@ -1,46 +0,0 @@ -import os -import pathlib -import sys - -from basyx.aas import model, adapter -from basyx.aas.adapter import aasx - -from basyx.aas.backend.local_file import 
LocalFileObjectStore -from basyx.aas.adapter.registry import RegistryAPI - -storage_path = os.getenv("STORAGE_PATH", "/storage") -storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") -base_path = os.getenv("API_BASE_PATH") - -wsgi_optparams = {} - -if base_path is not None: - wsgi_optparams["base_path"] = base_path - -if storage_type == "LOCAL_FILE_BACKEND": - application = RegistryAPI(LocalFileObjectStore(storage_path), aasx.DictSupplementaryFileContainer(), **wsgi_optparams) - -elif storage_type in "LOCAL_FILE_READ_ONLY": - object_store: model.DictObjectStore = model.DictObjectStore() - file_store: aasx.DictSupplementaryFileContainer = aasx.DictSupplementaryFileContainer() - - for file in pathlib.Path(storage_path).iterdir(): - if not file.is_file(): - continue - print(f"Loading {file}") - - if file.suffix.lower() == ".json": - with open(file) as f: - adapter.json.read_aas_json_file_into(object_store, f) - elif file.suffix.lower() == ".xml": - with open(file) as f: - adapter.xml.read_aas_xml_file_into(object_store, file) - elif file.suffix.lower() == ".aasx": - with aasx.AASXReader(file) as reader: - reader.read_into(object_store=object_store, file_store=file_store) - - application = RegistryAPI(object_store, file_store, **wsgi_optparams) - -else: - print(f"STORAGE_TYPE must be either LOCAL_FILE or LOCAL_FILE_READ_ONLY! 
Current value: {storage_type}", - file=sys.stderr) diff --git a/Registry Server/app/main.py b/Registry Server/app/main.py deleted file mode 100644 index f3eed03ec..000000000 --- a/Registry Server/app/main.py +++ /dev/null @@ -1,46 +0,0 @@ -import os -import pathlib -import sys - -from basyx.aas import model, adapter -from basyx.aas.adapter import aasx - -from basyx.aas.backend.local_file import LocalFileObjectStore -from basyx.aas.adapter.registry import RegistryAPI - -storage_path = os.getenv("STORAGE_PATH", "/storage") -storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") -base_path = os.getenv("API_BASE_PATH") - -wsgi_optparams = {} - -if base_path is not None: - wsgi_optparams["base_path"] = base_path - -if storage_type == "LOCAL_FILE_BACKEND": - application = RegistryAPI(LocalFileObjectStore(storage_path), aasx.DictSupplementaryFileContainer(), **wsgi_optparams) - -elif storage_type in "LOCAL_FILE_READ_ONLY": - object_store: model.DictObjectStore = model.DictObjectStore() - file_store: aasx.DictSupplementaryFileContainer = aasx.DictSupplementaryFileContainer() - - for file in pathlib.Path(storage_path).iterdir(): - if not file.is_file(): - continue - print(f"Loading {file}") - - if file.suffix.lower() == ".json": - with open(file) as f: - adapter.json.read_aas_json_file_into(object_store, f) - elif file.suffix.lower() == ".xml": - with open(file) as f: - adapter.xml.read_aas_xml_file_into(object_store, file) - elif file.suffix.lower() == ".aasx": - with aasx.AASXReader(file) as reader: - reader.read_into(object_store=object_store, file_store=file_store) - - application = RegistryAPI(object_store, file_store, **wsgi_optparams) - -else: - print(f"STORAGE_TYPE must be either LOCAL_FILE or LOCAL_FILE_READ_ONLY! 
Current value: {storage_type}", - file=sys.stderr) diff --git a/test.py b/test.py deleted file mode 100644 index 02fa48042..000000000 --- a/test.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -from basyx.aas.adapter.resolver import * -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - run_simple("localhost", 8084, ResolverAPI(create_full_example()), - use_debugger=True, use_reloader=True) - -from basyx.aas.adapter.registry import * -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - run_simple("localhost", 8083, RegistryAPI(create_full_example()), - use_debugger=True, use_reloader=True) - - -from basyx.aas.adapter.http import * -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), - use_debugger=True, use_reloader=True) -""" - -import multiprocessing -from werkzeug.serving import run_simple -from basyx.aas.examples.data.example_aas import create_full_example -from basyx.aas.adapter.resolver import ResolverAPI -from basyx.aas.adapter.registry import RegistryAPI -from basyx.aas.adapter.http import * -import basyx.aas.adapter.aasx as aasx - -def run_resolver_api(): - run_simple("localhost", 8084, ResolverAPI(create_full_example()), - use_debugger=True, - use_reloader=False) - -def run_server_api(): - run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), - use_debugger=True, - use_reloader=False) - -def run_registry_api(): - run_simple("localhost", 8083, RegistryAPI(create_full_example()), - use_debugger=True, - use_reloader=False) - -if __name__ == "__main__": - resolver_process = multiprocessing.Process(target=run_resolver_api) - registry_process = 
multiprocessing.Process(target=run_registry_api) - server_process = multiprocessing.Process(target=run_server_api) - - # Starten der Prozesse - resolver_process.start() - registry_process.start() - server_process.start() - - # Warten auf Prozesse - resolver_process.join() - registry_process.join() - server_process.join() - - - From 25cf2821ab6b6497ed19a7e5b0d83bec78fe5963 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 1 Apr 2025 17:26:35 +0200 Subject: [PATCH 03/52] correct discovery server implementation --- .gitignore | 3 + Discovery Server/Dockerfile | 2 +- Registry Server/Dockerfile | 2 +- sdk/basyx/aas/adapter/resolver.py | 119 ++++++++++++------------------ 4 files changed, 54 insertions(+), 72 deletions(-) diff --git a/.gitignore b/.gitignore index dc7eddbb6..289d593f5 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,6 @@ sdk/basyx/version.py # ignore the content of the server storage server/storage/ + +# local testing file, do not commit +test.py diff --git a/Discovery Server/Dockerfile b/Discovery Server/Dockerfile index 6dc3c4cac..3d52a15ab 100644 --- a/Discovery Server/Dockerfile +++ b/Discovery Server/Dockerfile @@ -39,7 +39,7 @@ RUN chmod +x /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] -COPY ./app /app +COPY ../server/app /app WORKDIR /app CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Registry Server/Dockerfile b/Registry Server/Dockerfile index 6dc3c4cac..3d52a15ab 100644 --- a/Registry Server/Dockerfile +++ b/Registry Server/Dockerfile @@ -39,7 +39,7 @@ RUN chmod +x /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] -COPY ./app /app +COPY ../server/app /app WORKDIR /app CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/sdk/basyx/aas/adapter/resolver.py b/sdk/basyx/aas/adapter/resolver.py index 6e3261c74..4d2231a00 100644 --- a/sdk/basyx/aas/adapter/resolver.py +++ b/sdk/basyx/aas/adapter/resolver.py @@ -36,7 +36,7 @@ from .http import get_response_type, 
http_exception_to_response, is_stripped_request -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple +from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set T = TypeVar("T") @@ -44,12 +44,14 @@ BASE64URL_ENCODING = "utf-8" # Klasse, die das externe Mapping verwaltet - +from basyx.aas import model class ResolverAPI: def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/api/v3.0"): self.object_store: model.AbstractObjectStore = object_store + self.aas_to_assets: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} + self.asset_to_aas: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} self.url_map = werkzeug.routing.Map([ Submount(base_path, [ Rule("/lookup/shellsByAssetLink", methods=["POST"], @@ -129,7 +131,6 @@ def handle_request(self, request: Request): endpoint, values = map_adapter.match() return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) - # any raised error that leaves this function will cause a 500 internal server error # so catch raised http exceptions and return them except werkzeug.exceptions.HTTPException as e: @@ -142,25 +143,14 @@ def search_all_aas_ids_by_asset_link(self, request: Request, url_args: Dict, res Returns a list of Asset Administration Shell IDs linked to specific asset identifiers or the global asset ID """ asset_links = HTTPApiDecoder.request_body_list(request, model.AssetLink, - True) - - matching_aas_ids = [] + False) + matching_aas_ids = set() for asset_link in asset_links: - if asset_link.name == "globalAssetId": - for aas in self._get_all_obj_of_type( - model.AssetAdministrationShell): - if aas.asset_information.global_asset_id == asset_link.value: - matching_aas_ids.append(aas.id_) - else: - for aas in self._get_all_obj_of_type( - model.AssetAdministrationShell): - for specific_asset_id in aas.asset_information.specific_asset_id: - if specific_asset_id.name == 
asset_link.name and specific_asset_id.value == asset_link.value: - matching_aas_ids.append(aas.id) - - paginated_ids, end_index = self._get_slice(request, iter( - matching_aas_ids)) - return response_t(matching_aas_ids, cursor=end_index) + for asset_id, aas_ids in self.asset_to_aas.items(): + if asset_link.name==asset_id.name and asset_link.value==asset_id.value: + matching_aas_ids=aas_ids + matching_aas_ids = list(matching_aas_ids) + return response_t(matching_aas_ids) def get_all_asset_links_by_id(self, request: Request, url_args: Dict, @@ -171,22 +161,12 @@ def get_all_asset_links_by_id(self, request: Request, The global asset ID is returned as specific asset ID with "name" equal to "globalAssetId" (see Constraint AASd-116). """ aas_identifier = url_args.get("aas_id") - try: - aas = self._get_obj_ts(aas_identifier, - model.AssetAdministrationShell) - except NotFound: - raise NotFound( - f"Asset Administration Shell with ID '{aas_identifier}' not found.") - - specific_asset_ids = list(aas.asset_information.specific_asset_id) - - if aas.asset_information.global_asset_id: - specific_asset_ids.append(model.SpecificAssetId( - name="globalAssetId", - value=aas.asset_information.global_asset_id - )) - - return response_t(specific_asset_ids) + matching_asset_ids = set() + for ass_id, asset_ids in self.aas_to_assets.items(): + if ass_id==aas_identifier: + matching_asset_ids=asset_ids + matching_asset_ids = list(matching_asset_ids) + return response_t(matching_asset_ids) def post_all_asset_links_by_id(self, request: Request, url_args: Dict, @@ -196,30 +176,29 @@ def post_all_asset_links_by_id(self, request: Request, Creates specific asset identifiers linked to an Asset Administration Shell to edit discoverable content. 
""" aas_identifier = url_args.get("aas_id") - - # Try to retrieve the Asset Administration Shell by its identifier - try: - aas = self._get_obj_ts(aas_identifier, - model.AssetAdministrationShell) - except NotFound: - raise NotFound( - f"Asset Administration Shell with ID '{aas_identifier}' not found.") - # Decode the request body to retrieve specific asset identifiers specific_asset_ids = HTTPApiDecoder.request_body_list( - request, model.SpecificAssetId, False) + request, model.SpecificAssetId, False) + + # Ensure the aas_identifier exists in the dictionary + if aas_identifier not in self.aas_to_assets: + self.aas_to_assets[aas_identifier] = set() - # Check for conflicts with existing specific asset identifiers - existing_ids = {id.value for id in - aas.asset_information.specific_asset_id} + # Add specific asset IDs to the aas_to_assets dictionary + asset_ids = self.aas_to_assets[aas_identifier] for specific_asset_id in specific_asset_ids: - if specific_asset_id.value in existing_ids: - raise Conflict( - f"Specific asset identifier with value '{specific_asset_id.value}' already exists.") - else: - aas.asset_information.specific_asset_id.add(specific_asset_id) + asset_ids.add(specific_asset_id) + + # Update asset_to_aas dictionary + for specific_asset_id in specific_asset_ids: + if specific_asset_id not in self.asset_to_aas: + self.asset_to_aas[specific_asset_id] = set() + self.asset_to_aas[specific_asset_id].add(aas_identifier) + + # Convert sets to lists for JSON serialization + serializable_aas_to_assets = {key: list(value) for key, value in self.aas_to_assets.items()} - return response_t(specific_asset_ids) + return response_t(serializable_aas_to_assets) def delete_all_asset_links_by_id(self, request: Request, url_args: Dict, @@ -229,20 +208,20 @@ def delete_all_asset_links_by_id(self, request: Request, Deletes all specific asset identifiers linked to an Asset Administration Shell to edit discoverable content. 
""" aas_identifier = url_args.get("aas_id") - - # Try to retrieve the Asset Administration Shell by its identifier - try: - aas = self._get_obj_ts(aas_identifier, - model.AssetAdministrationShell) - except NotFound: - raise NotFound( - f"Asset Administration Shell with ID '{aas_identifier}' not found.") - - # Clear all specific asset identifiers from the AAS - aas.asset_information.specific_asset_id.clear() - - # Return 204 No Content response - return Response(status=204) + # Ensure the aas_identifier exists in the dictionary + if aas_identifier in self.aas_to_assets: + # Remove the links from aas_to_asset dictionary + del self.aas_to_assets[aas_identifier] + + # Remove the aas_identifier from asset_to_aas dictionary + for asset_id, aas_ids in list(self.asset_to_aas.items()): + if aas_identifier in aas_ids: + aas_ids.discard(aas_identifier) + # Clean up empty sets + if not aas_ids: + del self.asset_to_aas[asset_id] + + return response_t() if __name__ == "__main__": From b68efaa08bed1473f8728cfc1cc0b30b148da902 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Wed, 2 Apr 2025 11:40:36 +0200 Subject: [PATCH 04/52] remove unused code --- sdk/basyx/aas/adapter/resolver.py | 52 +++---------------------------- 1 file changed, 4 insertions(+), 48 deletions(-) diff --git a/sdk/basyx/aas/adapter/resolver.py b/sdk/basyx/aas/adapter/resolver.py index 4d2231a00..c720d29cc 100644 --- a/sdk/basyx/aas/adapter/resolver.py +++ b/sdk/basyx/aas/adapter/resolver.py @@ -8,33 +8,20 @@ This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". 
""" -import abc import base64 -import binascii import datetime -import enum import io import json import itertools -from lxml import etree import werkzeug.exceptions import werkzeug.routing -import werkzeug.urls -import werkzeug.utils from werkzeug.exceptions import BadRequest, Conflict, NotFound, UnprocessableEntity from werkzeug.routing import MapAdapter, Rule, Submount from werkzeug.wrappers import Request, Response -from werkzeug.datastructures import FileStorage from basyx.aas import model -from ._generic import XML_NS_MAP -from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element -from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder -from . import aasx -from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder - -from .http import get_response_type, http_exception_to_response, is_stripped_request +from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set @@ -73,22 +60,6 @@ def __call__(self, environ, start_response) -> Iterable[bytes]: response: Response = self.handle_request(Request(environ)) return response(environ, start_response) - def _get_obj_ts(self, identifier: model.Identifier, type_: Type[ - model.provider._IT]) -> model.provider._IT: - identifiable = self.object_store.get(identifier) - if not isinstance(identifiable, type_): - raise NotFound( - f"No {type_.__name__} with {identifier} found!") - identifiable.update() - return identifiable - - def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> \ - Iterator[model.provider._IT]: - for obj in self.object_store: - if isinstance(obj, type_): - obj.update() - yield obj - 
@classmethod def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: limit_str = request.args.get('limit', default="10") @@ -104,22 +75,6 @@ def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T paginated_slice = itertools.islice(iterator, start_index, end_index) return paginated_slice, end_index - def _get_assets(self, request: Request) -> Tuple[ - Iterator[model.SpecificAssetId], int]: - specific_asset_ids: Iterator[ - model.SpecificAssetId] = self._get_all_obj_of_type( - model.SpecificAssetId) - - asset_name = request.args.get("name") - if asset_name is not None: - specific_asset_ids = filter( - lambda asset: asset.name == asset_name, - specific_asset_ids) - - paginated_assets, end_index = self._get_slice(request, - specific_asset_ids) - return paginated_assets, end_index - def handle_request(self, request: Request): map_adapter: MapAdapter = self.url_map.bind_to_environ( request.environ) @@ -150,7 +105,9 @@ def search_all_aas_ids_by_asset_link(self, request: Request, url_args: Dict, res if asset_link.name==asset_id.name and asset_link.value==asset_id.value: matching_aas_ids=aas_ids matching_aas_ids = list(matching_aas_ids) - return response_t(matching_aas_ids) + paginated_slice, cursor= self._get_slice(request, matching_aas_ids) + + return response_t(list(paginated_slice), cursor=cursor) def get_all_asset_links_by_id(self, request: Request, url_args: Dict, @@ -223,7 +180,6 @@ def delete_all_asset_links_by_id(self, request: Request, return response_t() - if __name__ == "__main__": from werkzeug.serving import run_simple from basyx.aas.examples.data.example_aas import create_full_example From b590e1ce613dbd7036aa1062b6bc2843b4a8c436 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Mon, 14 Apr 2025 11:13:31 +0200 Subject: [PATCH 05/52] add in-memory storage and adapt README --- Discovery Server/Dockerfile | 2 +- Discovery Server/README.md | 130 +++++++---------- Discovery Server/app/main.py | 17 +++ 
Discovery Server/compose.yml | 2 - sdk/basyx/aas/adapter/discovery.py | 224 +++++++++++++++++++++++++++++ sdk/basyx/aas/adapter/resolver.py | 188 ------------------------ 6 files changed, 290 insertions(+), 273 deletions(-) create mode 100644 Discovery Server/app/main.py create mode 100644 sdk/basyx/aas/adapter/discovery.py delete mode 100644 sdk/basyx/aas/adapter/resolver.py diff --git a/Discovery Server/Dockerfile b/Discovery Server/Dockerfile index 3d52a15ab..6dc3c4cac 100644 --- a/Discovery Server/Dockerfile +++ b/Discovery Server/Dockerfile @@ -39,7 +39,7 @@ RUN chmod +x /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] -COPY ../server/app /app +COPY ./app /app WORKDIR /app CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Discovery Server/README.md b/Discovery Server/README.md index 339226c53..fe66542ce 100644 --- a/Discovery Server/README.md +++ b/Discovery Server/README.md @@ -1,85 +1,51 @@ -# Eclipse BaSyx Python SDK - HTTP Server - -This package contains a Dockerfile to spin up an exemplary HTTP/REST server following the [Specification of the AAS Part 2 API][6] with ease. -The server currently implements the following interfaces: - -- [Asset Administration Shell Repository Service][4] -- [Submodel Repository Service][5] - -It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory. -The files are only read, changes won't persist. - -Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements). -See [below](#options) on how to configure this. - -## Building -The container image can be built via: -``` -$ docker buildx build -t basyx-python-sdk-http-server . 
-``` - -## Running - -### Storage -The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. - -This directory can be mapped via the `-v` option from another image or a local directory. -To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. -The directory `storage` will be created in the current working directory, if it doesn't already exist. - -### Port -The HTTP server inside the container listens on port 80 by default. -To expose it on the host on port 8080, use the option `-p 8080:80` when running it. - -### Options -The container can be configured via environment variables: -- `API_BASE_PATH` determines the base path under which all other API paths are made available. - Default: `/api/v3.0` -- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: - - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. - The files are not modified, all changes done via the API are only stored in memory. - - When instead set to `LOCAL_FILE`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. - Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. -- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. 
- -### Running Examples - -Putting it all together, the container can be started via the following command: -``` -$ docker run -p 8080:80 -v ./storage:/storage basyx-python-sdk-http-server -``` - -Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there: -``` -> docker run -p 8080:80 -v .\storage:/storage basyx-python-sdk-http-server -``` - -Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. If you want to change this, you can do so like this: -``` -$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-sdk-http-server -``` - -## Building and running the image with docker-compose - -The container image can also be built and run via: -``` -$ docker compose up -``` - -This is the exemplary `docker-compose` file for the server: -````yaml -services: - app: - build: . - ports: - - "8080:80" - volumes: - - ./storage:/storage - -```` - -Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system. -To get a different setup this compose.yaml file can be adapted and expanded. +# Eclipse BaSyx Python SDK - Dicovery Service + +This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. +It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). + +## Overview + +The Discovery Service is a core component in the Asset Administration Shell ecosystem. Its main responsibility is to store and retrieve relations between AAS identifiers and asset identifiers. 
It acts as a lookup service for resolving asset-related queries to corresponding AAS. + +This implementation supports: + +- Adding links between AAS and assets +- Querying AAS by asset links +- Querying asset links by AAS ID +- Removing AAS-related asset links +- Configurable in-memory or MongoDB-based persistent storage + +## Features + +| Feature | Description | +|---------------------------------------------|-----------------------------------------------------------------------------| +| `add_asset_links` | Register specific asset identifiers linked to an AAS | +| `get_asset_links_by_aas` | Retrieve asset links associated with an AAS | +| `search_aas_by_asset_link` | Find AAS identifiers by providing asset link values | +| `remove_asset_links_for_aas` | Delete all asset links associated with a specific AAS | + +## Specification Compliance + +- Complies with: **Discovery Service Specification v3.1.0_SSP-001** + +## Configuration + +The service can be configured to use either: + +- **In-memory storage** (default): Temporary data storage that resets on service restart. +- **MongoDB storage**: Persistent backend storage using MongoDB. + +### Configuration via Environment Variables + +| Variable | Description | Default | +|----------------|--------------------------------------------|-------------------------| +| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | +| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | +| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | + +## Deployment via Docker + +A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. 
## Acknowledgments diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py new file mode 100644 index 000000000..8ee4d1cb6 --- /dev/null +++ b/Discovery Server/app/main.py @@ -0,0 +1,17 @@ +import os +from basyx.aas.adapter.discovery import DiscoveryAPI, MongoDiscoveryStore,InMemoryDiscoveryStore + +def get_discovery_store(): + storage_type = os.getenv("STORAGE_TYPE", "inmemory").lower() + if storage_type == "mongodb": + uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") + dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") + return MongoDiscoveryStore(uri=uri, db_name=dbname) + else: + return InMemoryDiscoveryStore() + + +if __name__ == "__main__": + persistent_store = get_discovery_store() + run_simple("localhost", 8084, DiscoveryAPI(persistent_store=persistent_store), + use_debugger=True, use_reloader=True) \ No newline at end of file diff --git a/Discovery Server/compose.yml b/Discovery Server/compose.yml index 90840a09b..08db6be2d 100644 --- a/Discovery Server/compose.yml +++ b/Discovery Server/compose.yml @@ -3,5 +3,3 @@ services: build: . 
ports: - "8084:80" - volumes: - - ./storage:/storage diff --git a/sdk/basyx/aas/adapter/discovery.py b/sdk/basyx/aas/adapter/discovery.py new file mode 100644 index 000000000..3c5beab12 --- /dev/null +++ b/sdk/basyx/aas/adapter/discovery.py @@ -0,0 +1,224 @@ +import itertools +import werkzeug.exceptions +from werkzeug.wrappers import Request, Response +from basyx.aas import model +from .http import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder +from werkzeug.routing import MapAdapter, Rule, Submount +from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request +from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set + +import copy +from pymongo import MongoClient +from pymongo.collection import Collection + +import json +from basyx.aas.adapter.json import AASToJsonEncoder + +def specific_asset_to_json_obj(asset_id: model.SpecificAssetId) -> dict: + # Encode the asset to a JSON string and then decode to a dict. 
+ json_str = AASToJsonEncoder().encode(asset_id) + return json.loads(json_str) + +class InMemoryDiscoveryStore: + def __init__(self): + self.aas_to_assets: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} + self.asset_to_aas: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} + + def get_asset_links_by_aas(self, aas_identifier: model.Identifier) -> List[dict]: + key = aas_identifier + return list(self.aas_to_assets.get(key, set())) + + def add_asset_links(self, aas_identifier: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: + key = aas_identifier + serialized_assets = [specific_asset_to_json_obj(aid) for aid in asset_ids] + if key in self.aas_to_assets: + for asset in serialized_assets: + if asset not in self.aas_to_assets[key]: + self.aas_to_assets[key].append(asset) + else: + self.aas_to_assets[key] = serialized_assets[:] + + def delete_asset_links_by_aas(self, aas_identifier: model.Identifier) -> None: + key = aas_identifier + if key in self.aas_to_assets: + del self.aas_to_assets[key] + + def search_aas_by_asset_link(self, asset_link: model.AssetLink) -> List[str]: + result = [] + for asset_key, aas_ids in self.asset_to_aas.items(): + expected_key = f"{asset_link.name}:{asset_link.value}" + if asset_key == expected_key: + result.extend(list(aas_ids)) + return result + + def add_aas_for_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + asset_key = f"{asset_id.name}:{asset_id.value}" + aas_key = aas_identifier + if asset_key in self.asset_to_aas: + self.asset_to_aas[asset_key].add(aas_key) + else: + self.asset_to_aas[asset_key] = {aas_key} + + def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + asset_key = f"{asset_id.name}:{asset_id.value}" + aas_key = aas_identifier + if asset_key in self.asset_to_aas: + self.asset_to_aas[asset_key].discard(aas_key) + +class MongoDiscoveryStore: + def __init__(self, + uri: str = 
"mongodb://localhost:27017", + db_name: str = "basyx", + coll_aas_to_assets: str = "aas_to_assets", + coll_asset_to_aas: str = "asset_to_aas"): + self.client = MongoClient(uri) + self.db = self.client[db_name] + self.coll_aas_to_assets: Collection = self.db[coll_aas_to_assets] + self.coll_asset_to_aas: Collection = self.db[coll_asset_to_aas] + # Create an index for fast asset reverse lookups. + self.coll_asset_to_aas.create_index("_id") + + def get_asset_links_by_aas(self, aas_identifier: model.Identifier) -> List[dict]: + key = aas_identifier + doc = self.coll_aas_to_assets.find_one({"_id": key}) + return doc["asset_ids"] if doc and "asset_ids" in doc else [] + + def add_asset_links(self, aas_identifier: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: + key = aas_identifier + # Convert each SpecificAssetId using the serialization helper. + serializable_assets = [specific_asset_to_json_obj(aid) for aid in asset_ids] + self.coll_aas_to_assets.update_one( + {"_id": key}, + {"$addToSet": {"asset_ids": {"$each": serializable_assets}}}, + upsert=True + ) + + def delete_asset_links_by_aas(self, aas_identifier: model.Identifier) -> None: + key = aas_identifier + self.coll_aas_to_assets.delete_one({"_id": key}) + + def search_aas_by_asset_link(self, asset_link: model.AssetLink) -> List[str]: + # Query MongoDB for specificAssetIds where 'name' and 'value' match + doc = self.coll_asset_to_aas.find_one({ + "name": asset_link.name, + "value": asset_link.value + }) + return doc["aas_ids"] if doc and "aas_ids" in doc else [] + + def add_aas_for_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + asset_key = str(specific_asset_to_json_obj(asset_id)) + aas_key = aas_identifier + self.coll_asset_to_aas.update_one( + {"_id": asset_key}, + {"$addToSet": {"aas_ids": aas_key}}, + upsert=True + ) + + def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + asset_key = 
str(specific_asset_to_json_obj(asset_id)) + aas_key = aas_identifier + self.coll_asset_to_aas.update_one( + {"_id": asset_key}, + {"$pull": {"aas_ids": aas_key}} + ) + + + +T = TypeVar("T") + +BASE64URL_ENCODING = "utf-8" + +class DiscoveryAPI: + def __init__(self, + base_path: str = "/api/v3.0", + persistent_store: MongoDiscoveryStore = None): + self.persistent_store = persistent_store or InMemoryDiscoveryStore() + self.url_map = werkzeug.routing.Map([ + Submount(base_path, [ + Rule("/lookup/shellsByAssetLink", methods=["POST"], + endpoint=self.search_all_aas_ids_by_asset_link), + Submount("/lookup/shells", [ + Rule("/", methods=["GET"], + endpoint=self.get_all_asset_links_by_id), + Rule("/", methods=["POST"], + endpoint=self.post_all_asset_links_by_id), + Rule("/", methods=["DELETE"], + endpoint=self.delete_all_asset_links_by_id), + ]), + ]) + ], converters={ + "base64url": Base64URLConverter + }, strict_slashes=False) + + def __call__(self, environ, start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + + def _get_slice(self, request: Request, iterator): + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="0") + try: + limit, cursor = int(limit_str), int(cursor_str) + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise werkzeug.exceptions.BadRequest("Cursor and limit must be positive integers!") + paginated_slice = itertools.islice(iterator, cursor, cursor + limit) + return paginated_slice, cursor + limit + + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ( + request.environ) + try: + response_t = get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, + map_adapter=map_adapter) + # any raised error that leaves 
this function will cause a 500 internal server error + # so catch raised http exceptions and return them + except werkzeug.exceptions.HTTPException as e: + return http_exception_to_response(e, response_t) + + def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + asset_links = HTTPApiDecoder.request_body_list(request, model.AssetLink, False) + matching_aas_keys = set() + for asset_link in asset_links: + aas_keys = self.persistent_store.search_aas_by_asset_link(asset_link) + matching_aas_keys.update(aas_keys) + matching_aas_keys = list(matching_aas_keys) + paginated_slice, cursor = self._get_slice(request, matching_aas_keys) + return response_t(list(paginated_slice), cursor=cursor) + + def get_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + aas_identifier = url_args.get("aas_id") + asset_ids = self.persistent_store.get_asset_links_by_aas(aas_identifier) + return response_t(asset_ids) + + def post_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + aas_identifier = url_args.get("aas_id") + specific_asset_ids = HTTPApiDecoder.request_body_list(request, model.SpecificAssetId, False) + self.persistent_store.add_asset_links(aas_identifier, specific_asset_ids) + for asset_id in specific_asset_ids: + self.persistent_store.add_aas_for_asset_link(asset_id, aas_identifier) + updated = {aas_identifier: self.persistent_store.get_asset_links_by_aas(aas_identifier)} + return response_t(updated) + + def delete_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + aas_identifier = url_args.get("aas_id") + self.persistent_store.delete_asset_links_by_aas(aas_identifier) + for key in list(self.persistent_store.asset_to_aas.keys()): + self.persistent_store.asset_to_aas[key].discard(aas_identifier) + return response_t() +""""" +if __name__ == "__main__": + from 
werkzeug.serving import run_simple + persistent_store = MongoDiscoveryStore(uri="mongodb://localhost:27017", db_name="basyx_registry") + # run_simple("localhost", 8084, ResolverAPI(), + # use_debugger=True, use_reloader=True) + run_simple("localhost", 8084, ResolverAPI(persistent_store=persistent_store), + use_debugger=True, use_reloader=True) +""" \ No newline at end of file diff --git a/sdk/basyx/aas/adapter/resolver.py b/sdk/basyx/aas/adapter/resolver.py deleted file mode 100644 index c720d29cc..000000000 --- a/sdk/basyx/aas/adapter/resolver.py +++ /dev/null @@ -1,188 +0,0 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. -# -# SPDX-License-Identifier: MIT -""" -This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". -""" - -import base64 -import datetime -import io -import json -import itertools - -import werkzeug.exceptions -import werkzeug.routing -from werkzeug.exceptions import BadRequest, Conflict, NotFound, UnprocessableEntity -from werkzeug.routing import MapAdapter, Rule, Submount -from werkzeug.wrappers import Request, Response - -from basyx.aas import model -from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request - -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set - - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - -# Klasse, die das externe Mapping verwaltet -from basyx.aas import model - -class ResolverAPI: - def __init__(self, object_store: model.AbstractObjectStore, - base_path: str = "/api/v3.0"): - self.object_store: model.AbstractObjectStore = object_store - self.aas_to_assets: Dict[model.Identifier, 
Set[model.SpecificAssetId]] = {} - self.asset_to_aas: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} - self.url_map = werkzeug.routing.Map([ - Submount(base_path, [ - Rule("/lookup/shellsByAssetLink", methods=["POST"], - endpoint=self.search_all_aas_ids_by_asset_link), - Submount("/lookup/shells", [ - Rule("/", methods=["GET"], - endpoint=self.get_all_asset_links_by_id), - Rule("/", methods=["POST"], - endpoint=self.post_all_asset_links_by_id), - Rule("/", methods=["DELETE"], - endpoint=self.delete_all_asset_links_by_id), - ]), - ]) - ], converters={ - "base64url": Base64URLConverter - }, strict_slashes=False) - - def __call__(self, environ, start_response) -> Iterable[bytes]: - response: Response = self.handle_request(Request(environ)) - return response(environ, start_response) - - @classmethod - def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="0") - try: - limit, cursor = int(limit_str), int(cursor_str) - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise BadRequest("Cursor and limit must be positive integers!") - start_index = cursor - end_index = cursor + limit - paginated_slice = itertools.islice(iterator, start_index, end_index) - return paginated_slice, end_index - - def handle_request(self, request: Request): - map_adapter: MapAdapter = self.url_map.bind_to_environ( - request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, - map_adapter=map_adapter) - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - - # ------ Discovery 
ROUTES ------- - def search_all_aas_ids_by_asset_link(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - """ - Returns a list of Asset Administration Shell IDs linked to specific asset identifiers or the global asset ID - """ - asset_links = HTTPApiDecoder.request_body_list(request, model.AssetLink, - False) - matching_aas_ids = set() - for asset_link in asset_links: - for asset_id, aas_ids in self.asset_to_aas.items(): - if asset_link.name==asset_id.name and asset_link.value==asset_id.value: - matching_aas_ids=aas_ids - matching_aas_ids = list(matching_aas_ids) - paginated_slice, cursor= self._get_slice(request, matching_aas_ids) - - return response_t(list(paginated_slice), cursor=cursor) - - def get_all_asset_links_by_id(self, request: Request, - url_args: Dict, - response_t: Type[APIResponse], - **_kwargs) -> Response: - """ - Returns a list of specific asset identifiers based on an Asset Administration Shell ID to edit discoverable content. - The global asset ID is returned as specific asset ID with "name" equal to "globalAssetId" (see Constraint AASd-116). - """ - aas_identifier = url_args.get("aas_id") - matching_asset_ids = set() - for ass_id, asset_ids in self.aas_to_assets.items(): - if ass_id==aas_identifier: - matching_asset_ids=asset_ids - matching_asset_ids = list(matching_asset_ids) - return response_t(matching_asset_ids) - - def post_all_asset_links_by_id(self, request: Request, - url_args: Dict, - response_t: Type[APIResponse], - **_kwargs) -> Response: - """ - Creates specific asset identifiers linked to an Asset Administration Shell to edit discoverable content. 
- """ - aas_identifier = url_args.get("aas_id") - # Decode the request body to retrieve specific asset identifiers - specific_asset_ids = HTTPApiDecoder.request_body_list( - request, model.SpecificAssetId, False) - - # Ensure the aas_identifier exists in the dictionary - if aas_identifier not in self.aas_to_assets: - self.aas_to_assets[aas_identifier] = set() - - # Add specific asset IDs to the aas_to_assets dictionary - asset_ids = self.aas_to_assets[aas_identifier] - for specific_asset_id in specific_asset_ids: - asset_ids.add(specific_asset_id) - - # Update asset_to_aas dictionary - for specific_asset_id in specific_asset_ids: - if specific_asset_id not in self.asset_to_aas: - self.asset_to_aas[specific_asset_id] = set() - self.asset_to_aas[specific_asset_id].add(aas_identifier) - - # Convert sets to lists for JSON serialization - serializable_aas_to_assets = {key: list(value) for key, value in self.aas_to_assets.items()} - - return response_t(serializable_aas_to_assets) - - def delete_all_asset_links_by_id(self, request: Request, - url_args: Dict, - response_t: Type[APIResponse], - **_kwargs) -> Response: - """ - Deletes all specific asset identifiers linked to an Asset Administration Shell to edit discoverable content. 
- """ - aas_identifier = url_args.get("aas_id") - # Ensure the aas_identifier exists in the dictionary - if aas_identifier in self.aas_to_assets: - # Remove the links from aas_to_asset dictionary - del self.aas_to_assets[aas_identifier] - - # Remove the aas_identifier from asset_to_aas dictionary - for asset_id, aas_ids in list(self.asset_to_aas.items()): - if aas_identifier in aas_ids: - aas_ids.discard(aas_identifier) - # Clean up empty sets - if not aas_ids: - del self.asset_to_aas[asset_id] - - return response_t() - -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - run_simple("localhost", 8084, ResolverAPI(create_full_example()), - use_debugger=True, use_reloader=True) From 1b676e7ea91d764f92a9f8ea9267dba69e1293c1 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Mon, 14 Apr 2025 18:45:39 +0200 Subject: [PATCH 06/52] change main.py and disccovery.py --- Discovery Server/app/main.py | 32 +++++++++++++++++++----------- Discovery Server/compose.yml | 2 ++ sdk/basyx/aas/adapter/discovery.py | 24 ++++++++++++---------- 3 files changed, 35 insertions(+), 23 deletions(-) diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py index 8ee4d1cb6..19c97b416 100644 --- a/Discovery Server/app/main.py +++ b/Discovery Server/app/main.py @@ -1,17 +1,25 @@ import os +import sys from basyx.aas.adapter.discovery import DiscoveryAPI, MongoDiscoveryStore,InMemoryDiscoveryStore -def get_discovery_store(): - storage_type = os.getenv("STORAGE_TYPE", "inmemory").lower() - if storage_type == "mongodb": - uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") - dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") - return MongoDiscoveryStore(uri=uri, db_name=dbname) - else: - return InMemoryDiscoveryStore() +storage_type = os.getenv("STORAGE_TYPE", "inmemory") +base_path = os.getenv("API_BASE_PATH") +wsgi_optparams = {} + +if base_path is not None: + 
wsgi_optparams["base_path"] = base_path + +if storage_type == "inmemory": + application = DiscoveryAPI(InMemoryDiscoveryStore(), **wsgi_optparams) + +elif storage_type in "mongodb": + uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") + dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") + + application = DiscoveryAPI(MongoDiscoveryStore(uri,dbname), **wsgi_optparams) + +else: + print(f"STORAGE_TYPE must be either inmemory or mongodb! Current value: {storage_type}", + file=sys.stderr) -if __name__ == "__main__": - persistent_store = get_discovery_store() - run_simple("localhost", 8084, DiscoveryAPI(persistent_store=persistent_store), - use_debugger=True, use_reloader=True) \ No newline at end of file diff --git a/Discovery Server/compose.yml b/Discovery Server/compose.yml index 08db6be2d..6e1d65404 100644 --- a/Discovery Server/compose.yml +++ b/Discovery Server/compose.yml @@ -3,3 +3,5 @@ services: build: . ports: - "8084:80" + environment: + STORAGE_TYPE: inmemory diff --git a/sdk/basyx/aas/adapter/discovery.py b/sdk/basyx/aas/adapter/discovery.py index 3c5beab12..e6e68cc67 100644 --- a/sdk/basyx/aas/adapter/discovery.py +++ b/sdk/basyx/aas/adapter/discovery.py @@ -7,6 +7,8 @@ from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set +import abc + import copy from pymongo import MongoClient from pymongo.collection import Collection @@ -19,7 +21,12 @@ def specific_asset_to_json_obj(asset_id: model.SpecificAssetId) -> dict: json_str = AASToJsonEncoder().encode(asset_id) return json.loads(json_str) -class InMemoryDiscoveryStore: +class AbstractDiscoveryStore(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __init__(self): + pass + +class InMemoryDiscoveryStore(AbstractDiscoveryStore): def 
__init__(self): self.aas_to_assets: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} self.asset_to_aas: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} @@ -65,7 +72,7 @@ def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identi if asset_key in self.asset_to_aas: self.asset_to_aas[asset_key].discard(aas_key) -class MongoDiscoveryStore: +class MongoDiscoveryStore(AbstractDiscoveryStore): def __init__(self, uri: str = "mongodb://localhost:27017", db_name: str = "basyx", @@ -130,9 +137,8 @@ def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identi class DiscoveryAPI: def __init__(self, - base_path: str = "/api/v3.0", - persistent_store: MongoDiscoveryStore = None): - self.persistent_store = persistent_store or InMemoryDiscoveryStore() + persistent_store: AbstractDiscoveryStore, base_path: str = "/api/v3.0"): + self.persistent_store: AbstractDiscoveryStore = persistent_store self.url_map = werkzeug.routing.Map([ Submount(base_path, [ Rule("/lookup/shellsByAssetLink", methods=["POST"], @@ -213,12 +219,8 @@ def delete_all_asset_links_by_id(self, request: Request, url_args: dict, respons for key in list(self.persistent_store.asset_to_aas.keys()): self.persistent_store.asset_to_aas[key].discard(aas_identifier) return response_t() -""""" + if __name__ == "__main__": from werkzeug.serving import run_simple - persistent_store = MongoDiscoveryStore(uri="mongodb://localhost:27017", db_name="basyx_registry") - # run_simple("localhost", 8084, ResolverAPI(), - # use_debugger=True, use_reloader=True) - run_simple("localhost", 8084, ResolverAPI(persistent_store=persistent_store), + run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), use_debugger=True, use_reloader=True) -""" \ No newline at end of file From 7cff8cfdc5c1f778f7fbb26903f38bbc687c53f2 Mon Sep 17 00:00:00 2001 From: zrgt Date: Tue, 15 Apr 2025 17:17:41 +0200 Subject: [PATCH 07/52] Extract server-related components into server app This 
refactoring separates server functionalities (Discovery, Registries, Repositories) from the core SDK to improve modularity and maintainability. Changes: - Server is now a separate app - Added an initial pyproject.toml for the server app - Moved all server-related classes and functions from sdk to server - Consolidated descriptor.py, submodel_descriptor.py, and aas_descriptor.py into a single server_model.py file --- .../aas/adapter/json/json_deserialization.py | 147 +---- .../aas/adapter/json/json_serialization.py | 114 ---- .../aas/adapter/xml/xml_deserialization.py | 58 -- sdk/basyx/aas/model/__init__.py | 6 +- sdk/basyx/aas/model/aas.py | 11 - sdk/basyx/aas/model/aas_descriptor.py | 67 --- sdk/basyx/aas/model/descriptor.py | 133 ----- sdk/basyx/aas/model/submodel_descriptor.py | 29 - sdk/pyproject.toml | 6 +- server/app/__init__.py | 0 server/app/adapter/__init__.py | 0 server/app/adapter/jsonization.py | 544 ++++++++++++++++++ server/app/adapter/xmlization.py | 162 ++++++ .../aas/adapter => server/app}/discovery.py | 13 +- {sdk/basyx/aas/adapter => server/app}/http.py | 35 +- server/app/main.py | 2 +- server/app/py.typed | 0 .../aas/adapter => server/app}/registry.py | 57 +- server/app/server_model.py | 220 +++++++ server/pyproject.toml | 63 ++ 20 files changed, 1047 insertions(+), 620 deletions(-) delete mode 100644 sdk/basyx/aas/model/aas_descriptor.py delete mode 100644 sdk/basyx/aas/model/descriptor.py delete mode 100644 sdk/basyx/aas/model/submodel_descriptor.py create mode 100644 server/app/__init__.py create mode 100644 server/app/adapter/__init__.py create mode 100644 server/app/adapter/jsonization.py create mode 100644 server/app/adapter/xmlization.py rename {sdk/basyx/aas/adapter => server/app}/discovery.py (96%) rename {sdk/basyx/aas/adapter => server/app}/http.py (98%) create mode 100644 server/app/py.typed rename {sdk/basyx/aas/adapter => server/app}/registry.py (90%) create mode 100644 server/app/server_model.py create mode 100644 
server/pyproject.toml diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index 3a50975a9..c1ce35fef 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,7 +34,6 @@ import json import logging import pprint -from abc import abstractmethod from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args from basyx.aas import model @@ -189,9 +188,6 @@ def object_hook(cls, dct: Dict[str, object]) -> object: 'Range': cls._construct_range, 'ReferenceElement': cls._construct_reference_element, 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, - 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, - 'SubmodelDescriptor': cls._construct_submodel_descriptor, - 'AssetLink': cls._construct_asset_link, } # Get modelType and constructor function @@ -279,16 +275,7 @@ def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None if 'extensions' in dct: for extension in _get_ts(dct, 'extensions', list): obj.extension.add(cls._construct_extension(extension)) - if isinstance(obj, model.Descriptor): - if 'description' in dct: - obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), - model.MultiLanguageTextType) - if 'displayName' in dct: - obj.display_name = cls._construct_lang_string_set(_get_ts(dct, 'displayName', list), - model.MultiLanguageNameType) - if 'extensions' in dct: - for extension in _get_ts(dct, 'extensions', list): - obj.extension.add(cls._construct_extension(extension)) + @classmethod def _get_kind(cls, dct: Dict[str, object]) -> model.ModellingKind: """ @@ -760,134 +747,6 @@ def _construct_reference_element( ret.value = cls._construct_reference(_get_ts(dct, 'value', dict)) return ret - @classmethod - def _construct_asset_administration_shell_descriptor( - cls, dct: Dict[str, object], 
object_class=model.AssetAdministrationShellDescriptor) -> model.AssetAdministrationShellDescriptor: - ret = object_class(id_=_get_ts(dct, 'id', str)) - cls._amend_abstract_attributes(ret, dct) - if 'administration' in dct: - ret.administration = cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) - if 'assetkind' in dct: - asset_kind=ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] - if 'assetType' in dct: - ret.asset_type = _get_ts(dct, 'assetType', str) - global_asset_id = None - if 'globalAssetId' in dct: - global_asset_id = _get_ts(dct, 'globalAssetId', str) - specific_asset_id = set() - if 'specificAssetIds' in dct: - for desc_data in _get_ts(dct, "specificAssetIds", list): - specific_asset_id.add(cls._construct_specific_asset_id(desc_data, model.SpecificAssetId)) - if 'endpoints' in dct: - for endpoint_dct in _get_ts(dct, 'endpoints', list): - if 'protocolInformation' in endpoint_dct: - ret.endpoints.append( - cls._construct_endpoint(endpoint_dct, - model.Endpoint)) - elif 'href' in endpoint_dct: - protocol_info = model.ProtocolInformation( - href=_get_ts(endpoint_dct['href'], 'href', str), - endpoint_protocol=_get_ts(endpoint_dct['href'], - 'endpointProtocol', - str) if 'endpointProtocol' in - endpoint_dct[ - 'href'] else None, - endpoint_protocol_version=_get_ts( - endpoint_dct['href'], - 'endpointProtocolVersion', - list) if 'endpointProtocolVersion' in - endpoint_dct['href'] else None - ) - ret.endpoints.append(model.Endpoint( - protocol_information=protocol_info, - interface=_get_ts(endpoint_dct, 'interface', - str))) - if 'idShort' in dct: - ret.id_short = _get_ts(dct, 'idShort', str) - if 'submodelDescriptors' in dct: - ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), model.SubmodelDescriptor) - return ret - - @classmethod - def _construct_protocol_information(cls, dct: Dict[str, object], - object_class=model.ProtocolInformation) -> model.ProtocolInformation: - ret = 
object_class( - href=_get_ts(dct, 'href', str), - endpoint_protocol=_get_ts(dct, 'endpointProtocol', - str) if 'endpointProtocol' in dct else None, - endpoint_protocol_version=_get_ts(dct, - 'endpointProtocolVersion', - list) if 'endpointProtocolVersion' in dct else None, - subprotocol=_get_ts(dct, 'subprotocol', - str) if 'subprotocol' in dct else None, - subprotocol_body=_get_ts(dct, 'subprotocolBody', - str) if 'subprotocolBody' in dct else None, - subprotocol_body_encoding=_get_ts(dct, - 'subprotocolBodyEncoding', - str) if 'subprotocolBodyEncoding' in dct else None - ) - return ret - - @classmethod - def _construct_endpoint(cls, dct: Dict[str, object], - object_class=model.Endpoint) -> model.Endpoint: - ret = object_class( - protocol_information=cls._construct_protocol_information( - _get_ts(dct, 'protocolInformation', dict), - model.ProtocolInformation - ), - interface=_get_ts(dct, 'interface', - str) - ) - cls._amend_abstract_attributes(ret, dct) - return ret - - @classmethod - def _construct_submodel_descriptor( - cls, dct: Dict[str, object], object_class=model.SubmodelDescriptor) -> model.SubmodelDescriptor: - ret = object_class(id_=_get_ts(dct, 'id', str), - endpoints=[]) - cls._amend_abstract_attributes(ret, dct) - for endpoint_dct in _get_ts(dct, 'endpoints', list): - if 'protocolInformation' in endpoint_dct: - ret.endpoints.append( - cls._construct_endpoint(endpoint_dct, - model.Endpoint)) - elif 'href' in endpoint_dct: - protocol_info = model.ProtocolInformation( - href=_get_ts(endpoint_dct['href'], 'href', str), - endpoint_protocol=_get_ts(endpoint_dct['href'], - 'endpointProtocol', - str) if 'endpointProtocol' in - endpoint_dct[ - 'href'] else None, - endpoint_protocol_version=_get_ts( - endpoint_dct['href'], - 'endpointProtocolVersion', - list) if 'endpointProtocolVersion' in - endpoint_dct['href'] else None - ) - ret.endpoints.append(model.Endpoint( - protocol_information=protocol_info, - interface=_get_ts(endpoint_dct, 'interface', - str))) - if 
'administration' in dct: - ret.administration = cls._construct_administrative_information( - _get_ts(dct, 'administration', dict)) - if 'idShort' in dct: - ret.id_short = _get_ts(dct, 'idShort', str) - if 'semanticId' in dct: - ret.semantic_id = cls._construct_reference(_get_ts(dct, 'semanticId', dict)) - if 'supplementalSemanticIds' in dct: - for ref in _get_ts(dct, 'supplementalSemanticIds', list): - ret.supplemental_semantic_id.append(cls._construct_reference(ref)) - return ret - @classmethod - def _construct_asset_link ( - cls, dct: Dict[str, object], object_class=model.AssetLink) -> model.AssetLink: - ret = object_class(name=_get_ts(dct, 'name', str), - value=_get_ts(dct, 'value', str)) - return ret class StrictAASFromJsonDecoder(AASFromJsonDecoder): """ @@ -986,9 +845,7 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription), - ('assetAdministrationShellDescriptors', model.AssetAdministrationShellDescriptor), - ('submodelDescriptors', model.SubmodelDescriptor)): + ('conceptDescriptions', model.ConceptDescription)): try: lst = _get_ts(data, name, list) except (KeyError, TypeError): diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index 29d614fa8..8c6a671f1 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -93,11 +93,6 @@ def default(self, obj: object) -> object: model.SubmodelElementCollection: self._submodel_element_collection_to_json, model.SubmodelElementList: self._submodel_element_list_to_json, model.ValueReferencePair: self._value_reference_pair_to_json, - model.AssetAdministrationShellDescriptor: self._asset_administration_shell_descriptor_to_json, - model.SubmodelDescriptor: self._submodel_descriptor_to_json, - 
model.Endpoint: self._endpoint_to_json, - model.ProtocolInformation: self._protocol_information_to_json, - model.AssetLink: self._asset_link_to_json } for typ in mapping: if isinstance(obj, typ): @@ -155,14 +150,6 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: if isinstance(obj, model.Qualifiable) and not cls.stripped: if obj.qualifier: data['qualifiers'] = list(obj.qualifier) - if isinstance(obj, model.Descriptor): - if obj.description: - data['description'] = obj.description - if obj.display_name: - data['displayName'] = obj.display_name - if obj.extension: - data['extensions'] = list(obj.extension) - return data # ############################################################# @@ -683,92 +670,6 @@ def _basic_event_element_to_json(cls, obj: model.BasicEventElement) -> Dict[str, data['maxInterval'] = model.datatypes.xsd_repr(obj.max_interval) return data - @classmethod - def _asset_administration_shell_descriptor_to_json(cls, obj: model.AssetAdministrationShellDescriptor) -> Dict[str, object]: - """ - serialization of an object from class AssetAdministrationShell to json - - :param obj: object of class AssetAdministrationShell - :return: dict with the serialized attributes of this object - """ - data = cls._abstract_classes_to_json(obj) - data.update(cls._namespace_to_json(obj)) - data['id'] = obj.id - if obj.administration: - data['administration'] = obj.administration - if obj.asset_kind: - data['assetKind'] = _generic.ASSET_KIND[obj.asset_kind] - if obj.asset_type: - data['assetType'] = obj.asset_type - if obj.global_asset_id: - data['globalAssetId'] = obj.global_asset_id - if obj.specific_asset_id: - data['specificAssetIds'] = list(obj.specific_asset_id) - if obj.endpoints: - data['endpoints'] = list(obj.endpoints) - if obj.id_short: - data['idShort'] = obj.id_short - if obj.submodel_descriptors: - data['submodelDescriptors'] = list(obj.submodel_descriptors) - return data - - @classmethod - def _protocol_information_to_json(cls, - obj: 
model.ProtocolInformation) -> \ - Dict[str, object]: - data = cls._abstract_classes_to_json(obj) - - data['href'] = obj.href - if obj.endpoint_protocol: - data['endpointProtocol'] = obj.endpoint_protocol - if obj.endpoint_protocol_version: - data['endpointProtocolVersion'] = obj.endpoint_protocol_version - if obj.subprotocol: - data['subprotocol'] = obj.subprotocol - if obj.subprotocol_body: - data['subprotocolBody'] = obj.subprotocol_body - if obj.subprotocol_body_encoding: - data['subprotocolBodyEncoding'] = obj.subprotocol_body_encoding - - return data - - - @classmethod - def _endpoint_to_json(cls, obj: model.Endpoint) -> Dict[ - str, object]: - data = cls._abstract_classes_to_json(obj) - data['protocolInformation'] = cls._protocol_information_to_json( - obj.protocol_information) - data['interface'] = obj.interface - return data - - @classmethod - def _submodel_descriptor_to_json(cls, obj: model.SubmodelDescriptor) -> Dict[str, object]: - """ - serialization of an object from class Submodel to json - - :param obj: object of class Submodel - :return: dict with the serialized attributes of this object - """ - data = cls._abstract_classes_to_json(obj) - data['id'] = obj.id - data['endpoints'] = [cls._endpoint_to_json(ep) for ep in - obj.endpoints] - if obj.id_short: - data['idShort'] = obj.id_short - if obj.administration: - data['administration'] = obj.administration - if obj.semantic_id: - data['semanticId'] = obj.semantic_id - if obj.supplemental_semantic_id: - data['supplementalSemanticIds'] = list(obj.supplemental_semantic_id) - return data - @classmethod - def _asset_link_to_json(cls, obj: model.AssetLink) -> Dict[str, object]: - data = cls._abstract_classes_to_json(obj) - data['name'] = obj.name - data['value'] = obj.value - return data class StrippedAASToJsonEncoder(AASToJsonEncoder): """ @@ -797,9 +698,6 @@ def _create_dict(data: model.AbstractObjectStore) -> dict: asset_administration_shells: List[model.AssetAdministrationShell] = [] submodels: 
List[model.Submodel] = [] concept_descriptions: List[model.ConceptDescription] = [] - asset_administration_shell_descriptors: List[model.AssetAdministrationShellDescriptor] = [] - submodel_descriptors: List[model.SubmodelDescriptor] = [] - assets_links: List[model.AssetLink] = [] for obj in data: if isinstance(obj, model.AssetAdministrationShell): asset_administration_shells.append(obj) @@ -807,12 +705,6 @@ def _create_dict(data: model.AbstractObjectStore) -> dict: submodels.append(obj) elif isinstance(obj, model.ConceptDescription): concept_descriptions.append(obj) - elif isinstance(obj, model.AssetAdministrationShellDescriptor): - asset_administration_shell_descriptors.append(obj) - elif isinstance(obj, model.SubmodelDescriptor): - submodel_descriptors.append(obj) - elif isinstance(obj, model.AssetLink): - assets_links.append(obj) dict_: Dict[str, List] = {} if asset_administration_shells: dict_['assetAdministrationShells'] = asset_administration_shells @@ -820,12 +712,6 @@ def _create_dict(data: model.AbstractObjectStore) -> dict: dict_['submodels'] = submodels if concept_descriptions: dict_['conceptDescriptions'] = concept_descriptions - if asset_administration_shell_descriptors: - dict_['assetAdministrationShellDescriptors'] = asset_administration_shell_descriptors - if submodel_descriptors: - dict_['submodelDescriptors'] = submodel_descriptors - if assets_links: - dict_['assetLinks'] = assets_links return dict_ diff --git a/sdk/basyx/aas/adapter/xml/xml_deserialization.py b/sdk/basyx/aas/adapter/xml/xml_deserialization.py index 591d82107..ab78d3c2e 100644 --- a/sdk/basyx/aas/adapter/xml/xml_deserialization.py +++ b/sdk/basyx/aas/adapter/xml/xml_deserialization.py @@ -1181,58 +1181,7 @@ def construct_data_specification_iec61360(cls, element: etree._Element, ds_iec.level_types.add(IEC61360_LEVEL_TYPES_INVERSE[tag]) cls._amend_abstract_attributes(ds_iec, element) return ds_iec - @classmethod - def construct_asset_administration_shell_descriptor(cls, element: 
etree._Element, object_class=model.AssetAdministrationShellDescriptor, - **_kwargs: Any) -> model.AssetAdministrationShellDescriptor: - id_value = _child_text_mandatory(element, NS_AAS + "id") - id_short = _child_text_mandatory(element, NS_AAS + "idShort") - endpoints_elem = element.find(NS_AAS + "endpoints") - endpoints: List[str] = [] - if endpoints_elem is not None: - endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] - - asset_kind = _child_text_mandatory(element, NS_AAS + "assetKind") - - specific_asset_ids_elem = element.find(NS_AAS + "specificAssetIds") - specific_asset_ids: List[Dict[str, Any]] = [] - if specific_asset_ids_elem is not None: - for sid_elem in specific_asset_ids_elem.findall(NS_AAS + "specificAssetId"): - name = sid_elem.findtext(NS_AAS + "name") - value = sid_elem.findtext(NS_AAS + "value") - if name is not None and value is not None: - specific_asset_ids.append({"name": name.strip(), "value": value.strip()}) - - descriptor = object_class( - id=id_value, - id_short=id_short, - endpoints=endpoints, - asset_kind=asset_kind, - specific_asset_ids=specific_asset_ids - ) - - cls._amend_abstract_attributes(descriptor, element) - return descriptor - @classmethod - def construct_submodel_descriptor(cls, element: etree._Element, object_class=model.SubmodelDescriptor, - **_kwargs: Any) -> model.SubmodelDescriptor: - submodel_id = _child_text_mandatory(element, NS_AAS + "id") - id_short = _child_text_mandatory(element, NS_AAS + "idShort") - - endpoints_elem = element.find(NS_AAS + "endpoints") - endpoints: List[str] = [] - if endpoints_elem is not None: - endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] - - # Hier können weitere optionale Felder verarbeitet werden, z.B. semanticId, etc. 
- - submodel_descriptor = object_class( - id=submodel_id, - id_short=id_short, - endpoints=endpoints - ) - cls._amend_abstract_attributes(submodel_descriptor, element) - return submodel_descriptor class StrictAASFromXmlDecoder(AASFromXmlDecoder): """ @@ -1358,9 +1307,6 @@ class XMLConstructables(enum.Enum): EMBEDDED_DATA_SPECIFICATION = enum.auto() DATA_SPECIFICATION_CONTENT = enum.auto() DATA_SPECIFICATION_IEC61360 = enum.auto() - ASSET_ADMINISTRATION_SHELL_DESCRIPTOR = enum.auto() - SUBMODEL_DESCRIPTOR = enum.auto() - ASSET_LINK = enum.auto() def read_aas_xml_element(file: PathOrIO, construct: XMLConstructables, failsafe: bool = True, stripped: bool = False, @@ -1468,10 +1414,6 @@ def read_aas_xml_element(file: PathOrIO, construct: XMLConstructables, failsafe: # type aliases elif construct == XMLConstructables.VALUE_LIST: constructor = decoder_.construct_value_list - elif construct == XMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR: - constructor = decoder_.construct_asset_administration_shell_descriptor - elif construct == XMLConstructables.SUBMODEL_DESCRIPTOR: - constructor = decoder_.construct_submodel_descriptor else: raise ValueError(f"{construct.name} cannot be constructed!") diff --git a/sdk/basyx/aas/model/__init__.py b/sdk/basyx/aas/model/__init__.py index 1dcc29966..e541968b5 100644 --- a/sdk/basyx/aas/model/__init__.py +++ b/sdk/basyx/aas/model/__init__.py @@ -7,16 +7,14 @@ from basyx.aas.model import AssetAdministrationShell, Submodel, Property """ -from __future__ import absolute_import + from .aas import * from .base import * from .submodel import * from .provider import * from .concept import ConceptDescription from . import datatypes -from .aas_descriptor import AssetAdministrationShellDescriptor -from .descriptor import * -from .submodel_descriptor import SubmodelDescriptor + # A mapping of BaSyx Python SDK implementation classes to the corresponding `KeyTypes` enum members for all classes # that are covered by this enum. 
KEY_TYPES_CLASSES: Dict[Type[Referable], KeyTypes] = { diff --git a/sdk/basyx/aas/model/aas.py b/sdk/basyx/aas/model/aas.py index dd3a12c23..684a1ff06 100644 --- a/sdk/basyx/aas/model/aas.py +++ b/sdk/basyx/aas/model/aas.py @@ -172,14 +172,3 @@ def __init__(self, self.submodel: Set[base.ModelReference[Submodel]] = set() if submodel is None else submodel self.embedded_data_specifications: List[base.EmbeddedDataSpecification] = list(embedded_data_specifications) self.extension = base.NamespaceSet(self, [("name", True)], extension) - -class AssetLink: - - def __init__(self, name: base.LabelType, value: base.Identifier): - if not name: - raise ValueError("AssetLink 'name' must be a non-empty string.") - if not value: - raise ValueError("AssetLink 'value' must be a non-empty string.") - self.name = name - self.value = value - \ No newline at end of file diff --git a/sdk/basyx/aas/model/aas_descriptor.py b/sdk/basyx/aas/model/aas_descriptor.py deleted file mode 100644 index a4051abaf..000000000 --- a/sdk/basyx/aas/model/aas_descriptor.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime - -from typing import List, Dict, Optional,Iterable, Set - -from . import base, _string_constraints, aas -from . 
import descriptor -from .submodel_descriptor import SubmodelDescriptor -import re - -class AssetAdministrationShellDescriptor(descriptor.Descriptor): - - def __init__(self, - id_: base.Identifier, - administration: Optional[base.AdministrativeInformation] = None, - asset_kind: Optional[base.AssetKind] = None, - asset_type: Optional[base.Identifier] = None, - endpoints: Optional[List[descriptor.Endpoint]] = None, - global_asset_id: Optional[base.Identifier] = None, - id_short: Optional[base.NameType] = None, - specific_asset_id: Iterable[base.SpecificAssetId] = (), - submodel_descriptors: Optional[List[SubmodelDescriptor]] = None, - description: Optional[base.MultiLanguageTextType] = None, - display_name: Optional[base.MultiLanguageNameType] = None, - extension: Iterable[base.Extension] = ()): - """AssetAdministrationShellDescriptor - - - Nur das 'id'-Feld (id_) ist zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. - """ - super().__init__() - self.administration: Optional[base.AdministrativeInformation] = administration - self.asset_kind: Optional[base.AssetKind] = asset_kind - self.asset_type: Optional[base.Identifier] = asset_type - self.endpoints: Optional[List[descriptor.Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt - self.global_asset_id: Optional[base.Identifier] = global_asset_id - self.id_short: Optional[base.NameType] = id_short - self.id: base.Identifier = id_ - self._specific_asset_id: base.ConstrainedList[base.SpecificAssetId] = base.ConstrainedList( - specific_asset_id, - item_set_hook=self._check_constraint_set_spec_asset_id, - item_del_hook=self._check_constraint_del_spec_asset_id - ) - self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] - self.description: Optional[base.MultiLanguageTextType] = description - self.display_name: Optional[base.MultiLanguageNameType] = display_name - self.extension = base.NamespaceSet(self, [("name", True)], extension) 
- - @property - def specific_asset_id(self) -> base.ConstrainedList[base.SpecificAssetId]: - return self._specific_asset_id - - @specific_asset_id.setter - def specific_asset_id(self, specific_asset_id: Iterable[base.SpecificAssetId]) -> None: - # constraints are checked via _check_constraint_set_spec_asset_id() in this case - self._specific_asset_id[:] = specific_asset_id - - def _check_constraint_set_spec_asset_id(self, items_to_replace: List[base.SpecificAssetId], - new_items: List[base.SpecificAssetId], - old_list: List[base.SpecificAssetId]) -> None: - self._validate_aasd_131(self.global_asset_id, - len(old_list) - len(items_to_replace) + len(new_items) > 0) - - def _check_constraint_del_spec_asset_id(self, _item_to_del: base.SpecificAssetId, - old_list: List[base.SpecificAssetId]) -> None: - self._validate_aasd_131(self.global_asset_id, len(old_list) > 1) diff --git a/sdk/basyx/aas/model/descriptor.py b/sdk/basyx/aas/model/descriptor.py deleted file mode 100644 index d9071fc84..000000000 --- a/sdk/basyx/aas/model/descriptor.py +++ /dev/null @@ -1,133 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import - -import abc -from datetime import date, datetime # noqa: F401 - -from typing import List, Dict, Optional,Iterable, Set - -from enum import Enum -import re - -from . 
import base, _string_constraints - -class Descriptor(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __init__(self, description: Optional[base.MultiLanguageTextType] = None, display_name: Optional[base.MultiLanguageNameType] = None, extension: Iterable[base.Extension] = ()): - - super().__init__() - self.namespace_element_sets: List[NamespaceSet] = [] - self.description: Optional[base.MultiLanguageTextType] = description - self.display_name: Optional[base.MultiLanguageNameType] = display_name - self.extension = base.NamespaceSet(self, [("name", True)], extension) - - @property - def description(self) -> Optional[base.MultiLanguageTextType]: - return self._description - - @description.setter - def description(self, value: Optional[base.MultiLanguageTextType]): - self._description = value - - @property - def display_name(self) -> Optional[base.MultiLanguageNameType]: - return self._display_name - - @display_name.setter - def display_name(self, value: Optional[base.MultiLanguageNameType]): - self._display_name = value - - def commit(self): - pass - def update(self): - pass - def update_from(self, other: "Referable", update_source: bool = False): - pass - -class SecurityTypeEnum(Enum): - NONE = "NONE" - RFC_TLSA = "RFC_TLSA" - W3C_DID = "W3C_DID" - -class SecurityAttributeObject: - def __init__(self, type_: SecurityTypeEnum, key: str, value: str): - - if not isinstance(type_, SecurityTypeEnum): - raise ValueError(f"Invalid security type: {type_}. 
Must be one of {list(SecurityTypeEnum)}") - if not key or not isinstance(key, str): - raise ValueError("Key must be a non-empty string.") - if not value or not isinstance(value, str): - raise ValueError("Value must be a non-empty string.") - self.type = type_ - self.key = key - self.value = value - - -class ProtocolInformation: - - def __init__( - self, - href: str, - endpoint_protocol: Optional[str] = None, - endpoint_protocol_version: Optional[List[str]] = None, - subprotocol: Optional[str] = None, - subprotocol_body: Optional[str] = None, - subprotocol_body_encoding: Optional[str] = None, - security_attributes: Optional[List[SecurityAttributeObject]] = None - ): - if not href or not isinstance(href, str): - raise ValueError("href must be a non-empty string representing a valid URL.") - - self.href = href - self.endpoint_protocol = endpoint_protocol - self.endpoint_protocol_version = endpoint_protocol_version or [] - self.subprotocol = subprotocol - self.subprotocol_body = subprotocol_body - self.subprotocol_body_encoding = subprotocol_body_encoding - self.security_attributes = security_attributes or [] -class Endpoint: - - INTERFACE_SHORTNAMES = { - "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", - "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", - "CD-REPOSITORY", "AAS-DISCOVERY" - } - VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") - - def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 - - self.interface = interface - self.protocol_information = protocol_information - - @property - def interface(self) -> str: - return self._interface - - @interface.setter - def interface(self, interface: base.NameType): - if interface is None: - raise ValueError("Invalid value for `interface`, must not be `None`") - if not self.is_valid_interface(interface): - raise ValueError(f"Invalid interface format: {interface}. 
Expected format: '-', ") - - self._interface = interface - - @classmethod - def is_valid_interface(cls, interface: base.NameType) -> bool: - parts = interface.split("-", 1) - if len(parts) != 2: - return False - short_name, version = parts - return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) - - @property - def protocol_information(self) -> ProtocolInformation: - return self._protocol_information - - @protocol_information.setter - def protocol_information(self, protocol_information: ProtocolInformation): - if protocol_information is None: - raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 - - self._protocol_information = protocol_information \ No newline at end of file diff --git a/sdk/basyx/aas/model/submodel_descriptor.py b/sdk/basyx/aas/model/submodel_descriptor.py deleted file mode 100644 index 62081d6a6..000000000 --- a/sdk/basyx/aas/model/submodel_descriptor.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding: utf-8 - -from __future__ import absolute_import -from datetime import date, datetime - -from typing import List, Dict, Optional,Iterable, Set - -from .base import AdministrativeInformation -from . import descriptor -from . 
import base -from .base import Reference -import re - - -class SubmodelDescriptor(descriptor.Descriptor): - - def __init__(self, id_: base.Identifier, endpoints: List[descriptor.Endpoint], - administration: Optional[base.AdministrativeInformation] = None, - id_short: Optional[base.NameType]=None, semantic_id: Optional[base.Reference]=None, - supplemental_semantic_id: Iterable[base.Reference] = ()): - - super().__init__() - self.id: base.Identifier = id_ - self.endpoints: List[descriptor.Endpoint] = endpoints - self.administration: Optional[base.AdministrativeInformation] = administration - self.id_short: Optional[base.NameType] = id_short - self.semantic_id: Optional[base.Reference] = semantic_id - self.supplemental_semantic_id: base.ConstrainedList[base.Reference] = \ - base.ConstrainedList(supplemental_semantic_id) diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index baaf6ff05..70308891b 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -20,7 +20,7 @@ root = ".." # Defines the path to the root of the repository version_file = "basyx/version.py" [project] -name = "basyx-python-sdk" + name = "basyx-python-sdk" dynamic = ["version"] description = "The Eclipse BaSyx Python SDK, an implementation of the Asset Administration Shell for Industry 4.0 systems" authors = [ @@ -38,9 +38,7 @@ requires-python = ">=3.9" dependencies = [ "lxml>=4.2,<5", "python-dateutil>=2.8,<3", - "pyecma376-2>=1.0.1", - "urllib3>=1.26,<3", - "Werkzeug>=3.0.3,<4", + "pyecma376-2>=1.0.1" ] [project.optional-dependencies] diff --git a/server/app/__init__.py b/server/app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/adapter/__init__.py b/server/app/adapter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py new file mode 100644 index 000000000..251127339 --- /dev/null +++ b/server/app/adapter/jsonization.py @@ -0,0 +1,544 @@ +from typing import Dict, Set, 
ContextManager, IO, get_args, Optional, Type, List + +import server.app.server_model as server_model +from basyx.aas import model +from basyx.aas.adapter import _generic +from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, Path, PathOrIO +from basyx.aas.adapter.json import AASToJsonEncoder +from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, _select_decoder + +import json +import logging +import pprint +from typing import Callable + +import contextlib + + +logger = logging.getLogger(__name__) + + +class ServerAASFromJsonDecoder(AASFromJsonDecoder): + @classmethod + def object_hook(cls, dct: Dict[str, object]) -> object: + # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON + # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. + if 'modelType' not in dct: + return dct + + # The following dict specifies a constructor method for all AAS classes that may be identified using the + # ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON + # representation of an object and tries to construct a Python object from it. Embedded objects that have a + # modelType themselves are expected to be converted to the correct PythonType already. Additionally, each + # function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects + # instead of raising an Exception. 
+ AAS_CLASS_PARSERS: Dict[str, Callable[[Dict[str, object]], object]] = { + 'AssetAdministrationShell': cls._construct_asset_administration_shell, + 'AssetInformation': cls._construct_asset_information, + 'SpecificAssetId': cls._construct_specific_asset_id, + 'ConceptDescription': cls._construct_concept_description, + 'Extension': cls._construct_extension, + 'Submodel': cls._construct_submodel, + 'Capability': cls._construct_capability, + 'Entity': cls._construct_entity, + 'BasicEventElement': cls._construct_basic_event_element, + 'Operation': cls._construct_operation, + 'RelationshipElement': cls._construct_relationship_element, + 'AnnotatedRelationshipElement': cls._construct_annotated_relationship_element, + 'SubmodelElementCollection': cls._construct_submodel_element_collection, + 'SubmodelElementList': cls._construct_submodel_element_list, + 'Blob': cls._construct_blob, + 'File': cls._construct_file, + 'MultiLanguageProperty': cls._construct_multi_language_property, + 'Property': cls._construct_property, + 'Range': cls._construct_range, + 'ReferenceElement': cls._construct_reference_element, + 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, + 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, + 'SubmodelDescriptor': cls._construct_submodel_descriptor, + 'AssetLink': cls._construct_asset_link, + } + + # Get modelType and constructor function + if not isinstance(dct['modelType'], str): + logger.warning("JSON object has unexpected format of modelType: %s", dct['modelType']) + # Even in strict mode, we consider 'modelType' attributes of wrong type as non-AAS objects instead of + # raising an exception. 
However, the object's type will probably checked later by read_json_aas_file() or + # _expect_type() + return dct + model_type = dct['modelType'] + if model_type not in AAS_CLASS_PARSERS: + if not cls.failsafe: + raise TypeError("Found JSON object with modelType=\"%s\", which is not a known AAS class" % model_type) + logger.error("Found JSON object with modelType=\"%s\", which is not a known AAS class", model_type) + return dct + + # Use constructor function to transform JSON representation into BaSyx Python SDK model object + try: + return AAS_CLASS_PARSERS[model_type](dct) + except (KeyError, TypeError, model.AASConstraintViolation) as e: + error_message = "Error while trying to convert JSON object into {}: {} >>> {}".format( + model_type, e, pprint.pformat(dct, depth=2, width=2 ** 14, compact=True)) + if cls.failsafe: + logger.error(error_message, exc_info=e) + # In failsafe mode, we return the raw JSON object dict, if there were errors while parsing an object, so + # a client application is able to handle this data. The read_json_aas_file() function and all + # constructors for complex objects will skip those items by using _expect_type(). 
+ return dct + else: + raise (type(e) if isinstance(e, (KeyError, TypeError)) else TypeError)(error_message) from e + + # ################################################################################################## + # Utility Methods used in constructor methods to add general attributes (from abstract base classes) + # ################################################################################################## + + @classmethod + def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None: + super()._amend_abstract_attributes(obj, dct) + + if isinstance(obj, server_model.Descriptor): + if 'description' in dct: + obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), + model.MultiLanguageTextType) + if 'displayName' in dct: + obj.display_name = cls._construct_lang_string_set(_get_ts(dct, 'displayName', list), + model.MultiLanguageNameType) + if 'extensions' in dct: + for extension in _get_ts(dct, 'extensions', list): + obj.extension.add(cls._construct_extension(extension)) + + @classmethod + def _construct_asset_administration_shell_descriptor( + cls, dct: Dict[str, object], + object_class=server_model.AssetAdministrationShellDescriptor) -> server_model.AssetAdministrationShellDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str)) + cls._amend_abstract_attributes(ret, dct) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) + if 'assetkind' in dct: + # FIXME + asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] + if 'assetType' in dct: + ret.asset_type = _get_ts(dct, 'assetType', str) + global_asset_id = None + if 'globalAssetId' in dct: + # FIXME + global_asset_id = _get_ts(dct, 'globalAssetId', str) + specific_asset_id = set() + if 'specificAssetIds' in dct: + for desc_data in _get_ts(dct, "specificAssetIds", list): + specific_asset_id.add(cls._construct_specific_asset_id(desc_data, 
model.SpecificAssetId)) + if 'endpoints' in dct: + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + server_model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = server_model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + endpoint_dct['href'] else None + ) + ret.endpoints.append(server_model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'submodelDescriptors' in dct: + ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), + server_model.SubmodelDescriptor) + return ret + + @classmethod + def _construct_protocol_information(cls, dct: Dict[str, object], + object_class=server_model.ProtocolInformation) -> server_model.ProtocolInformation: + ret = object_class( + href=_get_ts(dct, 'href', str), + endpoint_protocol=_get_ts(dct, 'endpointProtocol', + str) if 'endpointProtocol' in dct else None, + endpoint_protocol_version=_get_ts(dct, + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in dct else None, + subprotocol=_get_ts(dct, 'subprotocol', + str) if 'subprotocol' in dct else None, + subprotocol_body=_get_ts(dct, 'subprotocolBody', + str) if 'subprotocolBody' in dct else None, + subprotocol_body_encoding=_get_ts(dct, + 'subprotocolBodyEncoding', + str) if 'subprotocolBodyEncoding' in dct else None + ) + return ret + + @classmethod + def _construct_endpoint(cls, dct: Dict[str, object], + object_class=server_model.Endpoint) -> server_model.Endpoint: + ret = 
object_class( + protocol_information=cls._construct_protocol_information( + _get_ts(dct, 'protocolInformation', dict), + server_model.ProtocolInformation + ), + interface=_get_ts(dct, 'interface', + str) + ) + cls._amend_abstract_attributes(ret, dct) + return ret + + @classmethod + def _construct_submodel_descriptor( + cls, dct: Dict[str, object], object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str), + endpoints=[]) + cls._amend_abstract_attributes(ret, dct) + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + server_model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = server_model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + endpoint_dct['href'] else None + ) + ret.endpoints.append(server_model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information( + _get_ts(dct, 'administration', dict)) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'semanticId' in dct: + ret.semantic_id = cls._construct_reference(_get_ts(dct, 'semanticId', dict)) + if 'supplementalSemanticIds' in dct: + for ref in _get_ts(dct, 'supplementalSemanticIds', list): + ret.supplemental_semantic_id.append(cls._construct_reference(ref)) + return ret + + @classmethod + def _construct_asset_link( + cls, dct: Dict[str, object], object_class=server_model.AssetLink) -> server_model.AssetLink: + ret = object_class(name=_get_ts(dct, 'name', str), + 
value=_get_ts(dct, 'value', str)) + return ret + + +class ServerStrictAASFromJsonDecoder(ServerAASFromJsonDecoder): + """ + A strict version of the AASFromJsonDecoder class for deserializing Asset Administration Shell data from the + official JSON format + + This version has set ``failsafe = False``, which will lead to Exceptions raised for every missing attribute or wrong + object type. + """ + failsafe = False + + +class ServerStrippedAASFromJsonDecoder(ServerAASFromJsonDecoder): + """ + Decoder for stripped JSON objects. Used in the HTTP adapter. + """ + stripped = True + + +class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, ServerStrippedAASFromJsonDecoder): + """ + Non-failsafe decoder for stripped JSON objects. + """ + pass + + +def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, + ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, + decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + """ + Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 + into a given object store. + + :param object_store: The :class:`ObjectStore ` in which the + identifiable objects should be stored + :param file: A filename or file-like object to read the JSON-serialized data from + :param replace_existing: Whether to replace existing objects with the same identifier in the object store or not + :param ignore_existing: Whether to ignore existing objects (e.g. log a message) or raise an error. + This parameter is ignored if replace_existing is ``True``. + :param failsafe: If ``True``, the document is parsed in a failsafe way: Missing attributes and elements are logged + instead of causing exceptions. Defect objects are skipped. + This parameter is ignored if a decoder class is specified. + :param stripped: If ``True``, stripped JSON objects are parsed. 
+ See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 + This parameter is ignored if a decoder class is specified. + :param decoder: The decoder class used to decode the JSON objects + :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier + :raises KeyError: Encountered an identifier that already exists in the given ``object_store`` with both + ``replace_existing`` and ``ignore_existing`` set to ``False`` + :raises (~basyx.aas.model.base.AASConstraintViolation, KeyError, ValueError, TypeError): **Non-failsafe**: + Errors during construction of the objects + :raises TypeError: **Non-failsafe**: Encountered an element in the wrong list + (e.g. an AssetAdministrationShell in ``submodels``) + :return: A set of :class:`Identifiers ` that were added to object_store + """ + ret: Set[model.Identifier] = set() + decoder_ = _select_decoder(failsafe, stripped, decoder) + + # json.load() accepts TextIO and BinaryIO + cm: ContextManager[IO] + if isinstance(file, get_args(Path)): + # 'file' is a path, needs to be opened first + cm = open(file, "r", encoding="utf-8-sig") + else: + # 'file' is not a path, thus it must already be IO + # mypy seems to have issues narrowing the type due to get_args() + cm = contextlib.nullcontext(file) # type: ignore[arg-type] + + # read, parse and convert JSON file + with cm as fp: + data = json.load(fp, cls=decoder_) + + for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), + ('submodels', model.Submodel), + ('conceptDescriptions', model.ConceptDescription), + ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', server_model.SubmodelDescriptor)): + try: + lst = _get_ts(data, name, list) + except (KeyError, TypeError): + continue + + for item in lst: + error_message = "Expected a {} in list '{}', but found {}".format( + expected_type.__name__, name, repr(item)) + if isinstance(item, model.Identifiable): + if not isinstance(item, 
expected_type): + if decoder_.failsafe: + logger.warning("{} was in wrong list '{}'; nevertheless, we'll use it".format(item, name)) + else: + raise TypeError(error_message) + if item.id in ret: + error_message = f"{item} has a duplicate identifier already parsed in the document!" + if not decoder_.failsafe: + raise KeyError(error_message) + logger.error(error_message + " skipping it...") + continue + existing_element = object_store.get(item.id) + if existing_element is not None: + if not replace_existing: + error_message = f"object with identifier {item.id} already exists " \ + f"in the object store: {existing_element}!" + if not ignore_existing: + raise KeyError(error_message + f" failed to insert {item}!") + logger.info(error_message + f" skipping insertion of {item}...") + continue + object_store.discard(existing_element) + object_store.add(item) + ret.add(item.id) + elif decoder_.failsafe: + logger.error(error_message) + else: + raise TypeError(error_message) + return ret + + +class ServerAASToJsonEncoder(AASToJsonEncoder): + + def default(self, obj: object) -> object: + """ + The overwritten ``default`` method for :class:`json.JSONEncoder` + + :param obj: The object to serialize to json + :return: The serialized object + """ + mapping: Dict[Type, Callable] = { + model.AdministrativeInformation: self._administrative_information_to_json, + model.AnnotatedRelationshipElement: self._annotated_relationship_element_to_json, + model.AssetAdministrationShell: self._asset_administration_shell_to_json, + model.AssetInformation: self._asset_information_to_json, + model.BasicEventElement: self._basic_event_element_to_json, + model.Blob: self._blob_to_json, + model.Capability: self._capability_to_json, + model.ConceptDescription: self._concept_description_to_json, + model.DataSpecificationIEC61360: self._data_specification_iec61360_to_json, + model.Entity: self._entity_to_json, + model.Extension: self._extension_to_json, + model.File: self._file_to_json, + model.Key: 
self._key_to_json, + model.LangStringSet: self._lang_string_set_to_json, + model.MultiLanguageProperty: self._multi_language_property_to_json, + model.Operation: self._operation_to_json, + model.Property: self._property_to_json, + model.Qualifier: self._qualifier_to_json, + model.Range: self._range_to_json, + model.Reference: self._reference_to_json, + model.ReferenceElement: self._reference_element_to_json, + model.RelationshipElement: self._relationship_element_to_json, + model.Resource: self._resource_to_json, + model.SpecificAssetId: self._specific_asset_id_to_json, + model.Submodel: self._submodel_to_json, + model.SubmodelElementCollection: self._submodel_element_collection_to_json, + model.SubmodelElementList: self._submodel_element_list_to_json, + model.ValueReferencePair: self._value_reference_pair_to_json, + server_model.AssetAdministrationShellDescriptor: self._asset_administration_shell_descriptor_to_json, + server_model.SubmodelDescriptor: self._submodel_descriptor_to_json, + server_model.Endpoint: self._endpoint_to_json, + server_model.ProtocolInformation: self._protocol_information_to_json, + server_model.AssetLink: self._asset_link_to_json + } + for typ in mapping: + if isinstance(obj, typ): + mapping_method = mapping[typ] + return mapping_method(obj) + return super().default(obj) + + @classmethod + def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: + data: Dict[str, object] = super()._abstract_classes_to_json(obj) + if isinstance(obj, server_model.Descriptor): + if obj.description: + data['description'] = obj.description + if obj.display_name: + data['displayName'] = obj.display_name + if obj.extension: + data['extensions'] = list(obj.extension) + + return data + + + @classmethod + def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> Dict[str, object]: + """ + serialization of an object from class AssetAdministrationShell to json + + :param obj: object of class 
AssetAdministrationShell + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data.update(cls._namespace_to_json(obj)) + data['id'] = obj.id + if obj.administration: + data['administration'] = obj.administration + if obj.asset_kind: + data['assetKind'] = _generic.ASSET_KIND[obj.asset_kind] + if obj.asset_type: + data['assetType'] = obj.asset_type + if obj.global_asset_id: + data['globalAssetId'] = obj.global_asset_id + if obj.specific_asset_id: + data['specificAssetIds'] = list(obj.specific_asset_id) + if obj.endpoints: + data['endpoints'] = list(obj.endpoints) + if obj.id_short: + data['idShort'] = obj.id_short + if obj.submodel_descriptors: + data['submodelDescriptors'] = list(obj.submodel_descriptors) + return data + + @classmethod + def _protocol_information_to_json(cls, + obj: server_model.ProtocolInformation) -> \ + Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + + data['href'] = obj.href + if obj.endpoint_protocol: + data['endpointProtocol'] = obj.endpoint_protocol + if obj.endpoint_protocol_version: + data['endpointProtocolVersion'] = obj.endpoint_protocol_version + if obj.subprotocol: + data['subprotocol'] = obj.subprotocol + if obj.subprotocol_body: + data['subprotocolBody'] = obj.subprotocol_body + if obj.subprotocol_body_encoding: + data['subprotocolBodyEncoding'] = obj.subprotocol_body_encoding + return data + + @classmethod + def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['protocolInformation'] = cls._protocol_information_to_json( + obj.protocol_information) + data['interface'] = obj.interface + return data + + @classmethod + def _submodel_descriptor_to_json(cls, obj: server_model.SubmodelDescriptor) -> Dict[str, object]: + """ + serialization of an object from class Submodel to json + + :param obj: object of class Submodel + :return: dict with the serialized attributes of this object + """ + 
data = cls._abstract_classes_to_json(obj) + data['id'] = obj.id + data['endpoints'] = [cls._endpoint_to_json(ep) for ep in + obj.endpoints] + if obj.id_short: + data['idShort'] = obj.id_short + if obj.administration: + data['administration'] = obj.administration + if obj.semantic_id: + data['semanticId'] = obj.semantic_id + if obj.supplemental_semantic_id: + data['supplementalSemanticIds'] = list(obj.supplemental_semantic_id) + return data + + @classmethod + def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['name'] = obj.name + data['value'] = obj.value + return data + + +def _create_dict(data: model.AbstractObjectStore) -> dict: + # separate different kind of objects + asset_administration_shells: List[model.AssetAdministrationShell] = [] + submodels: List[model.Submodel] = [] + concept_descriptions: List[model.ConceptDescription] = [] + asset_administration_shell_descriptors: List[server_model.AssetAdministrationShellDescriptor] = [] + submodel_descriptors: List[server_model.SubmodelDescriptor] = [] + assets_links: List[server_model.AssetLink] = [] + for obj in data: + if isinstance(obj, model.AssetAdministrationShell): + asset_administration_shells.append(obj) + elif isinstance(obj, model.Submodel): + submodels.append(obj) + elif isinstance(obj, model.ConceptDescription): + concept_descriptions.append(obj) + elif isinstance(obj, server_model.AssetAdministrationShellDescriptor): + asset_administration_shell_descriptors.append(obj) + elif isinstance(obj, server_model.SubmodelDescriptor): + submodel_descriptors.append(obj) + elif isinstance(obj, server_model.AssetLink): + assets_links.append(obj) + dict_: Dict[str, List] = {} + if asset_administration_shells: + dict_['assetAdministrationShells'] = asset_administration_shells + if submodels: + dict_['submodels'] = submodels + if concept_descriptions: + dict_['conceptDescriptions'] = concept_descriptions + if 
asset_administration_shell_descriptors: + dict_['assetAdministrationShellDescriptors'] = asset_administration_shell_descriptors + if submodel_descriptors: + dict_['submodelDescriptors'] = submodel_descriptors + if assets_links: + dict_['assetLinks'] = assets_links + return dict_ diff --git a/server/app/adapter/xmlization.py b/server/app/adapter/xmlization.py new file mode 100644 index 000000000..e13a5ebed --- /dev/null +++ b/server/app/adapter/xmlization.py @@ -0,0 +1,162 @@ +import enum +from typing import Optional, Type, Callable, Any, List, Dict +from lxml import etree + +from basyx.aas.adapter._generic import PathOrIO +from basyx.aas.adapter.xml import XMLConstructables, AASFromXmlDecoder +from basyx.aas.adapter.xml.xml_deserialization import _parse_xml_document, _failsafe_construct, \ + _child_text_mandatory, NS_AAS, read_aas_xml_element +import server.app.server_model as server_model + + +class ServerAASFromXmlDecoder(AASFromXmlDecoder): + + @classmethod + def construct_asset_administration_shell_descriptor(cls, element: etree._Element, + object_class=server_model.AssetAdministrationShellDescriptor, + **_kwargs: Any) -> server_model.AssetAdministrationShellDescriptor: + id_value = _child_text_mandatory(element, NS_AAS + "id") + id_short = _child_text_mandatory(element, NS_AAS + "idShort") + endpoints_elem = element.find(NS_AAS + "endpoints") + endpoints: List[str] = [] + if endpoints_elem is not None: + endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] + + asset_kind = _child_text_mandatory(element, NS_AAS + "assetKind") + + specific_asset_ids_elem = element.find(NS_AAS + "specificAssetIds") + specific_asset_ids: List[Dict[str, Any]] = [] + if specific_asset_ids_elem is not None: + for sid_elem in specific_asset_ids_elem.findall(NS_AAS + "specificAssetId"): + name = sid_elem.findtext(NS_AAS + "name") + value = sid_elem.findtext(NS_AAS + "value") + if name is not None and value is not None: + 
specific_asset_ids.append({"name": name.strip(), "value": value.strip()}) + + descriptor = object_class( + id=id_value, + id_short=id_short, + endpoints=endpoints, + asset_kind=asset_kind, + specific_asset_ids=specific_asset_ids + ) + + cls._amend_abstract_attributes(descriptor, element) + return descriptor + + @classmethod + def construct_submodel_descriptor(cls, element: etree._Element, object_class=server_model.SubmodelDescriptor, + **_kwargs: Any) -> server_model.SubmodelDescriptor: + submodel_id = _child_text_mandatory(element, NS_AAS + "id") + id_short = _child_text_mandatory(element, NS_AAS + "idShort") + + endpoints_elem = element.find(NS_AAS + "endpoints") + endpoints: List[str] = [] + if endpoints_elem is not None: + endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] + + # Hier können weitere optionale Felder verarbeitet werden, z.B. semanticId, etc. + + submodel_descriptor = object_class( + id=submodel_id, + id_short=id_short, + endpoints=endpoints + ) + + cls._amend_abstract_attributes(submodel_descriptor, element) + return submodel_descriptor + + +class ServerStrictAASFromXmlDecoder(ServerAASFromXmlDecoder): + """ + Non-failsafe XML decoder. Encountered errors won't be caught and abort parsing. + """ + failsafe = False + + +class ServerStrippedAASFromXmlDecoder(ServerAASFromXmlDecoder): + """ + Decoder for stripped XML elements. Used in the HTTP adapter. + """ + stripped = True + + +class ServerStrictStrippedAASFromXmlDecoder(ServerStrictAASFromXmlDecoder, ServerStrippedAASFromXmlDecoder): + """ + Non-failsafe decoder for stripped XML elements. 
+ """ + pass + + +@enum.unique +class ServerXMLConstructables(enum.Enum): + ASSET_ADMINISTRATION_SHELL_DESCRIPTOR = enum.auto() + SUBMODEL_DESCRIPTOR = enum.auto() + ASSET_LINK = enum.auto() + + +def _select_server_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[ServerAASFromXmlDecoder]]) \ + -> Type[ServerAASFromXmlDecoder]: + """ + Returns the correct decoder based on the parameters failsafe and stripped. If a decoder class is given, failsafe + and stripped are ignored. + + :param failsafe: If true, a failsafe decoder is selected. Ignored if a decoder class is specified. + :param stripped: If true, a decoder for parsing stripped XML elements is selected. Ignored if a decoder class is + specified. + :param decoder: Is returned, if specified. + :return: A AASFromXmlDecoder (sub)class. + """ + if decoder is not None: + return decoder + if failsafe: + if stripped: + return ServerStrippedAASFromXmlDecoder + return ServerAASFromXmlDecoder + else: + if stripped: + return ServerStrictStrippedAASFromXmlDecoder + return ServerStrictAASFromXmlDecoder + + +def read_server_aas_xml_element(file: PathOrIO, construct: XMLConstructables, failsafe: bool = True, + stripped: bool = False, + decoder: Optional[Type[AASFromXmlDecoder]] = None, **constructor_kwargs) -> Optional[ + object]: + """ + Construct a single object from an XML string. The namespaces have to be declared on the object itself, since there + is no surrounding environment element. + + :param file: A filename or file-like object to read the XML-serialized data from + :param construct: A member of the enum :class:`~.XMLConstructables`, specifying which type to construct. + :param failsafe: If true, the document is parsed in a failsafe way: missing attributes and elements are logged + instead of causing exceptions. Defect objects are skipped. + This parameter is ignored if a decoder class is specified. + :param stripped: If true, stripped XML elements are parsed. 
+ See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 + This parameter is ignored if a decoder class is specified. + :param decoder: The decoder class used to decode the XML elements + :param constructor_kwargs: Keyword arguments passed to the constructor function + :raises ~lxml.etree.XMLSyntaxError: **Non-failsafe**: If the given file(-handle) has invalid XML + :raises KeyError: **Non-failsafe**: If a required namespace has not been declared on the XML document + :raises (~basyx.aas.model.base.AASConstraintViolation, KeyError, ValueError): **Non-failsafe**: Errors during + construction of the objects + :return: The constructed object or None, if an error occurred in failsafe mode. + """ + + try: + return read_aas_xml_element(file, construct, failsafe=failsafe, stripped=stripped, decoder=decoder, + **constructor_kwargs) + except ValueError: + decoder_ = _select_server_decoder(failsafe, stripped, decoder) + constructor: Callable[..., object] + + if construct == ServerXMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR: + constructor = decoder_.construct_asset_administration_shell_descriptor + elif construct == ServerXMLConstructables.SUBMODEL_DESCRIPTOR: + constructor = decoder_.construct_submodel_descriptor + else: + raise ValueError(f"{construct.name} cannot be constructed!") + + element = _parse_xml_document(file, failsafe=decoder_.failsafe) + return _failsafe_construct(element, constructor, decoder_.failsafe, **constructor_kwargs) diff --git a/sdk/basyx/aas/adapter/discovery.py b/server/app/discovery.py similarity index 96% rename from sdk/basyx/aas/adapter/discovery.py rename to server/app/discovery.py index e6e68cc67..f5f0a215f 100644 --- a/sdk/basyx/aas/adapter/discovery.py +++ b/server/app/discovery.py @@ -1,7 +1,11 @@ import itertools import werkzeug.exceptions from werkzeug.wrappers import Request, Response + +import server.app.server_model from basyx.aas import model +from server.app import server_model +from server.app.adapter.jsonization 
import ServerAASToJsonEncoder from .http import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder from werkzeug.routing import MapAdapter, Rule, Submount from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request @@ -14,11 +18,10 @@ from pymongo.collection import Collection import json -from basyx.aas.adapter.json import AASToJsonEncoder def specific_asset_to_json_obj(asset_id: model.SpecificAssetId) -> dict: # Encode the asset to a JSON string and then decode to a dict. - json_str = AASToJsonEncoder().encode(asset_id) + json_str = ServerAASToJsonEncoder().encode(asset_id) return json.loads(json_str) class AbstractDiscoveryStore(metaclass=abc.ABCMeta): @@ -50,7 +53,7 @@ def delete_asset_links_by_aas(self, aas_identifier: model.Identifier) -> None: if key in self.aas_to_assets: del self.aas_to_assets[key] - def search_aas_by_asset_link(self, asset_link: model.AssetLink) -> List[str]: + def search_aas_by_asset_link(self, asset_link: server_model.AssetLink) -> List[str]: result = [] for asset_key, aas_ids in self.asset_to_aas.items(): expected_key = f"{asset_link.name}:{asset_link.value}" @@ -104,7 +107,7 @@ def delete_asset_links_by_aas(self, aas_identifier: model.Identifier) -> None: key = aas_identifier self.coll_aas_to_assets.delete_one({"_id": key}) - def search_aas_by_asset_link(self, asset_link: model.AssetLink) -> List[str]: + def search_aas_by_asset_link(self, asset_link: server_model.AssetLink) -> List[str]: # Query MongoDB for specificAssetIds where 'name' and 'value' match doc = self.coll_asset_to_aas.find_one({ "name": asset_link.name, @@ -190,7 +193,7 @@ def handle_request(self, request: Request): return http_exception_to_response(e, response_t) def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: - asset_links = 
HTTPApiDecoder.request_body_list(request, model.AssetLink, False) + asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False) matching_aas_keys = set() for asset_link in asset_links: aas_keys = self.persistent_store.search_aas_by_asset_link(asset_link) diff --git a/sdk/basyx/aas/adapter/http.py b/server/app/http.py similarity index 98% rename from sdk/basyx/aas/adapter/http.py rename to server/app/http.py index 30e26e419..554fb3211 100644 --- a/sdk/basyx/aas/adapter/http.py +++ b/server/app/http.py @@ -53,11 +53,16 @@ from werkzeug.wrappers import Request, Response from werkzeug.datastructures import FileStorage +import server.app.server_model as server_model from basyx.aas import model -from ._generic import XML_NS_MAP -from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element -from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder -from . import aasx +from basyx.aas.adapter._generic import XML_NS_MAP + +from basyx.aas.adapter import aasx + +from basyx.aas.adapter.xml import xml_serialization, XMLConstructables + +from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element +from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple @@ -92,7 +97,7 @@ def __init__(self, success: bool, messages: Optional[List[Message]] = None): self.messages: List[Message] = messages -class ResultToJsonEncoder(AASToJsonEncoder): +class ResultToJsonEncoder(ServerAASToJsonEncoder): @classmethod def _result_to_json(cls, result: Result) -> Dict[str, object]: return { @@ -174,10 +179,10 @@ def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> root_elem.append(child) elif isinstance(obj, list): for item in obj: - item_elem = 
object_to_xml_element(item) + item_elem = xml_serialization.object_to_xml_element(item) root_elem.append(item_elem) else: - obj_elem = object_to_xml_element(obj) + obj_elem = xml_serialization.object_to_xml_element(obj) for child in obj_elem: root_elem.append(child) etree.cleanup_namespaces(root_elem) @@ -290,9 +295,9 @@ class HTTPApiDecoder: model.Submodel: XMLConstructables.SUBMODEL, model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, model.Reference: XMLConstructables.REFERENCE, - model.AssetAdministrationShellDescriptor: XMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR, - model.SubmodelDescriptor: XMLConstructables.SUBMODEL_DESCRIPTOR, - model.AssetLink: XMLConstructables.ASSET_LINK, + server_model.AssetAdministrationShellDescriptor: ServerXMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR, + server_model.SubmodelDescriptor: ServerXMLConstructables.SUBMODEL_DESCRIPTOR, + server_model.AssetLink: ServerXMLConstructables.ASSET_LINK, } @classmethod @@ -309,8 +314,8 @@ def assert_type(cls, obj: object, type_: Type[T]) -> T: @classmethod def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: cls.check_type_supportance(expect_type) - decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ - else StrictAASFromJsonDecoder + decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \ + else ServerStrictAASFromJsonDecoder try: parsed = json.loads(data, cls=decoder) if not isinstance(parsed, list): @@ -340,7 +345,7 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool constructor = decoder._construct_asset_administration_shell_descriptor elif expect_type is model.SubmodelDescriptor: constructor = decoder._construct_submodel_descriptor - elif expect_type is model.AssetLink: + elif expect_type is server_model.AssetLink: constructor = decoder._construct_asset_link if constructor is not None: @@ 
-371,8 +376,8 @@ def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: cls.check_type_supportance(expect_type) try: xml_data = io.BytesIO(data) - rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], - stripped=stripped, failsafe=False) + rv = read_server_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], + stripped=stripped, failsafe=False) except (KeyError, ValueError) as e: # xml deserialization creates an error chain. since we only return one error, return the root cause f: BaseException = e diff --git a/server/app/main.py b/server/app/main.py index c502bfbe0..816bf621a 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -6,7 +6,7 @@ from basyx.aas.adapter import aasx from basyx.aas.backend.local_file import LocalFileObjectStore -from basyx.aas.adapter.http import WSGIApp +from server.app.http import WSGIApp storage_path = os.getenv("STORAGE_PATH", "/storage") storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") diff --git a/server/app/py.typed b/server/app/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/sdk/basyx/aas/adapter/registry.py b/server/app/registry.py similarity index 90% rename from sdk/basyx/aas/adapter/registry.py rename to server/app/registry.py index c3871c612..58e578c90 100644 --- a/sdk/basyx/aas/adapter/registry.py +++ b/server/app/registry.py @@ -8,34 +8,23 @@ This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". 
""" -import abc -import base64 -import binascii -import datetime -import enum -import io -import json import itertools -from lxml import etree import werkzeug.exceptions import werkzeug.routing import werkzeug.urls import werkzeug.utils -from werkzeug.exceptions import BadRequest, Conflict, NotFound, UnprocessableEntity +from werkzeug.exceptions import BadRequest, Conflict, NotFound from werkzeug.routing import MapAdapter, Rule, Submount from werkzeug.wrappers import Request, Response -from werkzeug.datastructures import FileStorage from basyx.aas import model -from ._generic import XML_NS_MAP -from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element -from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder -from . import aasx +import server.app.server_model as server_model + from .http import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder from .http import Base64URLConverter -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple +from typing import Dict, Iterable, Iterator, List, Type, TypeVar, Tuple def get_response_type(request: Request) -> Type[APIResponse]: response_types: Dict[str, Type[APIResponse]] = { @@ -134,12 +123,12 @@ def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T paginated_slice = itertools.islice(iterator, start_index, end_index) return paginated_slice, end_index - def _get_descriptors(self, request: "Request") -> Tuple[Iterator[model.AssetAdministrationShellDescriptor], int]: + def _get_descriptors(self, request: "Request") -> Tuple[Iterator[server_model.AssetAdministrationShellDescriptor], int]: """ Returns all Asset Administration Shell Descriptors """ - descriptors: Iterator[model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( - model.AssetAdministrationShellDescriptor + descriptors: 
Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( + server_model.AssetAdministrationShellDescriptor ) id_short = request.args.get("idShort") @@ -162,14 +151,14 @@ def _get_descriptors(self, request: "Request") -> Tuple[Iterator[model.AssetAdmi paginated_descriptors, end_index = self._get_slice(request, descriptors) return paginated_descriptors, end_index - def _get_descriptor(self, url_args: Dict) -> model.AssetAdministrationShellDescriptor: - return self._get_obj_ts(url_args["aas_id"], model.AssetAdministrationShellDescriptor) + def _get_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: + return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor) - def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[model.SubmodelDescriptor], int]: - submodel_descriptors: Iterator[model.Submodel] = self._get_all_obj_of_type(model.SubmodelDescriptor) + def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: + submodel_descriptors: Iterator[model.Submodel] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) id_short = request.args.get("idShort") if id_short is not None: - submodel_descriptors= filter(lambda sm: sm.id_short == id_short, submodels) + submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) semantic_id = request.args.get("semanticId") if semantic_id is not None: spec_semantic_id = HTTPApiDecoder.base64urljson( @@ -178,8 +167,8 @@ def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[model.Su paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) return paginated_submodel_descriptors, end_index - def _get_submodel_descriptor(self, url_args: Dict) -> model.SubmodelDescriptor: - return self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor) + def _get_submodel_descriptor(self, url_args: 
Dict) -> server_model.SubmodelDescriptor: + return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) def handle_request(self, request: Request): map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) @@ -199,12 +188,12 @@ def handle_request(self, request: Request): # ------ AAS REGISTRY ROUTES ------- def get_aas_descriptors_all(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: - aasdescriptors, cursor = self._get_descriptors(request) - return response_t(list(aasdescriptors), cursor=cursor) + aas_descriptors, cursor = self._get_descriptors(request) + return response_t(list(aas_descriptors), cursor=cursor) def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - descriptor = HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, False) + descriptor = HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False) try: self.object_store.add(descriptor) except KeyError as e: @@ -221,7 +210,7 @@ def get_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[ def put_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: descriptor = self._get_descriptor(url_args) - descriptor.update_from(HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, + descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, is_stripped_request(request))) descriptor.commit() return response_t() @@ -267,7 +256,7 @@ def post_submodel_descriptor_through_superpath(self, map_adapter: MapAdapter) -> Response: aas_descriptor = self._get_descriptor(url_args) submodel_descriptor = HTTPApiDecoder.request_body(request, - model.SubmodelDescriptor, + server_model.SubmodelDescriptor, is_stripped_request( request)) if any(sd.id == 
submodel_descriptor.id for sd in @@ -301,7 +290,7 @@ def put_submodel_descriptor_by_id_through_superpath(self, f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") submodel_descriptor.update_from( HTTPApiDecoder.request_body(request, - model.SubmodelDescriptor, + server_model.SubmodelDescriptor, is_stripped_request(request))) aas_descriptor.commit() return response_t() @@ -336,7 +325,7 @@ def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, respon def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - submodel_descriptor = HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request)) + submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request)) try: self.object_store.add(submodel_descriptor) except KeyError as e: @@ -350,12 +339,12 @@ def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: submodel_descriptor = self._get_submodel_descriptor(url_args) - submodel_descriptor.update_from(HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request))) + submodel_descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) submodel_descriptor.commit() return response_t() def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: - self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor)) + self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)) return response_t() diff --git a/server/app/server_model.py b/server/app/server_model.py new file 
mode 100644 index 000000000..bf6d50bee --- /dev/null +++ b/server/app/server_model.py @@ -0,0 +1,220 @@ +from __future__ import absolute_import + +import abc +import re +from enum import Enum + +from typing import Optional, List, Iterable + +import server.app +from basyx.aas.model import base, NamespaceSet + + +class AssetLink: + def __init__(self, name: base.LabelType, value: base.Identifier): + if not name: + raise ValueError("AssetLink 'name' must be a non-empty string.") + if not value: + raise ValueError("AssetLink 'value' must be a non-empty string.") + self.name = name + self.value = value + + +class SecurityTypeEnum(Enum): + NONE = "NONE" + RFC_TLSA = "RFC_TLSA" + W3C_DID = "W3C_DID" + + +class SecurityAttributeObject: + def __init__(self, type_: SecurityTypeEnum, key: str, value: str): + + if not isinstance(type_, SecurityTypeEnum): + raise ValueError(f"Invalid security type: {type_}. Must be one of {list(SecurityTypeEnum)}") + if not key or not isinstance(key, str): + raise ValueError("Key must be a non-empty string.") + if not value or not isinstance(value, str): + raise ValueError("Value must be a non-empty string.") + self.type = type_ + self.key = key + self.value = value + + +class ProtocolInformation: + + def __init__( + self, + href: str, + endpoint_protocol: Optional[str] = None, + endpoint_protocol_version: Optional[List[str]] = None, + subprotocol: Optional[str] = None, + subprotocol_body: Optional[str] = None, + subprotocol_body_encoding: Optional[str] = None, + security_attributes: Optional[List[SecurityAttributeObject]] = None + ): + if not href or not isinstance(href, str): + raise ValueError("href must be a non-empty string representing a valid URL.") + + self.href = href + self.endpoint_protocol = endpoint_protocol + self.endpoint_protocol_version = endpoint_protocol_version or [] + self.subprotocol = subprotocol + self.subprotocol_body = subprotocol_body + self.subprotocol_body_encoding = subprotocol_body_encoding + 
self.security_attributes = security_attributes or [] + + +class Endpoint: + INTERFACE_SHORTNAMES = { + "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", + "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", + "CD-REPOSITORY", "AAS-DISCOVERY" + } + VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") + + def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 + + self.interface = interface + self.protocol_information = protocol_information + + @property + def interface(self) -> str: + return self._interface + + @interface.setter + def interface(self, interface: base.NameType): + if interface is None: + raise ValueError("Invalid value for `interface`, must not be `None`") + if not self.is_valid_interface(interface): + raise ValueError(f"Invalid interface format: {interface}. Expected format: '-', ") + + self._interface = interface + + @classmethod + def is_valid_interface(cls, interface: base.NameType) -> bool: + parts = interface.split("-", 1) + if len(parts) != 2: + return False + short_name, version = parts + return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) + + @property + def protocol_information(self) -> ProtocolInformation: + return self._protocol_information + + @protocol_information.setter + def protocol_information(self, protocol_information: ProtocolInformation): + if protocol_information is None: + raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 + + self._protocol_information = protocol_information + + +class Descriptor(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __init__(self, description: Optional[base.MultiLanguageTextType] = None, + display_name: Optional[base.MultiLanguageNameType] = None, extension: Iterable[base.Extension] = ()): + super().__init__() + self.namespace_element_sets: List[NamespaceSet] = [] + self.description: Optional[base.MultiLanguageTextType] = description + self.display_name: 
Optional[base.MultiLanguageNameType] = display_name + self.extension = base.NamespaceSet(self, [("name", True)], extension) + + @property + def description(self) -> Optional[base.MultiLanguageTextType]: + return self._description + + @description.setter + def description(self, value: Optional[base.MultiLanguageTextType]): + self._description = value + + @property + def display_name(self) -> Optional[base.MultiLanguageNameType]: + return self._display_name + + @display_name.setter + def display_name(self, value: Optional[base.MultiLanguageNameType]): + self._display_name = value + + def commit(self): + pass + + def update(self): + pass + + def update_from(self, other: "Referable", update_source: bool = False): + pass + + +class SubmodelDescriptor(Descriptor): + + def __init__(self, id_: base.Identifier, endpoints: List[Endpoint], + administration: Optional[base.AdministrativeInformation] = None, + id_short: Optional[base.NameType] = None, semantic_id: Optional[base.Reference] = None, + supplemental_semantic_id: Iterable[base.Reference] = ()): + super().__init__() + self.id: base.Identifier = id_ + self.endpoints: List[Endpoint] = endpoints + self.administration: Optional[base.AdministrativeInformation] = administration + self.id_short: Optional[base.NameType] = id_short + self.semantic_id: Optional[base.Reference] = semantic_id + self.supplemental_semantic_id: base.ConstrainedList[base.Reference] = \ + base.ConstrainedList(supplemental_semantic_id) + + +class AssetAdministrationShellDescriptor(Descriptor): + + def __init__(self, + id_: base.Identifier, + administration: Optional[base.AdministrativeInformation] = None, + asset_kind: Optional[base.AssetKind] = None, + asset_type: Optional[base.Identifier] = None, + endpoints: Optional[List[Endpoint]] = None, + global_asset_id: Optional[base.Identifier] = None, + id_short: Optional[base.NameType] = None, + specific_asset_id: Iterable[base.SpecificAssetId] = (), + submodel_descriptors: Optional[List[SubmodelDescriptor]] 
= None, + description: Optional[base.MultiLanguageTextType] = None, + display_name: Optional[base.MultiLanguageNameType] = None, + extension: Iterable[base.Extension] = ()): + """AssetAdministrationShellDescriptor - + + Nur das 'id'-Feld (id_) ist zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. + """ + super().__init__() + self.administration: Optional[base.AdministrativeInformation] = administration + self.asset_kind: Optional[base.AssetKind] = asset_kind + self.asset_type: Optional[base.Identifier] = asset_type + self.endpoints: Optional[ + List[Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt + self.global_asset_id: Optional[base.Identifier] = global_asset_id + self.id_short: Optional[base.NameType] = id_short + self.id: base.Identifier = id_ + self._specific_asset_id: base.ConstrainedList[base.SpecificAssetId] = base.ConstrainedList( + specific_asset_id, + item_set_hook=self._check_constraint_set_spec_asset_id, + item_del_hook=self._check_constraint_del_spec_asset_id + ) + self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] + self.description: Optional[base.MultiLanguageTextType] = description + self.display_name: Optional[base.MultiLanguageNameType] = display_name + self.extension = base.NamespaceSet(self, [("name", True)], extension) + + @property + def specific_asset_id(self) -> base.ConstrainedList[base.SpecificAssetId]: + return self._specific_asset_id + + @specific_asset_id.setter + def specific_asset_id(self, specific_asset_id: Iterable[base.SpecificAssetId]) -> None: + # constraints are checked via _check_constraint_set_spec_asset_id() in this case + self._specific_asset_id[:] = specific_asset_id + + def _check_constraint_set_spec_asset_id(self, items_to_replace: List[base.SpecificAssetId], + new_items: List[base.SpecificAssetId], + old_list: List[base.SpecificAssetId]) -> None: + self._validate_aasd_131(self.global_asset_id, + len(old_list) - 
len(items_to_replace) + len(new_items) > 0) + + def _check_constraint_del_spec_asset_id(self, _item_to_del: base.SpecificAssetId, + old_list: List[base.SpecificAssetId]) -> None: + self._validate_aasd_131(self.global_asset_id, len(old_list) > 1) diff --git a/server/pyproject.toml b/server/pyproject.toml new file mode 100644 index 000000000..7a6af3106 --- /dev/null +++ b/server/pyproject.toml @@ -0,0 +1,63 @@ +[build-system] +requires = [ + "setuptools>=45", + "wheel", + "setuptools_scm[toml]>=6.2" +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +# Configure setuptools_scm for version management: +# - Automatically infers the version number from the most recent git tag +# - Generates a version.py file in the package directory +# - Allows for automatic versioning between releases (e.g., 1.0.1.dev4+g12345) +# If you want to use the version anywhere in the code, use +# ``` +# from basyx.version import version +# print(f"Project version: {version}") +# ``` +root = ".." # Defines the path to the root of the repository +version_file = "app/version.py" + +[project] +name = "basyx-python-server" +dynamic = ["version"] +description = "The Eclipse BaSyx Python Server, an implementation of the BaSyx AAS Server" #FIXME +authors = [ + { name = "The Eclipse BaSyx Authors", email = "admins@iat.rwth-aachen.de" } +] +readme = "README.md" +license = { file = "LICENSE" } +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Development Status :: 5 - Production/Stable" +] +requires-python = ">=3.9" +dependencies = [ + "basyx-python-sdk", #TODO: Think about the version + "urllib3>=1.26,<3", + "Werkzeug>=3.0.3,<4", +] + +[project.optional-dependencies] +dev = [ + "mypy", + "pycodestyle", + "codeblocks", + "coverage", + "schemathesis~=3.7", + "jsonschema~=4.7", + "hypothesis~=6.13", + "types-python-dateutil", +] + +[project.urls] +"Homepage" = 
"https://github.com/eclipse-basyx/basyx-python-sdk" + +[tool.setuptools] +packages = { find = { include = ["basyx*"], exclude = ["test*"] } } + +[tool.setuptools.package-data] +app = ["py.typed"] From a6577beb642f449f2af7fbddf5b3b82b36f64cb0 Mon Sep 17 00:00:00 2001 From: zrgt Date: Tue, 15 Apr 2025 17:46:49 +0200 Subject: [PATCH 08/52] Refactor `_get_aas_class_parsers` Create a static method for the `aas_class_parsers` so that we can overload this method in `ServerAASFromJsonDecoder` and avoid code duplication by copy/paste `object_hook()` --- .../aas/adapter/json/json_deserialization.py | 35 +++++---- server/app/adapter/jsonization.py | 72 ++----------------- 2 files changed, 30 insertions(+), 77 deletions(-) diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index c1ce35fef..c1765eb95 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -154,19 +154,20 @@ def __init__(self, *args, **kwargs): json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs) @classmethod - def object_hook(cls, dct: Dict[str, object]) -> object: - # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON - # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. - if 'modelType' not in dct: - return dct + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + """ + Returns the dictionary of AAS class parsers. - # The following dict specifies a constructor method for all AAS classes that may be identified using the - # ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON - # representation of an object and tries to construct a Python object from it. 
Embedded objects that have a - # modelType themselves are expected to be converted to the correct PythonType already. Additionally, each - # function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects - # instead of raising an Exception. - AAS_CLASS_PARSERS: Dict[str, Callable[[Dict[str, object]], object]] = { + The following dict specifies a constructor method for all AAS classes that may be identified using the + ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON + representation of an object and tries to construct a Python object from it. Embedded objects that have a + modelType themselves are expected to be converted to the correct PythonType already. Additionally, each + function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects + instead of raising an Exception. + + :return: The dictionary of AAS class parsers + """ + aas_class_parsers: Dict[str, Callable[[Dict[str, object]], object]] = { 'AssetAdministrationShell': cls._construct_asset_administration_shell, 'AssetInformation': cls._construct_asset_information, 'SpecificAssetId': cls._construct_specific_asset_id, @@ -189,6 +190,16 @@ def object_hook(cls, dct: Dict[str, object]) -> object: 'ReferenceElement': cls._construct_reference_element, 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, } + return aas_class_parsers + + @classmethod + def object_hook(cls, dct: Dict[str, object]) -> object: + # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON + # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. 
+ if 'modelType' not in dct: + return dct + + AAS_CLASS_PARSERS = cls._get_aas_class_parsers() # Get modelType and constructor function if not isinstance(dct['modelType'], str): diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 251127339..b0dcdfa04 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -9,7 +9,6 @@ import json import logging -import pprint from typing import Callable import contextlib @@ -20,73 +19,16 @@ class ServerAASFromJsonDecoder(AASFromJsonDecoder): @classmethod - def object_hook(cls, dct: Dict[str, object]) -> object: - # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON - # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. - if 'modelType' not in dct: - return dct - - # The following dict specifies a constructor method for all AAS classes that may be identified using the - # ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON - # representation of an object and tries to construct a Python object from it. Embedded objects that have a - # modelType themselves are expected to be converted to the correct PythonType already. Additionally, each - # function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects - # instead of raising an Exception. 
- AAS_CLASS_PARSERS: Dict[str, Callable[[Dict[str, object]], object]] = { - 'AssetAdministrationShell': cls._construct_asset_administration_shell, - 'AssetInformation': cls._construct_asset_information, - 'SpecificAssetId': cls._construct_specific_asset_id, - 'ConceptDescription': cls._construct_concept_description, - 'Extension': cls._construct_extension, - 'Submodel': cls._construct_submodel, - 'Capability': cls._construct_capability, - 'Entity': cls._construct_entity, - 'BasicEventElement': cls._construct_basic_event_element, - 'Operation': cls._construct_operation, - 'RelationshipElement': cls._construct_relationship_element, - 'AnnotatedRelationshipElement': cls._construct_annotated_relationship_element, - 'SubmodelElementCollection': cls._construct_submodel_element_collection, - 'SubmodelElementList': cls._construct_submodel_element_list, - 'Blob': cls._construct_blob, - 'File': cls._construct_file, - 'MultiLanguageProperty': cls._construct_multi_language_property, - 'Property': cls._construct_property, - 'Range': cls._construct_range, - 'ReferenceElement': cls._construct_reference_element, - 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + aas_class_parsers = super()._get_aas_class_parsers() + aas_class_parsers.update({ 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, 'SubmodelDescriptor': cls._construct_submodel_descriptor, 'AssetLink': cls._construct_asset_link, - } - - # Get modelType and constructor function - if not isinstance(dct['modelType'], str): - logger.warning("JSON object has unexpected format of modelType: %s", dct['modelType']) - # Even in strict mode, we consider 'modelType' attributes of wrong type as non-AAS objects instead of - # raising an exception. 
However, the object's type will probably checked later by read_json_aas_file() or - # _expect_type() - return dct - model_type = dct['modelType'] - if model_type not in AAS_CLASS_PARSERS: - if not cls.failsafe: - raise TypeError("Found JSON object with modelType=\"%s\", which is not a known AAS class" % model_type) - logger.error("Found JSON object with modelType=\"%s\", which is not a known AAS class", model_type) - return dct - - # Use constructor function to transform JSON representation into BaSyx Python SDK model object - try: - return AAS_CLASS_PARSERS[model_type](dct) - except (KeyError, TypeError, model.AASConstraintViolation) as e: - error_message = "Error while trying to convert JSON object into {}: {} >>> {}".format( - model_type, e, pprint.pformat(dct, depth=2, width=2 ** 14, compact=True)) - if cls.failsafe: - logger.error(error_message, exc_info=e) - # In failsafe mode, we return the raw JSON object dict, if there were errors while parsing an object, so - # a client application is able to handle this data. The read_json_aas_file() function and all - # constructors for complex objects will skip those items by using _expect_type(). 
- return dct - else: - raise (type(e) if isinstance(e, (KeyError, TypeError)) else TypeError)(error_message) from e + 'ProtocolInformation': cls._construct_protocol_information, + 'Endpoint': cls._construct_endpoint + }) + return aas_class_parsers # ################################################################################################## # Utility Methods used in constructor methods to add general attributes (from abstract base classes) From 11c59bc0287086bfadcdc076e0c621b730a42f4e Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 15 Apr 2025 20:53:01 +0200 Subject: [PATCH 09/52] fix aas_descriptor construct method --- server/app/adapter/jsonization.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index b0dcdfa04..ebfe06895 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -58,14 +58,12 @@ def _construct_asset_administration_shell_descriptor( if 'administration' in dct: ret.administration = cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) if 'assetkind' in dct: - # FIXME - asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] + ret.asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] if 'assetType' in dct: ret.asset_type = _get_ts(dct, 'assetType', str) global_asset_id = None if 'globalAssetId' in dct: - # FIXME - global_asset_id = _get_ts(dct, 'globalAssetId', str) + ret.global_asset_id = _get_ts(dct, 'globalAssetId', str) specific_asset_id = set() if 'specificAssetIds' in dct: for desc_data in _get_ts(dct, "specificAssetIds", list): From 6d4aab1aa8b3478641a57a02b9caf2b8375493d6 Mon Sep 17 00:00:00 2001 From: zrgt Date: Tue, 15 Apr 2025 20:59:54 +0200 Subject: [PATCH 10/52] Refactor `read_aas_json_file_into` Create a KEYS_TO_TYPE tuple with top-level JSON keys and the corresponding SDK types. 
By providing this tuple as a param for `read_aas_json_file_into` we can reuse the method in `read_server_aas_json_file_into` and avoid code duplication --- .../aas/adapter/json/json_deserialization.py | 70 ++++++------ server/app/adapter/jsonization.py | 105 +++--------------- 2 files changed, 49 insertions(+), 126 deletions(-) diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index c1765eb95..83616f5b2 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,7 +34,7 @@ import json import logging import pprint -from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args +from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args, Tuple, Iterable from basyx.aas import model from .._generic import MODELLING_KIND_INVERSE, ASSET_KIND_INVERSE, KEY_TYPES_INVERSE, ENTITY_TYPES_INVERSE, \ @@ -808,9 +808,17 @@ def _select_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[AASFr return StrictAASFromJsonDecoder +KEYS_TO_TYPES = ( + ('assetAdministrationShells', model.AssetAdministrationShell), + ('submodels', model.Submodel), + ('conceptDescriptions', model.ConceptDescription) +) + + def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, - decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + decoder: Optional[Type[AASFromJsonDecoder]] = None, + keys_to_types: Iterable[Tuple[str, any]] = KEYS_TO_TYPES) -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a given object store. 
@@ -828,6 +836,7 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 This parameter is ignored if a decoder class is specified. :param decoder: The decoder class used to decode the JSON objects + :param keys_to_types: A dictionary of JSON keys to expected types. This is used to check the type of the objects :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier :raises KeyError: Encountered an identifier that already exists in the given ``object_store`` with both ``replace_existing`` and ``ignore_existing`` set to ``False`` @@ -854,45 +863,38 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO with cm as fp: data = json.load(fp, cls=decoder_) - for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), - ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription)): + for name, expected_type in keys_to_types: try: lst = _get_ts(data, name, list) except (KeyError, TypeError): continue for item in lst: - error_message = "Expected a {} in list '{}', but found {}".format( - expected_type.__name__, name, repr(item)) - if isinstance(item, model.Identifiable): - if not isinstance(item, expected_type): - if decoder_.failsafe: - logger.warning("{} was in wrong list '{}'; nevertheless, we'll use it".format(item, name)) - else: - raise TypeError(error_message) - if item.id in ret: - error_message = f"{item} has a duplicate identifier already parsed in the document!" 
- if not decoder_.failsafe: - raise KeyError(error_message) - logger.error(error_message + " skipping it...") + if not isinstance(item, expected_type): + if not decoder_.failsafe: + raise TypeError(f"{item} was in the wrong list '{name}'") + logger.warning(f"{item} was in the wrong list '{name}'; nevertheless, we'll use it") + + if item.id in ret: + error_msg = f"{item} has a duplicate identifier already parsed in the document!" + if not decoder_.failsafe: + raise KeyError(error_msg) + logger.error(f"{error_msg} skipping it...") + continue + + existing_element = object_store.get(item.id) + if existing_element is not None: + if not replace_existing: + error_msg = f"Object with id '{item.id}' already exists in store: {existing_element}!" + if not ignore_existing: + raise KeyError(f"{error_msg} Failed to insert {item}!") + logger.info(f"{error_msg}; Skipping {item}...") continue - existing_element = object_store.get(item.id) - if existing_element is not None: - if not replace_existing: - error_message = f"object with identifier {item.id} already exists " \ - f"in the object store: {existing_element}!" 
- if not ignore_existing: - raise KeyError(error_message + f" failed to insert {item}!") - logger.info(error_message + f" skipping insertion of {item}...") - continue - object_store.discard(existing_element) - object_store.add(item) - ret.add(item.id) - elif decoder_.failsafe: - logger.error(error_message) - else: - raise TypeError(error_message) + object_store.discard(existing_element) + + object_store.add(item) + ret.add(item.id) + return ret diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index b0dcdfa04..6fdea97d9 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -1,18 +1,16 @@ -from typing import Dict, Set, ContextManager, IO, get_args, Optional, Type, List +from typing import Dict, Set, Optional, Type, List import server.app.server_model as server_model from basyx.aas import model from basyx.aas.adapter import _generic -from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, Path, PathOrIO +from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO from basyx.aas.adapter.json import AASToJsonEncoder -from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, _select_decoder +from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, KEYS_TO_TYPES, \ + read_aas_json_file_into -import json import logging from typing import Callable -import contextlib - logger = logging.getLogger(__name__) @@ -209,94 +207,17 @@ class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, Ser pass -def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, +def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: - """ - Read an Asset Administration 
Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 - into a given object store. - - :param object_store: The :class:`ObjectStore ` in which the - identifiable objects should be stored - :param file: A filename or file-like object to read the JSON-serialized data from - :param replace_existing: Whether to replace existing objects with the same identifier in the object store or not - :param ignore_existing: Whether to ignore existing objects (e.g. log a message) or raise an error. - This parameter is ignored if replace_existing is ``True``. - :param failsafe: If ``True``, the document is parsed in a failsafe way: Missing attributes and elements are logged - instead of causing exceptions. Defect objects are skipped. - This parameter is ignored if a decoder class is specified. - :param stripped: If ``True``, stripped JSON objects are parsed. - See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 - This parameter is ignored if a decoder class is specified. - :param decoder: The decoder class used to decode the JSON objects - :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier - :raises KeyError: Encountered an identifier that already exists in the given ``object_store`` with both - ``replace_existing`` and ``ignore_existing`` set to ``False`` - :raises (~basyx.aas.model.base.AASConstraintViolation, KeyError, ValueError, TypeError): **Non-failsafe**: - Errors during construction of the objects - :raises TypeError: **Non-failsafe**: Encountered an element in the wrong list - (e.g. 
an AssetAdministrationShell in ``submodels``) - :return: A set of :class:`Identifiers ` that were added to object_store - """ - ret: Set[model.Identifier] = set() - decoder_ = _select_decoder(failsafe, stripped, decoder) - - # json.load() accepts TextIO and BinaryIO - cm: ContextManager[IO] - if isinstance(file, get_args(Path)): - # 'file' is a path, needs to be opened first - cm = open(file, "r", encoding="utf-8-sig") - else: - # 'file' is not a path, thus it must already be IO - # mypy seems to have issues narrowing the type due to get_args() - cm = contextlib.nullcontext(file) # type: ignore[arg-type] - - # read, parse and convert JSON file - with cm as fp: - data = json.load(fp, cls=decoder_) - - for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), - ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription), - ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), - ('submodelDescriptors', server_model.SubmodelDescriptor)): - try: - lst = _get_ts(data, name, list) - except (KeyError, TypeError): - continue - - for item in lst: - error_message = "Expected a {} in list '{}', but found {}".format( - expected_type.__name__, name, repr(item)) - if isinstance(item, model.Identifiable): - if not isinstance(item, expected_type): - if decoder_.failsafe: - logger.warning("{} was in wrong list '{}'; nevertheless, we'll use it".format(item, name)) - else: - raise TypeError(error_message) - if item.id in ret: - error_message = f"{item} has a duplicate identifier already parsed in the document!" - if not decoder_.failsafe: - raise KeyError(error_message) - logger.error(error_message + " skipping it...") - continue - existing_element = object_store.get(item.id) - if existing_element is not None: - if not replace_existing: - error_message = f"object with identifier {item.id} already exists " \ - f"in the object store: {existing_element}!" 
- if not ignore_existing: - raise KeyError(error_message + f" failed to insert {item}!") - logger.info(error_message + f" skipping insertion of {item}...") - continue - object_store.discard(existing_element) - object_store.add(item) - ret.add(item.id) - elif decoder_.failsafe: - logger.error(error_message) - else: - raise TypeError(error_message) - return ret + keys_to_types = list(KEYS_TO_TYPES) + keys_to_types.extend([ + ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', server_model.SubmodelDescriptor) + ]) + return read_aas_json_file_into(object_store=object_store, file=file, replace_existing=replace_existing, + ignore_existing=ignore_existing, failsafe=failsafe, stripped=stripped, + decoder=decoder, keys_to_types=keys_to_types) class ServerAASToJsonEncoder(AASToJsonEncoder): From a34230f166bd4a56a2479735a3683617b2bed8d3 Mon Sep 17 00:00:00 2001 From: zrgt Date: Tue, 15 Apr 2025 21:11:24 +0200 Subject: [PATCH 11/52] Refactor `default()` Create a class method `_get_aas_class_serializers` for the mapping in `default()` so that we can overload `_get_aas_class_serializers` in `ServerAASToJsonEncoder` and avoid code duplication by copy/paste `default()` --- .../aas/adapter/json/json_serialization.py | 65 ++++++++++--------- server/app/adapter/jsonization.py | 58 ++++------------- 2 files changed, 46 insertions(+), 77 deletions(-) diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index 8c6a671f1..1cdfe12d9 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -57,6 +57,40 @@ class AASToJsonEncoder(json.JSONEncoder): """ stripped = False + @classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + mapping: Dict[Type, Callable] = { + model.AdministrativeInformation: cls._administrative_information_to_json, + model.AnnotatedRelationshipElement: 
cls._annotated_relationship_element_to_json, + model.AssetAdministrationShell: cls._asset_administration_shell_to_json, + model.AssetInformation: cls._asset_information_to_json, + model.BasicEventElement: cls._basic_event_element_to_json, + model.Blob: cls._blob_to_json, + model.Capability: cls._capability_to_json, + model.ConceptDescription: cls._concept_description_to_json, + model.DataSpecificationIEC61360: cls._data_specification_iec61360_to_json, + model.Entity: cls._entity_to_json, + model.Extension: cls._extension_to_json, + model.File: cls._file_to_json, + model.Key: cls._key_to_json, + model.LangStringSet: cls._lang_string_set_to_json, + model.MultiLanguageProperty: cls._multi_language_property_to_json, + model.Operation: cls._operation_to_json, + model.Property: cls._property_to_json, + model.Qualifier: cls._qualifier_to_json, + model.Range: cls._range_to_json, + model.Reference: cls._reference_to_json, + model.ReferenceElement: cls._reference_element_to_json, + model.RelationshipElement: cls._relationship_element_to_json, + model.Resource: cls._resource_to_json, + model.SpecificAssetId: cls._specific_asset_id_to_json, + model.Submodel: cls._submodel_to_json, + model.SubmodelElementCollection: cls._submodel_element_collection_to_json, + model.SubmodelElementList: cls._submodel_element_list_to_json, + model.ValueReferencePair: cls._value_reference_pair_to_json, + } + return mapping + def default(self, obj: object) -> object: """ The overwritten ``default`` method for :class:`json.JSONEncoder` @@ -64,36 +98,7 @@ def default(self, obj: object) -> object: :param obj: The object to serialize to json :return: The serialized object """ - mapping: Dict[Type, Callable] = { - model.AdministrativeInformation: self._administrative_information_to_json, - model.AnnotatedRelationshipElement: self._annotated_relationship_element_to_json, - model.AssetAdministrationShell: self._asset_administration_shell_to_json, - model.AssetInformation: self._asset_information_to_json, 
- model.BasicEventElement: self._basic_event_element_to_json, - model.Blob: self._blob_to_json, - model.Capability: self._capability_to_json, - model.ConceptDescription: self._concept_description_to_json, - model.DataSpecificationIEC61360: self._data_specification_iec61360_to_json, - model.Entity: self._entity_to_json, - model.Extension: self._extension_to_json, - model.File: self._file_to_json, - model.Key: self._key_to_json, - model.LangStringSet: self._lang_string_set_to_json, - model.MultiLanguageProperty: self._multi_language_property_to_json, - model.Operation: self._operation_to_json, - model.Property: self._property_to_json, - model.Qualifier: self._qualifier_to_json, - model.Range: self._range_to_json, - model.Reference: self._reference_to_json, - model.ReferenceElement: self._reference_element_to_json, - model.RelationshipElement: self._relationship_element_to_json, - model.Resource: self._resource_to_json, - model.SpecificAssetId: self._specific_asset_id_to_json, - model.Submodel: self._submodel_to_json, - model.SubmodelElementCollection: self._submodel_element_collection_to_json, - model.SubmodelElementList: self._submodel_element_list_to_json, - model.ValueReferencePair: self._value_reference_pair_to_json, - } + mapping = self._get_aas_class_serializers() for typ in mapping: if isinstance(obj, typ): mapping_method = mapping[typ] diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 6fdea97d9..58c9ec6f0 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -222,53 +222,17 @@ def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file class ServerAASToJsonEncoder(AASToJsonEncoder): - def default(self, obj: object) -> object: - """ - The overwritten ``default`` method for :class:`json.JSONEncoder` - - :param obj: The object to serialize to json - :return: The serialized object - """ - mapping: Dict[Type, Callable] = { - model.AdministrativeInformation: 
self._administrative_information_to_json, - model.AnnotatedRelationshipElement: self._annotated_relationship_element_to_json, - model.AssetAdministrationShell: self._asset_administration_shell_to_json, - model.AssetInformation: self._asset_information_to_json, - model.BasicEventElement: self._basic_event_element_to_json, - model.Blob: self._blob_to_json, - model.Capability: self._capability_to_json, - model.ConceptDescription: self._concept_description_to_json, - model.DataSpecificationIEC61360: self._data_specification_iec61360_to_json, - model.Entity: self._entity_to_json, - model.Extension: self._extension_to_json, - model.File: self._file_to_json, - model.Key: self._key_to_json, - model.LangStringSet: self._lang_string_set_to_json, - model.MultiLanguageProperty: self._multi_language_property_to_json, - model.Operation: self._operation_to_json, - model.Property: self._property_to_json, - model.Qualifier: self._qualifier_to_json, - model.Range: self._range_to_json, - model.Reference: self._reference_to_json, - model.ReferenceElement: self._reference_element_to_json, - model.RelationshipElement: self._relationship_element_to_json, - model.Resource: self._resource_to_json, - model.SpecificAssetId: self._specific_asset_id_to_json, - model.Submodel: self._submodel_to_json, - model.SubmodelElementCollection: self._submodel_element_collection_to_json, - model.SubmodelElementList: self._submodel_element_list_to_json, - model.ValueReferencePair: self._value_reference_pair_to_json, - server_model.AssetAdministrationShellDescriptor: self._asset_administration_shell_descriptor_to_json, - server_model.SubmodelDescriptor: self._submodel_descriptor_to_json, - server_model.Endpoint: self._endpoint_to_json, - server_model.ProtocolInformation: self._protocol_information_to_json, - server_model.AssetLink: self._asset_link_to_json - } - for typ in mapping: - if isinstance(obj, typ): - mapping_method = mapping[typ] - return mapping_method(obj) - return super().default(obj) + 
@classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + serializers = super()._get_aas_class_serializers() + serializers.update({ + server_model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, + server_model.SubmodelDescriptor: cls._submodel_descriptor_to_json, + server_model.Endpoint: cls._endpoint_to_json, + server_model.ProtocolInformation: cls._protocol_information_to_json, + server_model.AssetLink: cls._asset_link_to_json + }) + return serializers @classmethod def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: From 9079d82c2542e8d54129811a868f53f2e67e4eea Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 15 Apr 2025 22:19:14 +0200 Subject: [PATCH 12/52] fix method update_from --- server/app/server_model.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/server/app/server_model.py b/server/app/server_model.py index bf6d50bee..65fde1161 100644 --- a/server/app/server_model.py +++ b/server/app/server_model.py @@ -142,8 +142,17 @@ def commit(self): def update(self): pass - def update_from(self, other: "Referable", update_source: bool = False): - pass + def update_from(self, other: "Descriptor", update_source: bool = False): + """ + Updates the descriptor's attributes from another descriptor. + + :param other: The descriptor to update from. + :param update_source: Placeholder for compatibility; not used in this context. 
+ """ + for attr in vars(other): + if attr == "id": + continue # Skip updating the unique identifier of the AAS + setattr(self, attr, getattr(other, attr)) class SubmodelDescriptor(Descriptor): From a366538a0175f815b22a20fc80c697f0c81267a1 Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 01:05:20 +0200 Subject: [PATCH 13/52] Refactor `_create_dict()` - Refactor `_create_dict()` - Add `JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES` in `_generic` and use it for `_create_dict()` Create a class method `_get_aas_class_serializers` for the mapping in `default()` and in `read_aas_json_file_into()` - Use extended `JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES` in `read_server_aas_json_file_into` --- sdk/basyx/aas/adapter/_generic.py | 7 +++ .../aas/adapter/json/json_deserialization.py | 11 +---- .../aas/adapter/json/json_serialization.py | 48 +++++++++++-------- server/app/adapter/jsonization.py | 16 +++---- 4 files changed, 44 insertions(+), 38 deletions(-) diff --git a/sdk/basyx/aas/adapter/_generic.py b/sdk/basyx/aas/adapter/_generic.py index 6a37c7412..00d78caff 100644 --- a/sdk/basyx/aas/adapter/_generic.py +++ b/sdk/basyx/aas/adapter/_generic.py @@ -19,6 +19,13 @@ PathOrBinaryIO = Union[Path, BinaryIO] PathOrIO = Union[Path, IO] # IO is TextIO or BinaryIO +# JSON top-level keys and their corresponding model classes +JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES = ( + ('assetAdministrationShells', model.AssetAdministrationShell), + ('submodels', model.Submodel), + ('conceptDescriptions', model.ConceptDescription), +) + # XML Namespace definition XML_NS_MAP = {"aas": "https://admin-shell.io/aas/3/0"} XML_NS_AAS = "{" + XML_NS_MAP["aas"] + "}" diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index 83616f5b2..3e3f960a6 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -39,7 +39,7 @@ from basyx.aas import model from .._generic import MODELLING_KIND_INVERSE, 
ASSET_KIND_INVERSE, KEY_TYPES_INVERSE, ENTITY_TYPES_INVERSE, \ IEC61360_DATA_TYPES_INVERSE, IEC61360_LEVEL_TYPES_INVERSE, KEY_TYPES_CLASSES_INVERSE, REFERENCE_TYPES_INVERSE, \ - DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path + DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES logger = logging.getLogger(__name__) @@ -808,17 +808,10 @@ def _select_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[AASFr return StrictAASFromJsonDecoder -KEYS_TO_TYPES = ( - ('assetAdministrationShells', model.AssetAdministrationShell), - ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription) -) - - def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, decoder: Optional[Type[AASFromJsonDecoder]] = None, - keys_to_types: Iterable[Tuple[str, any]] = KEYS_TO_TYPES) -> Set[model.Identifier]: + keys_to_types: Iterable[Tuple[str, any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a given object store. diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index 1cdfe12d9..07820b7fe 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -30,11 +30,12 @@ import contextlib import inspect import io -from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args +from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args, Iterable, Tuple import json from basyx.aas import model from .. 
import _generic +from .._generic import JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES class AASToJsonEncoder(json.JSONEncoder): @@ -698,26 +699,33 @@ def _select_encoder(stripped: bool, encoder: Optional[Type[AASToJsonEncoder]] = return AASToJsonEncoder if not stripped else StrippedAASToJsonEncoder -def _create_dict(data: model.AbstractObjectStore) -> dict: - # separate different kind of objects - asset_administration_shells: List[model.AssetAdministrationShell] = [] - submodels: List[model.Submodel] = [] - concept_descriptions: List[model.ConceptDescription] = [] +def _create_dict(data: model.AbstractObjectStore, + keys_to_types: Iterable[Tuple[str, Type]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) -> Dict[str, List[object]]: + """ + Categorizes objects from an AbstractObjectStore into a dictionary based on their types. + + This function iterates over the objects in the provided AbstractObjectStore and groups them into lists + based on their types, as defined in the `keys_to_types` mapping. The resulting dictionary contains + keys corresponding to the names in `keys_to_types` and values as lists of objects of the respective types. + + :param data: An AbstractObjectStore containing objects to be categorized. + :param keys_to_types: An iterable of tuples where each tuple contains: + - A string key representing the category name. + - A type to match objects against. + :return: A dictionary where keys are category names and values are lists of objects of the corresponding types. 
+ """ + objects = {} + for obj in data: - if isinstance(obj, model.AssetAdministrationShell): - asset_administration_shells.append(obj) - elif isinstance(obj, model.Submodel): - submodels.append(obj) - elif isinstance(obj, model.ConceptDescription): - concept_descriptions.append(obj) - dict_: Dict[str, List] = {} - if asset_administration_shells: - dict_['assetAdministrationShells'] = asset_administration_shells - if submodels: - dict_['submodels'] = submodels - if concept_descriptions: - dict_['conceptDescriptions'] = concept_descriptions - return dict_ + # Iterate through the mapping of category names to expected types + for name, expected_type in keys_to_types: + # Check if the object matches the expected type + if isinstance(obj, expected_type): + # Add the object to the appropriate category in the dictionary + objects.setdefault(name, []) + objects[name].append(obj) + break # Exit the inner loop once a match is found + return objects def object_store_to_json(data: model.AbstractObjectStore, stripped: bool = False, diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 73f5220a1..86ffe6050 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -5,7 +5,7 @@ from basyx.aas.adapter import _generic from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO from basyx.aas.adapter.json import AASToJsonEncoder -from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, KEYS_TO_TYPES, \ +from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES, \ read_aas_json_file_into import logging @@ -14,6 +14,11 @@ logger = logging.getLogger(__name__) +JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( + ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', server_model.SubmodelDescriptor) +) + class 
ServerAASFromJsonDecoder(AASFromJsonDecoder): @classmethod @@ -208,14 +213,9 @@ class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, Ser def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: - keys_to_types = list(KEYS_TO_TYPES) - keys_to_types.extend([ - ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), - ('submodelDescriptors', server_model.SubmodelDescriptor) - ]) return read_aas_json_file_into(object_store=object_store, file=file, replace_existing=replace_existing, ignore_existing=ignore_existing, failsafe=failsafe, stripped=stripped, - decoder=decoder, keys_to_types=keys_to_types) + decoder=decoder, keys_to_types=JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES) class ServerAASToJsonEncoder(AASToJsonEncoder): @@ -242,10 +242,8 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: data['displayName'] = obj.display_name if obj.extension: data['extensions'] = list(obj.extension) - return data - @classmethod def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> Dict[str, object]: """ From 72297f460ab445d8b06a2ae647bd9377f0e61842 Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 01:05:31 +0200 Subject: [PATCH 14/52] Remove `jsonization._create_dict` as not used --- server/app/adapter/jsonization.py | 37 ------------------------------- 1 file changed, 37 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 86ffe6050..b6ae1635c 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -328,40 +328,3 @@ def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: data['name'] = obj.name data['value'] = 
obj.value return data - - -def _create_dict(data: model.AbstractObjectStore) -> dict: - # separate different kind of objects - asset_administration_shells: List[model.AssetAdministrationShell] = [] - submodels: List[model.Submodel] = [] - concept_descriptions: List[model.ConceptDescription] = [] - asset_administration_shell_descriptors: List[server_model.AssetAdministrationShellDescriptor] = [] - submodel_descriptors: List[server_model.SubmodelDescriptor] = [] - assets_links: List[server_model.AssetLink] = [] - for obj in data: - if isinstance(obj, model.AssetAdministrationShell): - asset_administration_shells.append(obj) - elif isinstance(obj, model.Submodel): - submodels.append(obj) - elif isinstance(obj, model.ConceptDescription): - concept_descriptions.append(obj) - elif isinstance(obj, server_model.AssetAdministrationShellDescriptor): - asset_administration_shell_descriptors.append(obj) - elif isinstance(obj, server_model.SubmodelDescriptor): - submodel_descriptors.append(obj) - elif isinstance(obj, server_model.AssetLink): - assets_links.append(obj) - dict_: Dict[str, List] = {} - if asset_administration_shells: - dict_['assetAdministrationShells'] = asset_administration_shells - if submodels: - dict_['submodels'] = submodels - if concept_descriptions: - dict_['conceptDescriptions'] = concept_descriptions - if asset_administration_shell_descriptors: - dict_['assetAdministrationShellDescriptors'] = asset_administration_shell_descriptors - if submodel_descriptors: - dict_['submodelDescriptors'] = submodel_descriptors - if assets_links: - dict_['assetLinks'] = assets_links - return dict_ From bd48dec3e01b13f99ffc3e5f1a2dd1454354f93c Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 01:49:22 +0200 Subject: [PATCH 15/52] Split `http.py` into `repository` and `http_api_helpers` In 'repository' we keep only AAS/Submodel/CD Repository App, in `http_api_helpers` we keep all classes/funcs which will be used across discovery/repository/registry apps --- 
server/app/discovery.py | 4 +- server/app/http_api_helpers.py | 433 +++++++++++++++++++++ server/app/main.py | 2 +- server/app/registry.py | 4 +- server/app/{http.py => repository.py} | 526 ++------------------------ 5 files changed, 473 insertions(+), 496 deletions(-) create mode 100644 server/app/http_api_helpers.py rename server/app/{http.py => repository.py} (67%) diff --git a/server/app/discovery.py b/server/app/discovery.py index f5f0a215f..524b3c123 100644 --- a/server/app/discovery.py +++ b/server/app/discovery.py @@ -6,9 +6,9 @@ from basyx.aas import model from server.app import server_model from server.app.adapter.jsonization import ServerAASToJsonEncoder -from .http import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder +from .http_api_helpers import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder from werkzeug.routing import MapAdapter, Rule, Submount -from .http import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request +from .http_api_helpers import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set import abc diff --git a/server/app/http_api_helpers.py b/server/app/http_api_helpers.py new file mode 100644 index 000000000..1f6f96770 --- /dev/null +++ b/server/app/http_api_helpers.py @@ -0,0 +1,433 @@ +# Copyright (c) 2024 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. 
+# +# SPDX-License-Identifier: MIT +import abc +import base64 +import binascii +import datetime +import enum +import io +import json + +from lxml import etree +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.urls +import werkzeug.utils +from werkzeug.exceptions import BadRequest, UnprocessableEntity +from werkzeug.wrappers import Request, Response + +import server.app.server_model as server_model +from basyx.aas import model +from basyx.aas.adapter._generic import XML_NS_MAP + +from basyx.aas.adapter.xml import xml_serialization, XMLConstructables + +from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element +from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder + +from typing import Callable, Dict, List, Optional, Type, TypeVar, Union + +@enum.unique +class MessageType(enum.Enum): + UNDEFINED = enum.auto() + INFO = enum.auto() + WARNING = enum.auto() + ERROR = enum.auto() + EXCEPTION = enum.auto() + + def __str__(self): + return self.name.capitalize() + + +class Message: + def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, + timestamp: Optional[datetime.datetime] = None): + self.code: str = code + self.text: str = text + self.message_type: MessageType = message_type + self.timestamp: datetime.datetime = timestamp if timestamp is not None \ + else datetime.datetime.now(datetime.timezone.utc) + + +class Result: + def __init__(self, success: bool, messages: Optional[List[Message]] = None): + if messages is None: + messages = [] + self.success: bool = success + self.messages: List[Message] = messages + + +class ResultToJsonEncoder(ServerAASToJsonEncoder): + @classmethod + def _result_to_json(cls, result: Result) -> Dict[str, object]: + return { + "success": result.success, + "messages": result.messages + } + + @classmethod + def _message_to_json(cls, message: Message) -> Dict[str, object]: 
+ return { + "messageType": message.message_type, + "text": message.text, + "code": message.code, + "timestamp": message.timestamp.isoformat() + } + + def default(self, obj: object) -> object: + if isinstance(obj, Result): + return self._result_to_json(obj) + if isinstance(obj, Message): + return self._message_to_json(obj) + if isinstance(obj, MessageType): + return str(obj) + return super().default(obj) + + +class StrippedResultToJsonEncoder(ResultToJsonEncoder): + stripped = True + + +ResponseData = Union[Result, object, List[object]] + + +class APIResponse(abc.ABC, Response): + @abc.abstractmethod + def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, + stripped: bool = False, *args, **kwargs): + super().__init__(*args, **kwargs) + if obj is None: + self.status_code = 204 + else: + self.data = self.serialize(obj, cursor, stripped) + + @abc.abstractmethod + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + pass + + +class JsonResponse(APIResponse): + def __init__(self, *args, content_type="application/json", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + if cursor is None: + data = obj + else: + data = { + "paging_metadata": {"cursor": str(cursor)}, + "result": obj + } + return json.dumps( + data, + cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, + separators=(",", ":") + ) + + +class XmlResponse(APIResponse): + def __init__(self, *args, content_type="application/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + root_elem = etree.Element("response", nsmap=XML_NS_MAP) + if cursor is not None: + root_elem.set("cursor", str(cursor)) + if isinstance(obj, Result): + result_elem = result_to_xml(obj, **XML_NS_MAP) + for child in result_elem: + 
root_elem.append(child) + elif isinstance(obj, list): + for item in obj: + item_elem = xml_serialization.object_to_xml_element(item) + root_elem.append(item_elem) + else: + obj_elem = xml_serialization.object_to_xml_element(obj) + for child in obj_elem: + root_elem.append(child) + etree.cleanup_namespaces(root_elem) + xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") + return xml_str # type: ignore[return-value] + + +class XmlResponseAlt(XmlResponse): + def __init__(self, *args, content_type="text/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + +def result_to_xml(result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(message_to_xml(message)) + + result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + +def message_to_xml(message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + + +def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return 
JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] + + +def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 400: + message = Message(type(exception).__name__, exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) + + +def is_stripped_request(request: Request) -> bool: + return request.args.get("level") == "core" + + +T = TypeVar("T") + +BASE64URL_ENCODING = "utf-8" + + +def base64url_decode(data: str) -> str: + try: + # If the requester omits the base64 padding, an exception will be raised. + # However, Python doesn't complain about too much padding, + # thus we simply always append two padding characters (==). 
+ # See also: https://stackoverflow.com/a/49459036/4780052 + decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) + except binascii.Error: + raise BadRequest(f"Encoded data {data} is invalid base64url!") + except UnicodeDecodeError: + raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") + return decoded + + +def base64url_encode(data: str) -> str: + encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") + return encoded + + +class HTTPApiDecoder: + # these are the types we can construct (well, only the ones we need) + type_constructables_map = { + model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, + model.AssetInformation: XMLConstructables.ASSET_INFORMATION, + model.ModelReference: XMLConstructables.MODEL_REFERENCE, + model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, + model.Qualifier: XMLConstructables.QUALIFIER, + model.Submodel: XMLConstructables.SUBMODEL, + model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, + model.Reference: XMLConstructables.REFERENCE, + + server_model.AssetAdministrationShellDescriptor: ServerXMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR, + server_model.SubmodelDescriptor: ServerXMLConstructables.SUBMODEL_DESCRIPTOR, + server_model.AssetLink: ServerXMLConstructables.ASSET_LINK, + } + + @classmethod + def check_type_supportance(cls, type_: type): + if type_ not in cls.type_constructables_map: + raise TypeError(f"Parsing {type_} is not supported!") + + @classmethod + def assert_type(cls, obj: object, type_: Type[T]) -> T: + if not isinstance(obj, type_): + raise UnprocessableEntity(f"Object {obj!r} is not of type {type_.__name__}!") + return obj + + @classmethod + def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + cls.check_type_supportance(expect_type) + decoder: Type[ServerStrictAASFromJsonDecoder] = 
ServerStrictStrippedAASFromJsonDecoder if stripped \ + else ServerStrictAASFromJsonDecoder + try: + parsed = json.loads(data, cls=decoder) + if isinstance(parsed, list) and expect_single: + raise UnprocessableEntity(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") + if not isinstance(parsed, list) and not expect_single: + raise UnprocessableEntity(f"Expected List[{expect_type.__name__}], got {parsed!r}!") + parsed = [parsed] if not isinstance(parsed, list) else parsed + + # TODO: the following is ugly, but necessary because references aren't self-identified objects + # in the json schema + # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines + # that automatically + mapping = { + model.ModelReference: decoder._construct_model_reference, # type: ignore[assignment] + model.AssetInformation: decoder._construct_asset_information, # type: ignore[assignment] + model.SpecificAssetId: decoder._construct_specific_asset_id, # type: ignore[assignment] + model.Reference: decoder._construct_reference, # type: ignore[assignment] + model.Qualifier: decoder._construct_qualifier, # type: ignore[assignment] + server_model.AssetAdministrationShellDescriptor: decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] + server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor, # type: ignore[assignment] + server_model.AssetLink: decoder._construct_asset_link, # type: ignore[assignment] + } + + constructor: Optional[Callable[..., T]] = mapping.get(expect_type) + args = [] + if expect_type is model.ModelReference: + args.append(model.Submodel) + + if constructor is not None: + # construct elements that aren't self-identified + return [constructor(obj, *args) for obj in parsed] + + except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: + raise UnprocessableEntity(str(e)) from e + + return [cls.assert_type(obj, expect_type) 
for obj in parsed] + + @classmethod + def base64urljson_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, expect_single) + + @classmethod + def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def base64urljson(cls, data: str, expect_type: Type[T], stripped: bool) -> T: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: + cls.check_type_supportance(expect_type) + try: + xml_data = io.BytesIO(data) + rv = read_server_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], + stripped=stripped, failsafe=False) + except (KeyError, ValueError) as e: + # xml deserialization creates an error chain. since we only return one error, return the root cause + f: BaseException = e + while f.__cause__ is not None: + f = f.__cause__ + raise UnprocessableEntity(str(f)) from e + except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: + raise UnprocessableEntity(str(e)) from e + return cls.assert_type(rv, expect_type) + + @classmethod + def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: + """ + TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent + running out of memory. but it doesn't state how to check the content length + also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json + schema + In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, + which should limit the maximum content length. 
+ """ + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! Supported types: " + + ", ".join(valid_content_types)) + + if request.mimetype == "application/json": + return cls.json(request.get_data(), expect_type, stripped) + return cls.xml(request.get_data(), expect_type, stripped) + @classmethod + def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: + """ + Deserializes the request body to an instance (or list of instances) + of the expected type. + """ + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! Supported types: " + ", ".join(valid_content_types) + ) + + if request.mimetype == "application/json": + raw_data = request.get_data() + try: + parsed = json.loads(raw_data) + except Exception as e: + raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") + # Prüfe, ob parsed ein Array ist: + if isinstance(parsed, list): + # Für jedes Element wird die Konvertierung angewandt. + return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed] # type: ignore + else: + return cls._convert_single_json_item(parsed, expect_type, stripped) + else: + return cls.xml(request.get_data(), expect_type, stripped) + + @classmethod + def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bool) -> T: + """ + Konvertiert ein einzelnes JSON-Objekt (als Python-Dict) in ein Objekt vom Typ expect_type. + Hierbei wird das Dictionary zuerst wieder in einen JSON-String serialisiert und als Bytes übergeben. 
+ """ + json_bytes = json.dumps(data).encode("utf-8") + return cls.json(json_bytes, expect_type, stripped) + +class Base64URLConverter(werkzeug.routing.UnicodeConverter): + + def to_url(self, value: model.Identifier) -> str: + return super().to_url(base64url_encode(value)) + + def to_python(self, value: str) -> model.Identifier: + value = super().to_python(value) + decoded = base64url_decode(super().to_python(value)) + return decoded + + +class IdShortPathConverter(werkzeug.routing.UnicodeConverter): + id_short_sep = "." + + def to_url(self, value: List[str]) -> str: + return super().to_url(self.id_short_sep.join(value)) + + def to_python(self, value: str) -> List[str]: + id_shorts = super().to_python(value).split(self.id_short_sep) + for id_short in id_shorts: + try: + model.Referable.validate_id_short(id_short) + except (ValueError, model.AASConstraintViolation): + raise BadRequest(f"{id_short} is not a valid id_short!") + return id_shorts diff --git a/server/app/main.py b/server/app/main.py index 816bf621a..fd24f0bfd 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -6,7 +6,7 @@ from basyx.aas.adapter import aasx from basyx.aas.backend.local_file import LocalFileObjectStore -from server.app.http import WSGIApp +from server.app.repository import WSGIApp storage_path = os.getenv("STORAGE_PATH", "/storage") storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") diff --git a/server/app/registry.py b/server/app/registry.py index 58e578c90..e2b47bc3d 100644 --- a/server/app/registry.py +++ b/server/app/registry.py @@ -21,8 +21,8 @@ from basyx.aas import model import server.app.server_model as server_model -from .http import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder -from .http import Base64URLConverter +from .http_api_helpers import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder +from .http_api_helpers import Base64URLConverter from 
typing import Dict, Iterable, Iterator, List, Type, TypeVar, Tuple diff --git a/server/app/http.py b/server/app/repository.py similarity index 67% rename from server/app/http.py rename to server/app/repository.py index 554fb3211..4ba8797d9 100644 --- a/server/app/http.py +++ b/server/app/repository.py @@ -1,9 +1,3 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. -# -# SPDX-License-Identifier: MIT """ This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". However, several features and routes are currently not supported: @@ -34,442 +28,22 @@ - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}/$value` """ -import abc -import base64 -import binascii -import datetime -import enum import io -import json import itertools +from typing import Iterable, Type, Iterator, List, Dict, Union, Callable, Tuple, Optional -from lxml import etree import werkzeug.exceptions import werkzeug.routing -import werkzeug.urls import werkzeug.utils -from werkzeug.exceptions import BadRequest, Conflict, NotFound, UnprocessableEntity -from werkzeug.routing import MapAdapter, Rule, Submount -from werkzeug.wrappers import Request, Response +from werkzeug import Response, Request from werkzeug.datastructures import FileStorage +from werkzeug.exceptions import NotFound, BadRequest, Conflict +from werkzeug.routing import Submount, Rule, MapAdapter -import server.app.server_model as server_model from basyx.aas import model -from basyx.aas.adapter._generic import XML_NS_MAP - from basyx.aas.adapter import aasx - -from basyx.aas.adapter.xml import xml_serialization, XMLConstructables - -from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element -from server.app.adapter.jsonization import 
ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder - -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple - - -@enum.unique -class MessageType(enum.Enum): - UNDEFINED = enum.auto() - INFO = enum.auto() - WARNING = enum.auto() - ERROR = enum.auto() - EXCEPTION = enum.auto() - - def __str__(self): - return self.name.capitalize() - - -class Message: - def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, - timestamp: Optional[datetime.datetime] = None): - self.code: str = code - self.text: str = text - self.message_type: MessageType = message_type - self.timestamp: datetime.datetime = timestamp if timestamp is not None \ - else datetime.datetime.now(datetime.timezone.utc) - - -class Result: - def __init__(self, success: bool, messages: Optional[List[Message]] = None): - if messages is None: - messages = [] - self.success: bool = success - self.messages: List[Message] = messages - - -class ResultToJsonEncoder(ServerAASToJsonEncoder): - @classmethod - def _result_to_json(cls, result: Result) -> Dict[str, object]: - return { - "success": result.success, - "messages": result.messages - } - - @classmethod - def _message_to_json(cls, message: Message) -> Dict[str, object]: - return { - "messageType": message.message_type, - "text": message.text, - "code": message.code, - "timestamp": message.timestamp.isoformat() - } - - def default(self, obj: object) -> object: - if isinstance(obj, Result): - return self._result_to_json(obj) - if isinstance(obj, Message): - return self._message_to_json(obj) - if isinstance(obj, MessageType): - return str(obj) - return super().default(obj) - - -class StrippedResultToJsonEncoder(ResultToJsonEncoder): - stripped = True - - -ResponseData = Union[Result, object, List[object]] - - -class APIResponse(abc.ABC, Response): - @abc.abstractmethod - def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] 
= None, - stripped: bool = False, *args, **kwargs): - super().__init__(*args, **kwargs) - if obj is None: - self.status_code = 204 - else: - self.data = self.serialize(obj, cursor, stripped) - - @abc.abstractmethod - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - pass - - -class JsonResponse(APIResponse): - def __init__(self, *args, content_type="application/json", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - if cursor is None: - data = obj - else: - data = { - "paging_metadata": {"cursor": str(cursor)}, - "result": obj - } - return json.dumps( - data, - cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, - separators=(",", ":") - ) - - -class XmlResponse(APIResponse): - def __init__(self, *args, content_type="application/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - root_elem = etree.Element("response", nsmap=XML_NS_MAP) - if cursor is not None: - root_elem.set("cursor", str(cursor)) - if isinstance(obj, Result): - result_elem = result_to_xml(obj, **XML_NS_MAP) - for child in result_elem: - root_elem.append(child) - elif isinstance(obj, list): - for item in obj: - item_elem = xml_serialization.object_to_xml_element(item) - root_elem.append(item_elem) - else: - obj_elem = xml_serialization.object_to_xml_element(obj) - for child in obj_elem: - root_elem.append(child) - etree.cleanup_namespaces(root_elem) - xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") - return xml_str # type: ignore[return-value] - - -class XmlResponseAlt(XmlResponse): - def __init__(self, *args, content_type="text/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - -def result_to_xml(result: Result, **kwargs) -> etree._Element: - 
result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - -def message_to_xml(message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - "text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] - - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if 
exception.description is not None else "", - MessageType.ERROR) - result = Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) - - -def is_stripped_request(request: Request) -> bool: - return request.args.get("level") == "core" - - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - - -def base64url_decode(data: str) -> str: - try: - # If the requester omits the base64 padding, an exception will be raised. - # However, Python doesn't complain about too much padding, - # thus we simply always append two padding characters (==). - # See also: https://stackoverflow.com/a/49459036/4780052 - decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) - except binascii.Error: - raise BadRequest(f"Encoded data {data} is invalid base64url!") - except UnicodeDecodeError: - raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") - return decoded - - -def base64url_encode(data: str) -> str: - encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") - return encoded - - -class HTTPApiDecoder: - # these are the types we can construct (well, only the ones we need) - type_constructables_map = { - model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, - model.AssetInformation: XMLConstructables.ASSET_INFORMATION, - model.ModelReference: XMLConstructables.MODEL_REFERENCE, - model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, - model.Qualifier: XMLConstructables.QUALIFIER, - model.Submodel: XMLConstructables.SUBMODEL, - model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, - model.Reference: XMLConstructables.REFERENCE, - server_model.AssetAdministrationShellDescriptor: ServerXMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR, - server_model.SubmodelDescriptor: ServerXMLConstructables.SUBMODEL_DESCRIPTOR, - server_model.AssetLink: ServerXMLConstructables.ASSET_LINK, - } - - @classmethod - def 
check_type_supportance(cls, type_: type): - if type_ not in cls.type_constructables_map: - raise TypeError(f"Parsing {type_} is not supported!") - - @classmethod - def assert_type(cls, obj: object, type_: Type[T]) -> T: - if not isinstance(obj, type_): - raise UnprocessableEntity(f"Object {obj!r} is not of type {type_.__name__}!") - return obj - - @classmethod - def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - cls.check_type_supportance(expect_type) - decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \ - else ServerStrictAASFromJsonDecoder - try: - parsed = json.loads(data, cls=decoder) - if not isinstance(parsed, list): - if not expect_single: - raise UnprocessableEntity(f"Expected List[{expect_type.__name__}], got {parsed!r}!") - parsed = [parsed] - elif expect_single: - raise UnprocessableEntity(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") - # TODO: the following is ugly, but necessary because references aren't self-identified objects - # in the json schema - # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines - # that automatically - constructor: Optional[Callable[..., T]] = None - args = [] - if expect_type is model.ModelReference: - constructor = decoder._construct_model_reference # type: ignore[assignment] - args.append(model.Submodel) - elif expect_type is model.AssetInformation: - constructor = decoder._construct_asset_information # type: ignore[assignment] - elif expect_type is model.SpecificAssetId: - constructor = decoder._construct_specific_asset_id # type: ignore[assignment] - elif expect_type is model.Reference: - constructor = decoder._construct_reference # type: ignore[assignment] - elif expect_type is model.Qualifier: - constructor = decoder._construct_qualifier # type: ignore[assignment] - elif expect_type is 
model.AssetAdministrationShellDescriptor: - constructor = decoder._construct_asset_administration_shell_descriptor - elif expect_type is model.SubmodelDescriptor: - constructor = decoder._construct_submodel_descriptor - elif expect_type is server_model.AssetLink: - constructor = decoder._construct_asset_link - - if constructor is not None: - # construct elements that aren't self-identified - return [constructor(obj, *args) for obj in parsed] - - except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: - raise UnprocessableEntity(str(e)) from e - - return [cls.assert_type(obj, expect_type) for obj in parsed] - - @classmethod - def base64urljson_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, expect_single) - - @classmethod - def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def base64urljson(cls, data: str, expect_type: Type[T], stripped: bool) -> T: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: - cls.check_type_supportance(expect_type) - try: - xml_data = io.BytesIO(data) - rv = read_server_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], - stripped=stripped, failsafe=False) - except (KeyError, ValueError) as e: - # xml deserialization creates an error chain. 
since we only return one error, return the root cause - f: BaseException = e - while f.__cause__ is not None: - f = f.__cause__ - raise UnprocessableEntity(str(f)) from e - except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: - raise UnprocessableEntity(str(e)) from e - return cls.assert_type(rv, expect_type) - - @classmethod - def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: - """ - TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent - running out of memory. but it doesn't state how to check the content length - also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json - schema - In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, - which should limit the maximum content length. - """ - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! Supported types: " - + ", ".join(valid_content_types)) - - if request.mimetype == "application/json": - return cls.json(request.get_data(), expect_type, stripped) - return cls.xml(request.get_data(), expect_type, stripped) - @classmethod - def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: - """ - Deserializes the request body to an instance (or list of instances) - of the expected type. - """ - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! 
Supported types: " + ", ".join(valid_content_types) - ) - - if request.mimetype == "application/json": - raw_data = request.get_data() - try: - parsed = json.loads(raw_data) - except Exception as e: - raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") - # Prüfe, ob parsed ein Array ist: - if isinstance(parsed, list): - # Für jedes Element wird die Konvertierung angewandt. - return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed] # type: ignore - else: - return cls._convert_single_json_item(parsed, expect_type, stripped) - else: - return cls.xml(request.get_data(), expect_type, stripped) - - @classmethod - def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bool) -> T: - """ - Konvertiert ein einzelnes JSON-Objekt (als Python-Dict) in ein Objekt vom Typ expect_type. - Hierbei wird das Dictionary zuerst wieder in einen JSON-String serialisiert und als Bytes übergeben. - """ - json_bytes = json.dumps(data).encode("utf-8") - return cls.json(json_bytes, expect_type, stripped) - -class Base64URLConverter(werkzeug.routing.UnicodeConverter): - - def to_url(self, value: model.Identifier) -> str: - return super().to_url(base64url_encode(value)) - - def to_python(self, value: str) -> model.Identifier: - value = super().to_python(value) - decoded = base64url_decode(super().to_python(value)) - return decoded - - -class IdShortPathConverter(werkzeug.routing.UnicodeConverter): - id_short_sep = "." 
- - def to_url(self, value: List[str]) -> str: - return super().to_url(self.id_short_sep.join(value)) - - def to_python(self, value: str) -> List[str]: - id_shorts = super().to_python(value).split(self.id_short_sep) - for id_short in id_shorts: - try: - model.Referable.validate_id_short(id_short) - except (ValueError, model.AASConstraintViolation): - raise BadRequest(f"{id_short} is not a valid id_short!") - return id_shorts +from server.app.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, get_response_type, \ + http_exception_to_response, APIResponse, is_stripped_request class WSGIApp: @@ -492,17 +66,13 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/$reference", methods=["GET"], endpoint=self.get_aas_reference), Rule("/asset-information", methods=["GET"], endpoint=self.get_aas_asset_information), Rule("/asset-information", methods=["PUT"], endpoint=self.put_aas_asset_information), - Rule("/asset-information/thumbnail", methods=["GET", "PUT", "DELETE"], - endpoint=self.not_implemented), + Rule("/asset-information/thumbnail", methods=["GET", "PUT", "DELETE"], endpoint=self.not_implemented), Rule("/submodel-refs", methods=["GET"], endpoint=self.get_aas_submodel_refs), Rule("/submodel-refs", methods=["POST"], endpoint=self.post_aas_submodel_refs), - Rule("/submodel-refs/", methods=["DELETE"], - endpoint=self.delete_aas_submodel_refs_specific), + Rule("/submodel-refs/", methods=["DELETE"], endpoint=self.delete_aas_submodel_refs_specific), Submount("/submodels", [ - Rule("/", methods=["PUT"], - endpoint=self.put_aas_submodel_refs_submodel), - Rule("/", methods=["DELETE"], - endpoint=self.delete_aas_submodel_refs_submodel), + Rule("/", methods=["PUT"], endpoint=self.put_aas_submodel_refs_submodel), + Rule("/", methods=["DELETE"], endpoint=self.delete_aas_submodel_refs_submodel), Rule("/", endpoint=self.aas_submodel_refs_redirect), Rule("//", endpoint=self.aas_submodel_refs_redirect) ]) @@ 
-527,76 +97,51 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/$reference", methods=["GET"], endpoint=self.get_submodels_reference), Rule("/$path", methods=["GET"], endpoint=self.not_implemented), Rule("/submodel-elements", methods=["GET"], endpoint=self.get_submodel_submodel_elements), - Rule("/submodel-elements", methods=["POST"], - endpoint=self.post_submodel_submodel_elements_id_short_path), + Rule("/submodel-elements", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), Submount("/submodel-elements", [ - Rule("/$metadata", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_metadata), - Rule("/$reference", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_reference), + Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_metadata), + Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), Rule("/$path", methods=["GET"], endpoint=self.not_implemented), - Rule("/", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_id_short_path), - Rule("/", methods=["POST"], - endpoint=self.post_submodel_submodel_elements_id_short_path), - Rule("/", methods=["PUT"], - endpoint=self.put_submodel_submodel_elements_id_short_path), - Rule("/", methods=["DELETE"], - endpoint=self.delete_submodel_submodel_elements_id_short_path), + Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_elements_id_short_path), + Rule("/", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), + Rule("/", methods=["PUT"], endpoint=self.put_submodel_submodel_elements_id_short_path), + Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_submodel_elements_id_short_path), Rule("/", methods=["PATCH"], endpoint=self.not_implemented), Submount("/", [ - Rule("/$metadata", methods=["GET"], - 
endpoint=self.get_submodel_submodel_elements_id_short_path_metadata), + Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_id_short_path_metadata), Rule("/$metadata", methods=["PATCH"], endpoint=self.not_implemented), - Rule("/$reference", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_id_short_path_reference), + Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_id_short_path_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), Rule("/$value", methods=["PATCH"], endpoint=self.not_implemented), Rule("/$path", methods=["GET"], endpoint=self.not_implemented), - Rule("/attachment", methods=["GET"], - endpoint=self.get_submodel_submodel_element_attachment), - Rule("/attachment", methods=["PUT"], - endpoint=self.put_submodel_submodel_element_attachment), - Rule("/attachment", methods=["DELETE"], - endpoint=self.delete_submodel_submodel_element_attachment), + Rule("/attachment", methods=["GET"], endpoint=self.get_submodel_submodel_element_attachment), + Rule("/attachment", methods=["PUT"], endpoint=self.put_submodel_submodel_element_attachment), + Rule("/attachment", methods=["DELETE"], endpoint=self.delete_submodel_submodel_element_attachment), Rule("/invoke", methods=["POST"], endpoint=self.not_implemented), Rule("/invoke/$value", methods=["POST"], endpoint=self.not_implemented), Rule("/invoke-async", methods=["POST"], endpoint=self.not_implemented), Rule("/invoke-async/$value", methods=["POST"], endpoint=self.not_implemented), - Rule("/operation-status/", methods=["GET"], - endpoint=self.not_implemented), + Rule("/operation-status/", methods=["GET"], endpoint=self.not_implemented), Submount("/operation-results", [ - Rule("/", methods=["GET"], - endpoint=self.not_implemented), - Rule("//$value", methods=["GET"], - endpoint=self.not_implemented) + Rule("/", methods=["GET"], endpoint=self.not_implemented), + Rule("//$value", methods=["GET"], 
endpoint=self.not_implemented) ]), - Rule("/qualifiers", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/qualifiers", methods=["POST"], - endpoint=self.post_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ - Rule("/", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/", methods=["PUT"], - endpoint=self.put_submodel_submodel_element_qualifiers), - Rule("/", methods=["DELETE"], - endpoint=self.delete_submodel_submodel_element_qualifiers) + Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/", methods=["PUT"], endpoint=self.put_submodel_submodel_element_qualifiers), + Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_submodel_element_qualifiers) ]) ]) ]), - Rule("/qualifiers", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/qualifiers", methods=["POST"], - endpoint=self.post_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ - Rule("/", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/", methods=["PUT"], - endpoint=self.put_submodel_submodel_element_qualifiers), - Rule("/", methods=["DELETE"], - endpoint=self.delete_submodel_submodel_element_qualifiers) + Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/", methods=["PUT"], endpoint=self.put_submodel_submodel_element_qualifiers), + Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_submodel_element_qualifiers) ]) ]) ]), @@ -1210,7 +755,6 @@ def 
delete_concept_description(self, request: Request, url_args: Dict, response_ self.object_store.remove(self._get_concept_description(url_args)) return response_t() - if __name__ == "__main__": from werkzeug.serving import run_simple from basyx.aas.examples.data.example_aas import create_full_example From 6fd1612f8509c0a33fe7edf1f48a50d85e47145b Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 10:51:58 +0200 Subject: [PATCH 16/52] Refactor server_model and move create interfaces folder --- server/app/http_api_helpers.py | 6 +- server/app/{ => interfaces}/discovery.py | 11 +- server/app/{ => interfaces}/registry.py | 4 +- server/app/{ => interfaces}/repository.py | 2 +- server/app/server_model/__init__.py | 2 + .../descriptor.py} | 107 +---------------- server/app/server_model/endpoint.py | 110 ++++++++++++++++++ 7 files changed, 126 insertions(+), 116 deletions(-) rename server/app/{ => interfaces}/discovery.py (95%) rename server/app/{ => interfaces}/registry.py (99%) rename server/app/{ => interfaces}/repository.py (99%) create mode 100644 server/app/server_model/__init__.py rename server/app/{server_model.py => server_model/descriptor.py} (60%) create mode 100644 server/app/server_model/endpoint.py diff --git a/server/app/http_api_helpers.py b/server/app/http_api_helpers.py index 1f6f96770..5d7f882b8 100644 --- a/server/app/http_api_helpers.py +++ b/server/app/http_api_helpers.py @@ -20,14 +20,14 @@ from werkzeug.exceptions import BadRequest, UnprocessableEntity from werkzeug.wrappers import Request, Response -import server.app.server_model as server_model from basyx.aas import model from basyx.aas.adapter._generic import XML_NS_MAP from basyx.aas.adapter.xml import xml_serialization, XMLConstructables -from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element -from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder +import server_model 
+from .adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element +from .adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder from typing import Callable, Dict, List, Optional, Type, TypeVar, Union diff --git a/server/app/discovery.py b/server/app/interfaces/discovery.py similarity index 95% rename from server/app/discovery.py rename to server/app/interfaces/discovery.py index 524b3c123..e05c5d3d5 100644 --- a/server/app/discovery.py +++ b/server/app/interfaces/discovery.py @@ -2,13 +2,14 @@ import werkzeug.exceptions from werkzeug.wrappers import Request, Response -import server.app.server_model from basyx.aas import model -from server.app import server_model -from server.app.adapter.jsonization import ServerAASToJsonEncoder -from .http_api_helpers import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder + +from .. import server_model +from ..adapter.jsonization import ServerAASToJsonEncoder +from ..http_api_helpers import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder + from werkzeug.routing import MapAdapter, Rule, Submount -from .http_api_helpers import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request +from ..http_api_helpers import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set import abc diff --git a/server/app/registry.py b/server/app/interfaces/registry.py similarity index 99% rename from server/app/registry.py rename to server/app/interfaces/registry.py index e2b47bc3d..1202798c2 100644 --- a/server/app/registry.py +++ 
b/server/app/interfaces/registry.py @@ -21,8 +21,8 @@ from basyx.aas import model import server.app.server_model as server_model -from .http_api_helpers import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder -from .http_api_helpers import Base64URLConverter +from ..http_api_helpers import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder +from ..http_api_helpers import Base64URLConverter from typing import Dict, Iterable, Iterator, List, Type, TypeVar, Tuple diff --git a/server/app/repository.py b/server/app/interfaces/repository.py similarity index 99% rename from server/app/repository.py rename to server/app/interfaces/repository.py index 4ba8797d9..aec462301 100644 --- a/server/app/repository.py +++ b/server/app/interfaces/repository.py @@ -42,7 +42,7 @@ from basyx.aas import model from basyx.aas.adapter import aasx -from server.app.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, get_response_type, \ +from ..http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, get_response_type, \ http_exception_to_response, APIResponse, is_stripped_request diff --git a/server/app/server_model/__init__.py b/server/app/server_model/__init__.py new file mode 100644 index 000000000..5712f4a27 --- /dev/null +++ b/server/app/server_model/__init__.py @@ -0,0 +1,2 @@ +from .endpoint import * +from .descriptor import * diff --git a/server/app/server_model.py b/server/app/server_model/descriptor.py similarity index 60% rename from server/app/server_model.py rename to server/app/server_model/descriptor.py index 65fde1161..40d2d59d7 100644 --- a/server/app/server_model.py +++ b/server/app/server_model/descriptor.py @@ -1,113 +1,10 @@ from __future__ import absolute_import import abc -import re -from enum import Enum +from typing import Optional, Iterable, List -from typing import Optional, List, Iterable - -import 
server.app from basyx.aas.model import base, NamespaceSet - - -class AssetLink: - def __init__(self, name: base.LabelType, value: base.Identifier): - if not name: - raise ValueError("AssetLink 'name' must be a non-empty string.") - if not value: - raise ValueError("AssetLink 'value' must be a non-empty string.") - self.name = name - self.value = value - - -class SecurityTypeEnum(Enum): - NONE = "NONE" - RFC_TLSA = "RFC_TLSA" - W3C_DID = "W3C_DID" - - -class SecurityAttributeObject: - def __init__(self, type_: SecurityTypeEnum, key: str, value: str): - - if not isinstance(type_, SecurityTypeEnum): - raise ValueError(f"Invalid security type: {type_}. Must be one of {list(SecurityTypeEnum)}") - if not key or not isinstance(key, str): - raise ValueError("Key must be a non-empty string.") - if not value or not isinstance(value, str): - raise ValueError("Value must be a non-empty string.") - self.type = type_ - self.key = key - self.value = value - - -class ProtocolInformation: - - def __init__( - self, - href: str, - endpoint_protocol: Optional[str] = None, - endpoint_protocol_version: Optional[List[str]] = None, - subprotocol: Optional[str] = None, - subprotocol_body: Optional[str] = None, - subprotocol_body_encoding: Optional[str] = None, - security_attributes: Optional[List[SecurityAttributeObject]] = None - ): - if not href or not isinstance(href, str): - raise ValueError("href must be a non-empty string representing a valid URL.") - - self.href = href - self.endpoint_protocol = endpoint_protocol - self.endpoint_protocol_version = endpoint_protocol_version or [] - self.subprotocol = subprotocol - self.subprotocol_body = subprotocol_body - self.subprotocol_body_encoding = subprotocol_body_encoding - self.security_attributes = security_attributes or [] - - -class Endpoint: - INTERFACE_SHORTNAMES = { - "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", - "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", - "CD-REPOSITORY", "AAS-DISCOVERY" - } - 
VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") - - def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 - - self.interface = interface - self.protocol_information = protocol_information - - @property - def interface(self) -> str: - return self._interface - - @interface.setter - def interface(self, interface: base.NameType): - if interface is None: - raise ValueError("Invalid value for `interface`, must not be `None`") - if not self.is_valid_interface(interface): - raise ValueError(f"Invalid interface format: {interface}. Expected format: '-', ") - - self._interface = interface - - @classmethod - def is_valid_interface(cls, interface: base.NameType) -> bool: - parts = interface.split("-", 1) - if len(parts) != 2: - return False - short_name, version = parts - return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) - - @property - def protocol_information(self) -> ProtocolInformation: - return self._protocol_information - - @protocol_information.setter - def protocol_information(self, protocol_information: ProtocolInformation): - if protocol_information is None: - raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 - - self._protocol_information = protocol_information +from . 
import Endpoint class Descriptor(metaclass=abc.ABCMeta): diff --git a/server/app/server_model/endpoint.py b/server/app/server_model/endpoint.py new file mode 100644 index 000000000..578e298c6 --- /dev/null +++ b/server/app/server_model/endpoint.py @@ -0,0 +1,110 @@ +from __future__ import absolute_import + +import re +from enum import Enum + +from typing import Optional, List + +from basyx.aas.model import base + + +class AssetLink: + def __init__(self, name: base.LabelType, value: base.Identifier): + if not name: + raise ValueError("AssetLink 'name' must be a non-empty string.") + if not value: + raise ValueError("AssetLink 'value' must be a non-empty string.") + self.name = name + self.value = value + + +class SecurityTypeEnum(Enum): + NONE = "NONE" + RFC_TLSA = "RFC_TLSA" + W3C_DID = "W3C_DID" + + +class SecurityAttributeObject: + def __init__(self, type_: SecurityTypeEnum, key: str, value: str): + + if not isinstance(type_, SecurityTypeEnum): + raise ValueError(f"Invalid security type: {type_}. 
Must be one of {list(SecurityTypeEnum)}") + if not key or not isinstance(key, str): + raise ValueError("Key must be a non-empty string.") + if not value or not isinstance(value, str): + raise ValueError("Value must be a non-empty string.") + self.type = type_ + self.key = key + self.value = value + + +class ProtocolInformation: + + def __init__( + self, + href: str, + endpoint_protocol: Optional[str] = None, + endpoint_protocol_version: Optional[List[str]] = None, + subprotocol: Optional[str] = None, + subprotocol_body: Optional[str] = None, + subprotocol_body_encoding: Optional[str] = None, + security_attributes: Optional[List[SecurityAttributeObject]] = None + ): + if not href or not isinstance(href, str): + raise ValueError("href must be a non-empty string representing a valid URL.") + + self.href = href + self.endpoint_protocol = endpoint_protocol + self.endpoint_protocol_version = endpoint_protocol_version or [] + self.subprotocol = subprotocol + self.subprotocol_body = subprotocol_body + self.subprotocol_body_encoding = subprotocol_body_encoding + self.security_attributes = security_attributes or [] + + +class Endpoint: + INTERFACE_SHORTNAMES = { + "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", + "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", + "CD-REPOSITORY", "AAS-DISCOVERY" + } + VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") + + def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 + + self.interface = interface + self.protocol_information = protocol_information + + @property + def interface(self) -> str: + return self._interface + + @interface.setter + def interface(self, interface: base.NameType): + if interface is None: + raise ValueError("Invalid value for `interface`, must not be `None`") + if not self.is_valid_interface(interface): + raise ValueError(f"Invalid interface format: {interface}. 
Expected format: '-', ") + + self._interface = interface + + @classmethod + def is_valid_interface(cls, interface: base.NameType) -> bool: + parts = interface.split("-", 1) + if len(parts) != 2: + return False + short_name, version = parts + return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) + + @property + def protocol_information(self) -> ProtocolInformation: + return self._protocol_information + + @protocol_information.setter + def protocol_information(self, protocol_information: ProtocolInformation): + if protocol_information is None: + raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 + + self._protocol_information = protocol_information + + From eba1d89600fd8b268d1b65c69b97556c10f98d5f Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 11:02:25 +0200 Subject: [PATCH 17/52] Refactor `result_to_xml` and `message_to_xml` --- server/app/http_api_helpers.py | 64 +++++++++++++++++----------------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/server/app/http_api_helpers.py b/server/app/http_api_helpers.py index 5d7f882b8..847a6fb63 100644 --- a/server/app/http_api_helpers.py +++ b/server/app/http_api_helpers.py @@ -138,7 +138,7 @@ def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> if cursor is not None: root_elem.set("cursor", str(cursor)) if isinstance(obj, Result): - result_elem = result_to_xml(obj, **XML_NS_MAP) + result_elem = self.result_to_xml(obj, **XML_NS_MAP) for child in result_elem: root_elem.append(child) elif isinstance(obj, list): @@ -153,43 +153,43 @@ def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") return xml_str # type: ignore[return-value] + @classmethod + def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = 
etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(cls.message_to_xml(message)) + + result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + @classmethod + def message_to_xml(cls, message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + class XmlResponseAlt(XmlResponse): def __init__(self, *args, content_type="text/xml", **kwargs): super().__init__(*args, **kwargs, content_type=content_type) -def result_to_xml(result: Result, **kwargs) -> etree._Element: - result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - -def message_to_xml(message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = 
message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - def get_response_type(request: Request) -> Type[APIResponse]: response_types: Dict[str, Type[APIResponse]] = { "application/json": JsonResponse, From 4acab0d36ed655276c1ce9debe7af1fd3a63d205 Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 11:17:24 +0200 Subject: [PATCH 18/52] Move all response related to `response.py` All response related from http_api_helpers.py was moved to `response.py` --- server/app/http_api_helpers.py | 201 +--------------------------- server/app/interfaces/discovery.py | 7 +- server/app/interfaces/registry.py | 4 +- server/app/interfaces/repository.py | 4 +- server/app/response.py | 201 ++++++++++++++++++++++++++++ 5 files changed, 213 insertions(+), 204 deletions(-) create mode 100644 server/app/response.py diff --git a/server/app/http_api_helpers.py b/server/app/http_api_helpers.py index 847a6fb63..e15ba632a 100644 --- a/server/app/http_api_helpers.py +++ b/server/app/http_api_helpers.py @@ -4,11 +4,8 @@ # the LICENSE file of this project. 
# # SPDX-License-Identifier: MIT -import abc import base64 import binascii -import datetime -import enum import io import json @@ -18,206 +15,17 @@ import werkzeug.urls import werkzeug.utils from werkzeug.exceptions import BadRequest, UnprocessableEntity -from werkzeug.wrappers import Request, Response +from werkzeug.wrappers import Request from basyx.aas import model -from basyx.aas.adapter._generic import XML_NS_MAP -from basyx.aas.adapter.xml import xml_serialization, XMLConstructables +from basyx.aas.adapter.xml import XMLConstructables import server_model from .adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element -from .adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder +from .adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder -from typing import Callable, Dict, List, Optional, Type, TypeVar, Union - -@enum.unique -class MessageType(enum.Enum): - UNDEFINED = enum.auto() - INFO = enum.auto() - WARNING = enum.auto() - ERROR = enum.auto() - EXCEPTION = enum.auto() - - def __str__(self): - return self.name.capitalize() - - -class Message: - def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, - timestamp: Optional[datetime.datetime] = None): - self.code: str = code - self.text: str = text - self.message_type: MessageType = message_type - self.timestamp: datetime.datetime = timestamp if timestamp is not None \ - else datetime.datetime.now(datetime.timezone.utc) - - -class Result: - def __init__(self, success: bool, messages: Optional[List[Message]] = None): - if messages is None: - messages = [] - self.success: bool = success - self.messages: List[Message] = messages - - -class ResultToJsonEncoder(ServerAASToJsonEncoder): - @classmethod - def _result_to_json(cls, result: Result) -> Dict[str, object]: - return { - "success": result.success, - "messages": result.messages - } - - 
@classmethod - def _message_to_json(cls, message: Message) -> Dict[str, object]: - return { - "messageType": message.message_type, - "text": message.text, - "code": message.code, - "timestamp": message.timestamp.isoformat() - } - - def default(self, obj: object) -> object: - if isinstance(obj, Result): - return self._result_to_json(obj) - if isinstance(obj, Message): - return self._message_to_json(obj) - if isinstance(obj, MessageType): - return str(obj) - return super().default(obj) - - -class StrippedResultToJsonEncoder(ResultToJsonEncoder): - stripped = True - - -ResponseData = Union[Result, object, List[object]] - - -class APIResponse(abc.ABC, Response): - @abc.abstractmethod - def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, - stripped: bool = False, *args, **kwargs): - super().__init__(*args, **kwargs) - if obj is None: - self.status_code = 204 - else: - self.data = self.serialize(obj, cursor, stripped) - - @abc.abstractmethod - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - pass - - -class JsonResponse(APIResponse): - def __init__(self, *args, content_type="application/json", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - if cursor is None: - data = obj - else: - data = { - "paging_metadata": {"cursor": str(cursor)}, - "result": obj - } - return json.dumps( - data, - cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, - separators=(",", ":") - ) - - -class XmlResponse(APIResponse): - def __init__(self, *args, content_type="application/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - root_elem = etree.Element("response", nsmap=XML_NS_MAP) - if cursor is not None: - root_elem.set("cursor", str(cursor)) - if isinstance(obj, 
Result): - result_elem = self.result_to_xml(obj, **XML_NS_MAP) - for child in result_elem: - root_elem.append(child) - elif isinstance(obj, list): - for item in obj: - item_elem = xml_serialization.object_to_xml_element(item) - root_elem.append(item_elem) - else: - obj_elem = xml_serialization.object_to_xml_element(obj) - for child in obj_elem: - root_elem.append(child) - etree.cleanup_namespaces(root_elem) - xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") - return xml_str # type: ignore[return-value] - - @classmethod - def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: - result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(cls.message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - @classmethod - def message_to_xml(cls, message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - -class XmlResponseAlt(XmlResponse): - def __init__(self, *args, content_type="text/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - 
"text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] - - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if exception.description is not None else "", - MessageType.ERROR) - result = Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) +from typing import Callable, List, Optional, Type, TypeVar, Union def is_stripped_request(request: Request) -> bool: @@ -406,6 +214,7 @@ def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bo json_bytes = json.dumps(data).encode("utf-8") return cls.json(json_bytes, expect_type, stripped) + class Base64URLConverter(werkzeug.routing.UnicodeConverter): def to_url(self, value: model.Identifier) -> str: diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index e05c5d3d5..d1ee6197b 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -6,15 +6,14 @@ from .. 
import server_model from ..adapter.jsonization import ServerAASToJsonEncoder -from ..http_api_helpers import APIResponse, http_exception_to_response, get_response_type, HTTPApiDecoder from werkzeug.routing import MapAdapter, Rule, Submount -from ..http_api_helpers import Base64URLConverter, APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder, get_response_type, http_exception_to_response, is_stripped_request -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple, Set +from ..http_api_helpers import Base64URLConverter, HTTPApiDecoder +from ..response import get_response_type, http_exception_to_response +from typing import Dict, Iterable, List, TypeVar, Set import abc -import copy from pymongo import MongoClient from pymongo.collection import Collection diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 1202798c2..0181faa1b 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -21,8 +21,8 @@ from basyx.aas import model import server.app.server_model as server_model -from ..http_api_helpers import APIResponse, XmlResponse, JsonResponse, XmlResponseAlt, Message, MessageType, Result, HTTPApiDecoder -from ..http_api_helpers import Base64URLConverter +from ..http_api_helpers import HTTPApiDecoder, Base64URLConverter +from server.app.response import APIResponse, JsonResponse, XmlResponse, XmlResponseAlt, Result, MessageType, Message from typing import Dict, Iterable, Iterator, List, Type, TypeVar, Tuple diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index aec462301..5042bc96e 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -42,8 +42,8 @@ from basyx.aas import model from basyx.aas.adapter import aasx -from ..http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, get_response_type, \ - 
http_exception_to_response, APIResponse, is_stripped_request +from ..http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, is_stripped_request +from server.app.response import APIResponse, get_response_type, http_exception_to_response class WSGIApp: diff --git a/server/app/response.py b/server/app/response.py new file mode 100644 index 000000000..2d8115827 --- /dev/null +++ b/server/app/response.py @@ -0,0 +1,201 @@ +import abc +import datetime +import enum +import json +from typing import Union, List, Optional, Type, Dict + +import werkzeug.exceptions +from lxml import etree +from werkzeug import Response, Request + +from basyx.aas.adapter._generic import XML_NS_MAP +from basyx.aas.adapter.xml import xml_serialization +from server.app.adapter.jsonization import ServerAASToJsonEncoder + + +@enum.unique +class MessageType(enum.Enum): + UNDEFINED = enum.auto() + INFO = enum.auto() + WARNING = enum.auto() + ERROR = enum.auto() + EXCEPTION = enum.auto() + + def __str__(self): + return self.name.capitalize() + + +class Message: + def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, + timestamp: Optional[datetime.datetime] = None): + self.code: str = code + self.text: str = text + self.message_type: MessageType = message_type + self.timestamp: datetime.datetime = timestamp if timestamp is not None \ + else datetime.datetime.now(datetime.timezone.utc) + + +class Result: + def __init__(self, success: bool, messages: Optional[List[Message]] = None): + if messages is None: + messages = [] + self.success: bool = success + self.messages: List[Message] = messages + + +ResponseData = Union[Result, object, List[object]] + + +class APIResponse(abc.ABC, Response): + @abc.abstractmethod + def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, + stripped: bool = False, *args, **kwargs): + super().__init__(*args, **kwargs) + if obj is None: + self.status_code = 204 + else: + self.data = 
self.serialize(obj, cursor, stripped) + + @abc.abstractmethod + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + pass + + +class JsonResponse(APIResponse): + def __init__(self, *args, content_type="application/json", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + if cursor is None: + data = obj + else: + data = { + "paging_metadata": {"cursor": str(cursor)}, + "result": obj + } + return json.dumps( + data, + cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, + separators=(",", ":") + ) + + +class XmlResponse(APIResponse): + def __init__(self, *args, content_type="application/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + root_elem = etree.Element("response", nsmap=XML_NS_MAP) + if cursor is not None: + root_elem.set("cursor", str(cursor)) + if isinstance(obj, Result): + result_elem = self.result_to_xml(obj, **XML_NS_MAP) + for child in result_elem: + root_elem.append(child) + elif isinstance(obj, list): + for item in obj: + item_elem = xml_serialization.object_to_xml_element(item) + root_elem.append(item_elem) + else: + obj_elem = xml_serialization.object_to_xml_element(obj) + for child in obj_elem: + root_elem.append(child) + etree.cleanup_namespaces(root_elem) + xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") + return xml_str # type: ignore[return-value] + + @classmethod + def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(cls.message_to_xml(message)) + + 
result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + @classmethod + def message_to_xml(cls, message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + + +class XmlResponseAlt(XmlResponse): + def __init__(self, *args, content_type="text/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + +class ResultToJsonEncoder(ServerAASToJsonEncoder): + @classmethod + def _result_to_json(cls, result: Result) -> Dict[str, object]: + return { + "success": result.success, + "messages": result.messages + } + @classmethod + def _message_to_json(cls, message: Message) -> Dict[str, object]: + return { + "messageType": message.message_type, + "text": message.text, + "code": message.code, + "timestamp": message.timestamp.isoformat() + } + + def default(self, obj: object) -> object: + if isinstance(obj, Result): + return self._result_to_json(obj) + if isinstance(obj, Message): + return self._message_to_json(obj) + if isinstance(obj, MessageType): + return str(obj) + return super().default(obj) + + +class StrippedResultToJsonEncoder(ResultToJsonEncoder): + stripped = True + + +def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 
400: + message = Message(type(exception).__name__, exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) + + +def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] From 567b5f1fae4f7ea8751a64f23c1e22a5bb819f4b Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 14:59:30 +0200 Subject: [PATCH 19/52] Create base classes for WSGI apps We created parent classes `BaseWSGIApp` and `ObjectStoreWSGIApp` for discovery/registry/repository app classes. Now we can reuse methods defined in parent classes and avoid code duplication. 
--- server/app/http_api_helpers.py | 5 +- server/app/interfaces/base.py | 71 +++++++++++++++++++++ server/app/interfaces/discovery.py | 47 ++------------ server/app/interfaces/registry.py | 98 +++-------------------------- server/app/interfaces/repository.py | 54 +--------------- 5 files changed, 90 insertions(+), 185 deletions(-) create mode 100644 server/app/interfaces/base.py diff --git a/server/app/http_api_helpers.py b/server/app/http_api_helpers.py index e15ba632a..61ee1fabf 100644 --- a/server/app/http_api_helpers.py +++ b/server/app/http_api_helpers.py @@ -7,6 +7,7 @@ import base64 import binascii import io +import itertools import json from lxml import etree @@ -21,11 +22,11 @@ from basyx.aas.adapter.xml import XMLConstructables -import server_model +from . import server_model from .adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element from .adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder -from typing import Callable, List, Optional, Type, TypeVar, Union +from typing import Callable, List, Optional, Type, TypeVar, Union, Iterable, Tuple, Iterator def is_stripped_request(request: Request) -> bool: diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py new file mode 100644 index 000000000..4198b3f92 --- /dev/null +++ b/server/app/interfaces/base.py @@ -0,0 +1,71 @@ +import itertools +from typing import Iterable, Type, Iterator, Tuple + +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.utils +from werkzeug import Response, Request +from werkzeug.exceptions import NotFound, BadRequest +from werkzeug.routing import MapAdapter + +from basyx.aas import model +from basyx.aas.model import AbstractObjectStore +from ..http_api_helpers import T +from server.app.response import get_response_type, http_exception_to_response + + +class BaseWSGIApp: + url_map: werkzeug.routing.Map + + # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ + 
def __call__(self, environ, start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + + @classmethod + def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="0") + try: + limit, cursor = int(limit_str), int(cursor_str) + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise BadRequest("Cursor and limit must be positive integers!") + start_index = cursor + end_index = cursor + limit + paginated_slice = itertools.islice(iterator, start_index, end_index) + return paginated_slice, end_index + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) + try: + response_t = get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) + + # any raised error that leaves this function will cause a 500 internal server error + # so catch raised http exceptions and return them + except werkzeug.exceptions.HTTPException as e: + return http_exception_to_response(e, response_t) + + +class ObjectStoreWSGIApp(BaseWSGIApp): + object_store: AbstractObjectStore + + def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: + for obj in self.object_store: + if isinstance(obj, type_): + obj.update() + yield obj + + def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: + identifiable = self.object_store.get(identifier) + if not isinstance(identifiable, type_): + raise NotFound(f"No {type_.__name__} with {identifier} found!") + identifiable.update() + return identifiable diff --git a/server/app/interfaces/discovery.py 
b/server/app/interfaces/discovery.py index d1ee6197b..345532511 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -1,16 +1,15 @@ -import itertools import werkzeug.exceptions from werkzeug.wrappers import Request, Response from basyx.aas import model +from server.app.interfaces.base import BaseWSGIApp from .. import server_model from ..adapter.jsonization import ServerAASToJsonEncoder -from werkzeug.routing import MapAdapter, Rule, Submount +from werkzeug.routing import Rule, Submount from ..http_api_helpers import Base64URLConverter, HTTPApiDecoder -from ..response import get_response_type, http_exception_to_response -from typing import Dict, Iterable, List, TypeVar, Set +from typing import Dict, List, Set import abc @@ -133,12 +132,7 @@ def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identi ) - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - -class DiscoveryAPI: +class DiscoveryAPI(BaseWSGIApp): def __init__(self, persistent_store: AbstractDiscoveryStore, base_path: str = "/api/v3.0"): self.persistent_store: AbstractDiscoveryStore = persistent_store @@ -159,39 +153,6 @@ def __init__(self, "base64url": Base64URLConverter }, strict_slashes=False) - def __call__(self, environ, start_response) -> Iterable[bytes]: - response: Response = self.handle_request(Request(environ)) - return response(environ, start_response) - - def _get_slice(self, request: Request, iterator): - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="0") - try: - limit, cursor = int(limit_str), int(cursor_str) - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise werkzeug.exceptions.BadRequest("Cursor and limit must be positive integers!") - paginated_slice = itertools.islice(iterator, cursor, cursor + limit) - return paginated_slice, cursor + limit - - - def handle_request(self, request: Request): - map_adapter: MapAdapter = 
self.url_map.bind_to_environ( - request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, - map_adapter=map_adapter) - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False) matching_aas_keys = set() diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 0181faa1b..026a89987 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -8,59 +8,25 @@ This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". 
""" -import itertools - import werkzeug.exceptions import werkzeug.routing import werkzeug.urls import werkzeug.utils -from werkzeug.exceptions import BadRequest, Conflict, NotFound +from werkzeug.exceptions import Conflict, NotFound from werkzeug.routing import MapAdapter, Rule, Submount from werkzeug.wrappers import Request, Response from basyx.aas import model import server.app.server_model as server_model +from server.app.interfaces.base import ObjectStoreWSGIApp + +from ..http_api_helpers import HTTPApiDecoder, Base64URLConverter, is_stripped_request +from server.app.response import APIResponse + +from typing import Dict, Iterator, List, Type, Tuple -from ..http_api_helpers import HTTPApiDecoder, Base64URLConverter -from server.app.response import APIResponse, JsonResponse, XmlResponse, XmlResponseAlt, Result, MessageType, Message - -from typing import Dict, Iterable, Iterator, List, Type, TypeVar, Tuple - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - "text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if exception.description is not None else "", - MessageType.ERROR) - result = 
Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) - -def is_stripped_request(request: Request) -> bool: - return request.args.get("level") == "core" - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" -class RegistryAPI: + +class RegistryAPI(ObjectStoreWSGIApp): def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/api/v3.0"): self.object_store: model.AbstractObjectStore = object_store self.url_map = werkzeug.routing.Map([ @@ -93,36 +59,6 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a "base64url": Base64URLConverter }, strict_slashes=False) - def __call__(self, environ, start_response) -> Iterable[bytes]: - response: Response = self.handle_request(Request(environ)) - return response(environ, start_response) - def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: - identifiable = self.object_store.get(identifier) - if not isinstance(identifiable, type_): - raise NotFound(f"No {type_.__name__} with {identifier} found!") - identifiable.update() - return identifiable - - def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: - for obj in self.object_store: - if isinstance(obj, type_): - obj.update() - yield obj - @classmethod - def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="0") - try: - limit, cursor = int(limit_str), int(cursor_str) - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise BadRequest("Cursor and limit must be positive integers!") - start_index = cursor - end_index = cursor + limit - paginated_slice = itertools.islice(iterator, start_index, end_index) - return paginated_slice, end_index - def _get_descriptors(self, request: "Request") -> 
Tuple[Iterator[server_model.AssetAdministrationShellDescriptor], int]: """ Returns all Asset Administration Shell Descriptors @@ -170,22 +106,6 @@ def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_m def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor: return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) - def handle_request(self, request: Request): - map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) - - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - # ------ AAS REGISTRY ROUTES ------- def get_aas_descriptors_all(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: aas_descriptors, cursor = self._get_descriptors(request) diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index 5042bc96e..03b072c0b 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -42,11 +42,12 @@ from basyx.aas import model from basyx.aas.adapter import aasx +from .base import ObjectStoreWSGIApp from ..http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, is_stripped_request -from server.app.response import APIResponse, get_response_type, http_exception_to_response +from server.app.response import APIResponse -class WSGIApp: +class WSGIApp(ObjectStoreWSGIApp): def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.AbstractSupplementaryFileContainer, base_path: str = "/api/v3.0"): 
self.object_store: model.AbstractObjectStore = object_store @@ -158,24 +159,6 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs "id_short_path": IdShortPathConverter }, strict_slashes=False) - # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ - def __call__(self, environ, start_response) -> Iterable[bytes]: - response: Response = self.handle_request(Request(environ)) - return response(environ, start_response) - - def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: - identifiable = self.object_store.get(identifier) - if not isinstance(identifiable, type_): - raise NotFound(f"No {type_.__name__} with {identifier} found!") - identifiable.update() - return identifiable - - def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: - for obj in self.object_store: - if isinstance(obj, type_): - obj.update() - yield obj - def _resolve_reference(self, reference: model.ModelReference[model.base._RT]) -> model.base._RT: try: return reference.resolve(self.object_store) @@ -238,21 +221,6 @@ def _get_submodel_reference(cls, aas: model.AssetAdministrationShell, submodel_i return ref raise NotFound(f"The AAS {aas!r} doesn't have a submodel reference to {submodel_id!r}!") - @classmethod - def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="0") - try: - limit, cursor = int(limit_str), int(cursor_str) - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise BadRequest("Cursor and limit must be positive integers!") - start_index = cursor - end_index = cursor + limit - paginated_slice = itertools.islice(iterator, start_index, end_index) - return paginated_slice, end_index - def _get_shells(self, request: Request) -> Tuple[Iterator[model.AssetAdministrationShell], int]: aas: 
Iterator[model.AssetAdministrationShell] = self._get_all_obj_of_type(model.AssetAdministrationShell) @@ -307,22 +275,6 @@ def _get_submodel_submodel_elements_id_short_path(self, url_args: Dict) -> model def _get_concept_description(self, url_args): return self._get_obj_ts(url_args["concept_id"], model.ConceptDescription) - def handle_request(self, request: Request): - map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) - - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - # ------ all not implemented ROUTES ------- def not_implemented(self, request: Request, url_args: Dict, **_kwargs) -> Response: raise werkzeug.exceptions.NotImplemented("This route is not implemented!") From 3d15b516640e13a8fd6e6db4bd189909b569864e Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 15:11:38 +0200 Subject: [PATCH 20/52] Refactor `http_api_helpers.py` and `response.py` --- server/app/api_utils/__init__.py | 0 server/app/{ => api_utils}/http_api_helpers.py | 6 +++--- server/app/{ => api_utils}/response.py | 0 server/app/interfaces/base.py | 4 ++-- server/app/interfaces/discovery.py | 2 +- server/app/interfaces/registry.py | 4 ++-- server/app/interfaces/repository.py | 7 +++---- 7 files changed, 11 insertions(+), 12 deletions(-) create mode 100644 server/app/api_utils/__init__.py rename server/app/{ => api_utils}/http_api_helpers.py (97%) rename server/app/{ => api_utils}/response.py (100%) diff --git a/server/app/api_utils/__init__.py b/server/app/api_utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/server/app/http_api_helpers.py b/server/app/api_utils/http_api_helpers.py similarity index 97% rename from server/app/http_api_helpers.py rename to server/app/api_utils/http_api_helpers.py index 61ee1fabf..f01d46168 100644 --- a/server/app/http_api_helpers.py +++ b/server/app/api_utils/http_api_helpers.py @@ -22,9 +22,9 @@ from basyx.aas.adapter.xml import XMLConstructables -from . import server_model -from .adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element -from .adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder +from server.app import server_model +from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element +from server.app.adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder from typing import Callable, List, Optional, Type, TypeVar, Union, Iterable, Tuple, Iterator diff --git a/server/app/response.py b/server/app/api_utils/response.py similarity index 100% rename from server/app/response.py rename to server/app/api_utils/response.py diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index 4198b3f92..88f244664 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -10,8 +10,8 @@ from basyx.aas import model from basyx.aas.model import AbstractObjectStore -from ..http_api_helpers import T -from server.app.response import get_response_type, http_exception_to_response +from server.app.api_utils.http_api_helpers import T +from server.app.api_utils.response import get_response_type, http_exception_to_response class BaseWSGIApp: diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index 345532511..7749de5ae 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -8,7 +8,7 @@ from ..adapter.jsonization import ServerAASToJsonEncoder from werkzeug.routing import Rule, Submount -from 
..http_api_helpers import Base64URLConverter, HTTPApiDecoder +from server.app.api_utils.http_api_helpers import Base64URLConverter, HTTPApiDecoder from typing import Dict, List, Set import abc diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 026a89987..f3ce6c177 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -20,8 +20,8 @@ import server.app.server_model as server_model from server.app.interfaces.base import ObjectStoreWSGIApp -from ..http_api_helpers import HTTPApiDecoder, Base64URLConverter, is_stripped_request -from server.app.response import APIResponse +from server.app.api_utils.http_api_helpers import HTTPApiDecoder, Base64URLConverter, is_stripped_request +from server.app.api_utils.response import APIResponse from typing import Dict, Iterator, List, Type, Tuple diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index 03b072c0b..040e6c859 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -29,8 +29,7 @@ """ import io -import itertools -from typing import Iterable, Type, Iterator, List, Dict, Union, Callable, Tuple, Optional +from typing import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional import werkzeug.exceptions import werkzeug.routing @@ -43,8 +42,8 @@ from basyx.aas import model from basyx.aas.adapter import aasx from .base import ObjectStoreWSGIApp -from ..http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, is_stripped_request -from server.app.response import APIResponse +from server.app.api_utils.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, is_stripped_request +from server.app.api_utils.response import APIResponse class WSGIApp(ObjectStoreWSGIApp): From cb107ed7ab9a98c72db1965f015cb65e359ba616 Mon Sep 17 00:00:00 2001 From: zrgt Date: Wed, 16 Apr 2025 15:18:21 +0200 Subject: [PATCH 21/52] Reformat code with 
PyCharm --- server/app/adapter/jsonization.py | 18 +++--- server/app/api_utils/http_api_helpers.py | 7 ++- server/app/api_utils/response.py | 1 + server/app/interfaces/discovery.py | 27 ++++---- server/app/interfaces/registry.py | 67 ++++++++++++-------- server/app/interfaces/repository.py | 78 ++++++++++++++++-------- server/app/server_model/__init__.py | 2 +- server/app/server_model/endpoint.py | 3 - 8 files changed, 123 insertions(+), 80 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index b6ae1635c..6999302ea 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -1,4 +1,4 @@ -from typing import Dict, Set, Optional, Type, List +from typing import Dict, Set, Optional, Type import server.app.server_model as server_model from basyx.aas import model @@ -11,7 +11,6 @@ import logging from typing import Callable - logger = logging.getLogger(__name__) JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( @@ -138,7 +137,8 @@ def _construct_endpoint(cls, dct: Dict[str, object], @classmethod def _construct_submodel_descriptor( - cls, dct: Dict[str, object], object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: + cls, dct: Dict[str, object], + object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: ret = object_class(id_=_get_ts(dct, 'id', str), endpoints=[]) cls._amend_abstract_attributes(ret, dct) @@ -210,9 +210,10 @@ class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, Ser pass -def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, - ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, - decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: +def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, + replace_existing: bool = False, + 
ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, + decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: return read_aas_json_file_into(object_store=object_store, file=file, replace_existing=replace_existing, ignore_existing=ignore_existing, failsafe=failsafe, stripped=stripped, decoder=decoder, keys_to_types=JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES) @@ -245,7 +246,8 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: return data @classmethod - def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> Dict[str, object]: + def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> \ + Dict[str, object]: """ serialization of an object from class AssetAdministrationShell to json @@ -296,7 +298,7 @@ def _protocol_information_to_json(cls, def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: data = cls._abstract_classes_to_json(obj) data['protocolInformation'] = cls._protocol_information_to_json( - obj.protocol_information) + obj.protocol_information) data['interface'] = obj.interface return data diff --git a/server/app/api_utils/http_api_helpers.py b/server/app/api_utils/http_api_helpers.py index f01d46168..a81f123da 100644 --- a/server/app/api_utils/http_api_helpers.py +++ b/server/app/api_utils/http_api_helpers.py @@ -7,7 +7,6 @@ import base64 import binascii import io -import itertools import json from lxml import etree @@ -26,7 +25,7 @@ from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element from server.app.adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder -from typing import Callable, List, Optional, Type, TypeVar, Union, Iterable, Tuple, Iterator +from typing import Callable, List, Optional, Type, TypeVar, Union def is_stripped_request(request: Request) -> bool: @@ -108,7 
+107,8 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool model.SpecificAssetId: decoder._construct_specific_asset_id, # type: ignore[assignment] model.Reference: decoder._construct_reference, # type: ignore[assignment] model.Qualifier: decoder._construct_qualifier, # type: ignore[assignment] - server_model.AssetAdministrationShellDescriptor: decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] + server_model.AssetAdministrationShellDescriptor: + decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor, # type: ignore[assignment] server_model.AssetLink: decoder._construct_asset_link, # type: ignore[assignment] } @@ -178,6 +178,7 @@ def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> if request.mimetype == "application/json": return cls.json(request.get_data(), expect_type, stripped) return cls.xml(request.get_data(), expect_type, stripped) + @classmethod def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: """ diff --git a/server/app/api_utils/response.py b/server/app/api_utils/response.py index 2d8115827..73c16887d 100644 --- a/server/app/api_utils/response.py +++ b/server/app/api_utils/response.py @@ -148,6 +148,7 @@ def _result_to_json(cls, result: Result) -> Dict[str, object]: "success": result.success, "messages": result.messages } + @classmethod def _message_to_json(cls, message: Message) -> Dict[str, object]: return { diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index 7749de5ae..d96a224e5 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -1,33 +1,32 @@ +import abc +import json +from typing import Dict, List, Set + import werkzeug.exceptions +from pymongo import MongoClient +from pymongo.collection import Collection +from werkzeug.routing import Rule, 
Submount from werkzeug.wrappers import Request, Response from basyx.aas import model +from server.app.api_utils.http_api_helpers import Base64URLConverter, HTTPApiDecoder from server.app.interfaces.base import BaseWSGIApp - from .. import server_model from ..adapter.jsonization import ServerAASToJsonEncoder -from werkzeug.routing import Rule, Submount -from server.app.api_utils.http_api_helpers import Base64URLConverter, HTTPApiDecoder -from typing import Dict, List, Set - -import abc - -from pymongo import MongoClient -from pymongo.collection import Collection - -import json def specific_asset_to_json_obj(asset_id: model.SpecificAssetId) -> dict: # Encode the asset to a JSON string and then decode to a dict. json_str = ServerAASToJsonEncoder().encode(asset_id) return json.loads(json_str) + class AbstractDiscoveryStore(metaclass=abc.ABCMeta): @abc.abstractmethod def __init__(self): pass + class InMemoryDiscoveryStore(AbstractDiscoveryStore): def __init__(self): self.aas_to_assets: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} @@ -74,6 +73,7 @@ def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identi if asset_key in self.asset_to_aas: self.asset_to_aas[asset_key].discard(aas_key) + class MongoDiscoveryStore(AbstractDiscoveryStore): def __init__(self, uri: str = "mongodb://localhost:27017", @@ -153,7 +153,8 @@ def __init__(self, "base64url": Base64URLConverter }, strict_slashes=False) - def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, + **_kwargs) -> Response: asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False) matching_aas_keys = set() for asset_link in asset_links: @@ -184,7 +185,9 @@ def delete_all_asset_links_by_id(self, request: Request, url_args: dict, respons self.persistent_store.asset_to_aas[key].discard(aas_identifier) return 
response_t() + if __name__ == "__main__": from werkzeug.serving import run_simple + run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), use_debugger=True, use_reloader=True) diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index f3ce6c177..938b8e3a0 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -8,6 +8,8 @@ This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". """ +from typing import Dict, Iterator, List, Type, Tuple + import werkzeug.exceptions import werkzeug.routing import werkzeug.urls @@ -16,14 +18,11 @@ from werkzeug.routing import MapAdapter, Rule, Submount from werkzeug.wrappers import Request, Response -from basyx.aas import model import server.app.server_model as server_model -from server.app.interfaces.base import ObjectStoreWSGIApp - +from basyx.aas import model from server.app.api_utils.http_api_helpers import HTTPApiDecoder, Base64URLConverter, is_stripped_request from server.app.api_utils.response import APIResponse - -from typing import Dict, Iterator, List, Type, Tuple +from server.app.interfaces.base import ObjectStoreWSGIApp class RegistryAPI(ObjectStoreWSGIApp): @@ -38,12 +37,17 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a Rule("/", methods=["PUT"], endpoint=self.put_aas_descriptor), Rule("/", methods=["DELETE"], endpoint=self.delete_aas_descriptor), Submount("/", [ - Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors_through_superpath), - Rule("/submodel-descriptors", methods=["POST"], endpoint=self.post_submodel_descriptor_through_superpath), + Rule("/submodel-descriptors", methods=["GET"], + endpoint=self.get_all_submodel_descriptors_through_superpath), + Rule("/submodel-descriptors", methods=["POST"], + endpoint=self.post_submodel_descriptor_through_superpath), Submount("/submodel-descriptors", [ - 
Rule("/", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id_through_superpath), - Rule("/", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id_through_superpath), - Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["GET"], + endpoint=self.get_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["PUT"], + endpoint=self.put_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_descriptor_by_id_through_superpath), ]) ]) ]), @@ -52,14 +56,16 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a Submount("/submodel-descriptors", [ Rule("/", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id), Rule("/", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id), - Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_descriptor_by_id), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_descriptor_by_id), ]) ]) ], converters={ "base64url": Base64URLConverter }, strict_slashes=False) - def _get_descriptors(self, request: "Request") -> Tuple[Iterator[server_model.AssetAdministrationShellDescriptor], int]: + def _get_descriptors(self, request: "Request") -> Tuple[ + Iterator[server_model.AssetAdministrationShellDescriptor], int]: """ Returns all Asset Administration Shell Descriptors """ @@ -107,12 +113,13 @@ def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescr return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) # ------ AAS REGISTRY ROUTES ------- - def get_aas_descriptors_all(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def get_aas_descriptors_all(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: aas_descriptors, cursor = self._get_descriptors(request) return response_t(list(aas_descriptors), 
cursor=cursor) def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - map_adapter: MapAdapter) -> Response: + map_adapter: MapAdapter) -> Response: descriptor = HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False) try: self.object_store.add(descriptor) @@ -124,18 +131,21 @@ def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type }, force_external=True) return response_t(descriptor, status=201, headers={"Location": created_resource_url}) - def get_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def get_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: descriptor = self._get_descriptor(url_args) return response_t(descriptor) - def put_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def put_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: descriptor = self._get_descriptor(url_args) descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, - is_stripped_request(request))) + is_stripped_request(request))) descriptor.commit() return response_t() - def delete_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def delete_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: descriptor = self._get_descriptor(url_args) self.object_store.remove(descriptor) return response_t() @@ -233,19 +243,20 @@ def delete_submodel_descriptor_by_id_through_superpath(self, return response_t() # ------ Submodel REGISTRY ROUTES ------- - def get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def 
get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: submodel_descriptors, cursor = self._get_submodel_descriptors(request) return response_t(list(submodel_descriptors), cursor=cursor, stripped=is_stripped_request(request)) - - def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: submodel_descriptor = self._get_submodel_descriptor(url_args) return response_t(submodel_descriptor, stripped=is_stripped_request(request)) - def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request)) + submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, + is_stripped_request(request)) try: self.object_store.add(submodel_descriptor) except KeyError as e: @@ -256,14 +267,16 @@ def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: }, force_external=True) return response_t(submodel_descriptor, status=201, headers={"Location": created_resource_url}) - - def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: submodel_descriptor = self._get_submodel_descriptor(url_args) - submodel_descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) + submodel_descriptor.update_from( + HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) 
submodel_descriptor.commit() return response_t() - def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: + def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)) return response_t() diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index 040e6c859..6d63c2c01 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -41,9 +41,10 @@ from basyx.aas import model from basyx.aas.adapter import aasx -from .base import ObjectStoreWSGIApp -from server.app.api_utils.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, is_stripped_request +from server.app.api_utils.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, \ + is_stripped_request from server.app.api_utils.response import APIResponse +from .base import ObjectStoreWSGIApp class WSGIApp(ObjectStoreWSGIApp): @@ -66,13 +67,17 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/$reference", methods=["GET"], endpoint=self.get_aas_reference), Rule("/asset-information", methods=["GET"], endpoint=self.get_aas_asset_information), Rule("/asset-information", methods=["PUT"], endpoint=self.put_aas_asset_information), - Rule("/asset-information/thumbnail", methods=["GET", "PUT", "DELETE"], endpoint=self.not_implemented), + Rule("/asset-information/thumbnail", methods=["GET", "PUT", "DELETE"], + endpoint=self.not_implemented), Rule("/submodel-refs", methods=["GET"], endpoint=self.get_aas_submodel_refs), Rule("/submodel-refs", methods=["POST"], endpoint=self.post_aas_submodel_refs), - Rule("/submodel-refs/", methods=["DELETE"], endpoint=self.delete_aas_submodel_refs_specific), + Rule("/submodel-refs/", 
methods=["DELETE"], + endpoint=self.delete_aas_submodel_refs_specific), Submount("/submodels", [ - Rule("/", methods=["PUT"], endpoint=self.put_aas_submodel_refs_submodel), - Rule("/", methods=["DELETE"], endpoint=self.delete_aas_submodel_refs_submodel), + Rule("/", methods=["PUT"], + endpoint=self.put_aas_submodel_refs_submodel), + Rule("/", methods=["DELETE"], + endpoint=self.delete_aas_submodel_refs_submodel), Rule("/", endpoint=self.aas_submodel_refs_redirect), Rule("//", endpoint=self.aas_submodel_refs_redirect) ]) @@ -97,51 +102,71 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/$reference", methods=["GET"], endpoint=self.get_submodels_reference), Rule("/$path", methods=["GET"], endpoint=self.not_implemented), Rule("/submodel-elements", methods=["GET"], endpoint=self.get_submodel_submodel_elements), - Rule("/submodel-elements", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), + Rule("/submodel-elements", methods=["POST"], + endpoint=self.post_submodel_submodel_elements_id_short_path), Submount("/submodel-elements", [ Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_metadata), - Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_reference), + Rule("/$reference", methods=["GET"], + endpoint=self.get_submodel_submodel_elements_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), Rule("/$path", methods=["GET"], endpoint=self.not_implemented), - Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_elements_id_short_path), - Rule("/", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), - Rule("/", methods=["PUT"], endpoint=self.put_submodel_submodel_elements_id_short_path), - Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_submodel_elements_id_short_path), + Rule("/", methods=["GET"], + endpoint=self.get_submodel_submodel_elements_id_short_path), + Rule("/", 
methods=["POST"], + endpoint=self.post_submodel_submodel_elements_id_short_path), + Rule("/", methods=["PUT"], + endpoint=self.put_submodel_submodel_elements_id_short_path), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_submodel_elements_id_short_path), Rule("/", methods=["PATCH"], endpoint=self.not_implemented), Submount("/", [ - Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_id_short_path_metadata), + Rule("/$metadata", methods=["GET"], + endpoint=self.get_submodel_submodel_elements_id_short_path_metadata), Rule("/$metadata", methods=["PATCH"], endpoint=self.not_implemented), - Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_id_short_path_reference), + Rule("/$reference", methods=["GET"], + endpoint=self.get_submodel_submodel_elements_id_short_path_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), Rule("/$value", methods=["PATCH"], endpoint=self.not_implemented), Rule("/$path", methods=["GET"], endpoint=self.not_implemented), - Rule("/attachment", methods=["GET"], endpoint=self.get_submodel_submodel_element_attachment), - Rule("/attachment", methods=["PUT"], endpoint=self.put_submodel_submodel_element_attachment), - Rule("/attachment", methods=["DELETE"], endpoint=self.delete_submodel_submodel_element_attachment), + Rule("/attachment", methods=["GET"], + endpoint=self.get_submodel_submodel_element_attachment), + Rule("/attachment", methods=["PUT"], + endpoint=self.put_submodel_submodel_element_attachment), + Rule("/attachment", methods=["DELETE"], + endpoint=self.delete_submodel_submodel_element_attachment), Rule("/invoke", methods=["POST"], endpoint=self.not_implemented), Rule("/invoke/$value", methods=["POST"], endpoint=self.not_implemented), Rule("/invoke-async", methods=["POST"], endpoint=self.not_implemented), Rule("/invoke-async/$value", methods=["POST"], endpoint=self.not_implemented), - Rule("/operation-status/", methods=["GET"], 
endpoint=self.not_implemented), + Rule("/operation-status/", methods=["GET"], + endpoint=self.not_implemented), Submount("/operation-results", [ Rule("/", methods=["GET"], endpoint=self.not_implemented), Rule("//$value", methods=["GET"], endpoint=self.not_implemented) ]), - Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["GET"], + endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["POST"], + endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ - Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/", methods=["PUT"], endpoint=self.put_submodel_submodel_element_qualifiers), - Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_submodel_element_qualifiers) + Rule("/", methods=["GET"], + endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/", methods=["PUT"], + endpoint=self.put_submodel_submodel_element_qualifiers), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_submodel_element_qualifiers) ]) ]) ]), Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ - Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/", methods=["PUT"], endpoint=self.put_submodel_submodel_element_qualifiers), - Rule("/", methods=["DELETE"], endpoint=self.delete_submodel_submodel_element_qualifiers) + Rule("/", methods=["GET"], + endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/", methods=["PUT"], + endpoint=self.put_submodel_submodel_element_qualifiers), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_submodel_element_qualifiers) ]) ]) ]), @@ 
-706,6 +731,7 @@ def delete_concept_description(self, request: Request, url_args: Dict, response_ self.object_store.remove(self._get_concept_description(url_args)) return response_t() + if __name__ == "__main__": from werkzeug.serving import run_simple from basyx.aas.examples.data.example_aas import create_full_example diff --git a/server/app/server_model/__init__.py b/server/app/server_model/__init__.py index 5712f4a27..5736b5492 100644 --- a/server/app/server_model/__init__.py +++ b/server/app/server_model/__init__.py @@ -1,2 +1,2 @@ -from .endpoint import * from .descriptor import * +from .endpoint import * diff --git a/server/app/server_model/endpoint.py b/server/app/server_model/endpoint.py index 578e298c6..3be6dc061 100644 --- a/server/app/server_model/endpoint.py +++ b/server/app/server_model/endpoint.py @@ -2,7 +2,6 @@ import re from enum import Enum - from typing import Optional, List from basyx.aas.model import base @@ -106,5 +105,3 @@ def protocol_information(self, protocol_information: ProtocolInformation): raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 self._protocol_information = protocol_information - - From 4e1c64753fbc40bdc42206b2de19e15f493dc999 Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 17 Apr 2025 09:35:35 +0200 Subject: [PATCH 22/52] Small fixes --- server/app/adapter/jsonization.py | 2 +- server/app/adapter/xmlization.py | 6 +++--- server/app/main.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 6999302ea..38601f268 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -59,7 +59,7 @@ def _construct_asset_administration_shell_descriptor( cls._amend_abstract_attributes(ret, dct) if 'administration' in dct: ret.administration = cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) - if 'assetkind' in dct: + if 'assetKind' in dct: 
ret.asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] if 'assetType' in dct: ret.asset_type = _get_ts(dct, 'assetType', str) diff --git a/server/app/adapter/xmlization.py b/server/app/adapter/xmlization.py index e13a5ebed..76e9b5056 100644 --- a/server/app/adapter/xmlization.py +++ b/server/app/adapter/xmlization.py @@ -34,11 +34,11 @@ def construct_asset_administration_shell_descriptor(cls, element: etree._Element specific_asset_ids.append({"name": name.strip(), "value": value.strip()}) descriptor = object_class( - id=id_value, + id_=id_value, id_short=id_short, endpoints=endpoints, asset_kind=asset_kind, - specific_asset_ids=specific_asset_ids + specific_asset_id=specific_asset_ids ) cls._amend_abstract_attributes(descriptor, element) @@ -58,7 +58,7 @@ def construct_submodel_descriptor(cls, element: etree._Element, object_class=ser # Hier können weitere optionale Felder verarbeitet werden, z.B. semanticId, etc. submodel_descriptor = object_class( - id=submodel_id, + id_=submodel_id, id_short=id_short, endpoints=endpoints ) diff --git a/server/app/main.py b/server/app/main.py index fd24f0bfd..de9eea065 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -6,7 +6,7 @@ from basyx.aas.adapter import aasx from basyx.aas.backend.local_file import LocalFileObjectStore -from server.app.repository import WSGIApp +from server.app.interfaces.repository import WSGIApp storage_path = os.getenv("STORAGE_PATH", "/storage") storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") From 95b2d5a5f4019967704422e339a4b636dd557a92 Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 17 Apr 2025 09:36:26 +0200 Subject: [PATCH 23/52] Refactor --- server/app/api_utils/http_api_helpers.py | 13 +-- server/app/interfaces/registry.py | 4 +- server/app/interfaces/repository.py | 4 +- server/app/server_model/descriptor.py | 111 ++++++++++------------- 4 files changed, 58 insertions(+), 74 deletions(-) diff --git a/server/app/api_utils/http_api_helpers.py 
b/server/app/api_utils/http_api_helpers.py index a81f123da..276e49d3d 100644 --- a/server/app/api_utils/http_api_helpers.py +++ b/server/app/api_utils/http_api_helpers.py @@ -74,7 +74,7 @@ class HTTPApiDecoder: } @classmethod - def check_type_supportance(cls, type_: type): + def check_type_support(cls, type_: type): if type_ not in cls.type_constructables_map: raise TypeError(f"Parsing {type_} is not supported!") @@ -86,7 +86,7 @@ def assert_type(cls, obj: object, type_: Type[T]) -> T: @classmethod def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - cls.check_type_supportance(expect_type) + cls.check_type_support(expect_type) decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \ else ServerStrictAASFromJsonDecoder try: @@ -128,7 +128,7 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool return [cls.assert_type(obj, expect_type) for obj in parsed] @classmethod - def base64urljson_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + def base64url_json_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: data = base64url_decode(data) return cls.json_list(data, expect_type, stripped, expect_single) @@ -137,13 +137,13 @@ def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> return cls.json_list(data, expect_type, stripped, True)[0] @classmethod - def base64urljson(cls, data: str, expect_type: Type[T], stripped: bool) -> T: + def base64url_json(cls, data: str, expect_type: Type[T], stripped: bool) -> T: data = base64url_decode(data) return cls.json_list(data, expect_type, stripped, True)[0] @classmethod def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: - cls.check_type_supportance(expect_type) + cls.check_type_support(expect_type) try: xml_data = io.BytesIO(data) rv = read_server_aas_xml_element(xml_data, 
cls.type_constructables_map[expect_type], @@ -180,11 +180,12 @@ def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> return cls.xml(request.get_data(), expect_type, stripped) @classmethod - def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: + def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> List[T]: """ Deserializes the request body to an instance (or list of instances) of the expected type. """ + # TODO: Refactor this method and request_body to avoid code duplication valid_content_types = ("application/json", "application/xml", "text/xml") if request.mimetype not in valid_content_types: diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 938b8e3a0..bd988e465 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -81,7 +81,7 @@ def _get_descriptors(self, request: "Request") -> Tuple[ if asset_ids: # Decode und Instanziierung der SpecificAssetIds specific_asset_ids: List[model.SpecificAssetId] = list( - map(lambda asset_id: HTTPApiDecoder.base64urljson(asset_id, model.SpecificAssetId, False), asset_ids) + map(lambda asset_id: HTTPApiDecoder.base64url_json(asset_id, model.SpecificAssetId, False), asset_ids) ) # Filtere anhand der übergebenen SpecificAssetIds descriptors = filter( @@ -103,7 +103,7 @@ def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_m submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) semantic_id = request.args.get("semanticId") if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64urljson( + spec_semantic_id = HTTPApiDecoder.base64url_json( semantic_id, model.Reference, False) # type: ignore[type-abstract] submodel_descriptors = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodel_descriptors) paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) diff 
--git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index 6d63c2c01..49663bbdf 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -257,7 +257,7 @@ def _get_shells(self, request: Request) -> Tuple[Iterator[model.AssetAdministrat # Decode and instantiate SpecificAssetIds # This needs to be a list, otherwise we can only iterate it once. specific_asset_ids: List[model.SpecificAssetId] = list( - map(lambda asset_id: HTTPApiDecoder.base64urljson(asset_id, model.SpecificAssetId, False), asset_ids)) + map(lambda asset_id: HTTPApiDecoder.base64url_json(asset_id, model.SpecificAssetId, False), asset_ids)) # Filter AAS based on these SpecificAssetIds aas = filter(lambda shell: all(specific_asset_id in shell.asset_information.specific_asset_id for specific_asset_id in specific_asset_ids), aas) @@ -275,7 +275,7 @@ def _get_submodels(self, request: Request) -> Tuple[Iterator[model.Submodel], in submodels = filter(lambda sm: sm.id_short == id_short, submodels) semantic_id = request.args.get("semanticId") if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64urljson( + spec_semantic_id = HTTPApiDecoder.base64url_json( semantic_id, model.Reference, False) # type: ignore[type-abstract] submodels = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodels) paginated_submodels, end_index = self._get_slice(request, submodels) diff --git a/server/app/server_model/descriptor.py b/server/app/server_model/descriptor.py index 40d2d59d7..5bbc5996a 100644 --- a/server/app/server_model/descriptor.py +++ b/server/app/server_model/descriptor.py @@ -3,35 +3,18 @@ import abc from typing import Optional, Iterable, List -from basyx.aas.model import base, NamespaceSet +from basyx.aas import model from . 
import Endpoint -class Descriptor(metaclass=abc.ABCMeta): +class Descriptor(model.HasExtension, metaclass=abc.ABCMeta): @abc.abstractmethod - def __init__(self, description: Optional[base.MultiLanguageTextType] = None, - display_name: Optional[base.MultiLanguageNameType] = None, extension: Iterable[base.Extension] = ()): + def __init__(self, description: Optional[model.MultiLanguageTextType] = None, + display_name: Optional[model.MultiLanguageNameType] = None, extension: Iterable[model.Extension] = ()): super().__init__() - self.namespace_element_sets: List[NamespaceSet] = [] - self.description: Optional[base.MultiLanguageTextType] = description - self.display_name: Optional[base.MultiLanguageNameType] = display_name - self.extension = base.NamespaceSet(self, [("name", True)], extension) - - @property - def description(self) -> Optional[base.MultiLanguageTextType]: - return self._description - - @description.setter - def description(self, value: Optional[base.MultiLanguageTextType]): - self._description = value - - @property - def display_name(self) -> Optional[base.MultiLanguageNameType]: - return self._display_name - - @display_name.setter - def display_name(self, value: Optional[base.MultiLanguageNameType]): - self._display_name = value + self.description: Optional[model.MultiLanguageTextType] = description + self.display_name: Optional[model.MultiLanguageNameType] = display_name + self.extension = model.NamespaceSet(self, [("name", True)], extension) def commit(self): pass @@ -54,73 +37,73 @@ def update_from(self, other: "Descriptor", update_source: bool = False): class SubmodelDescriptor(Descriptor): - def __init__(self, id_: base.Identifier, endpoints: List[Endpoint], - administration: Optional[base.AdministrativeInformation] = None, - id_short: Optional[base.NameType] = None, semantic_id: Optional[base.Reference] = None, - supplemental_semantic_id: Iterable[base.Reference] = ()): + def __init__(self, id_: model.Identifier, endpoints: List[Endpoint], + 
administration: Optional[model.AdministrativeInformation] = None, + id_short: Optional[model.NameType] = None, semantic_id: Optional[model.Reference] = None, + supplemental_semantic_id: Iterable[model.Reference] = ()): super().__init__() - self.id: base.Identifier = id_ + self.id: model.Identifier = id_ self.endpoints: List[Endpoint] = endpoints - self.administration: Optional[base.AdministrativeInformation] = administration - self.id_short: Optional[base.NameType] = id_short - self.semantic_id: Optional[base.Reference] = semantic_id - self.supplemental_semantic_id: base.ConstrainedList[base.Reference] = \ - base.ConstrainedList(supplemental_semantic_id) + self.administration: Optional[model.AdministrativeInformation] = administration + self.id_short: Optional[model.NameType] = id_short + self.semantic_id: Optional[model.Reference] = semantic_id + self.supplemental_semantic_id: model.ConstrainedList[model.Reference] = \ + model.ConstrainedList(supplemental_semantic_id) class AssetAdministrationShellDescriptor(Descriptor): def __init__(self, - id_: base.Identifier, - administration: Optional[base.AdministrativeInformation] = None, - asset_kind: Optional[base.AssetKind] = None, - asset_type: Optional[base.Identifier] = None, + id_: model.Identifier, + administration: Optional[model.AdministrativeInformation] = None, + asset_kind: Optional[model.AssetKind] = None, + asset_type: Optional[model.Identifier] = None, endpoints: Optional[List[Endpoint]] = None, - global_asset_id: Optional[base.Identifier] = None, - id_short: Optional[base.NameType] = None, - specific_asset_id: Iterable[base.SpecificAssetId] = (), + global_asset_id: Optional[model.Identifier] = None, + id_short: Optional[model.NameType] = None, + specific_asset_id: Iterable[model.SpecificAssetId] = (), submodel_descriptors: Optional[List[SubmodelDescriptor]] = None, - description: Optional[base.MultiLanguageTextType] = None, - display_name: Optional[base.MultiLanguageNameType] = None, - extension: 
Iterable[base.Extension] = ()): + description: Optional[model.MultiLanguageTextType] = None, + display_name: Optional[model.MultiLanguageNameType] = None, + extension: Iterable[model.Extension] = ()): """AssetAdministrationShellDescriptor - Nur das 'id'-Feld (id_) ist zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. """ super().__init__() - self.administration: Optional[base.AdministrativeInformation] = administration - self.asset_kind: Optional[base.AssetKind] = asset_kind - self.asset_type: Optional[base.Identifier] = asset_type + self.administration: Optional[model.AdministrativeInformation] = administration + self.asset_kind: Optional[model.AssetKind] = asset_kind + self.asset_type: Optional[model.Identifier] = asset_type self.endpoints: Optional[ List[Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt - self.global_asset_id: Optional[base.Identifier] = global_asset_id - self.id_short: Optional[base.NameType] = id_short - self.id: base.Identifier = id_ - self._specific_asset_id: base.ConstrainedList[base.SpecificAssetId] = base.ConstrainedList( + self.global_asset_id: Optional[model.Identifier] = global_asset_id + self.id_short: Optional[model.NameType] = id_short + self.id: model.Identifier = id_ + self._specific_asset_id: model.ConstrainedList[model.SpecificAssetId] = model.ConstrainedList( specific_asset_id, item_set_hook=self._check_constraint_set_spec_asset_id, item_del_hook=self._check_constraint_del_spec_asset_id ) self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] - self.description: Optional[base.MultiLanguageTextType] = description - self.display_name: Optional[base.MultiLanguageNameType] = display_name - self.extension = base.NamespaceSet(self, [("name", True)], extension) + self.description: Optional[model.MultiLanguageTextType] = description + self.display_name: Optional[model.MultiLanguageNameType] = display_name + self.extension = 
model.NamespaceSet(self, [("name", True)], extension) @property - def specific_asset_id(self) -> base.ConstrainedList[base.SpecificAssetId]: + def specific_asset_id(self) -> model.ConstrainedList[model.SpecificAssetId]: return self._specific_asset_id @specific_asset_id.setter - def specific_asset_id(self, specific_asset_id: Iterable[base.SpecificAssetId]) -> None: + def specific_asset_id(self, specific_asset_id: Iterable[model.SpecificAssetId]) -> None: # constraints are checked via _check_constraint_set_spec_asset_id() in this case self._specific_asset_id[:] = specific_asset_id - def _check_constraint_set_spec_asset_id(self, items_to_replace: List[base.SpecificAssetId], - new_items: List[base.SpecificAssetId], - old_list: List[base.SpecificAssetId]) -> None: - self._validate_aasd_131(self.global_asset_id, - len(old_list) - len(items_to_replace) + len(new_items) > 0) + def _check_constraint_set_spec_asset_id(self, items_to_replace: List[model.SpecificAssetId], + new_items: List[model.SpecificAssetId], + old_list: List[model.SpecificAssetId]) -> None: + model.AssetInformation._validate_aasd_131(self.global_asset_id, + len(old_list) - len(items_to_replace) + len(new_items) > 0) - def _check_constraint_del_spec_asset_id(self, _item_to_del: base.SpecificAssetId, - old_list: List[base.SpecificAssetId]) -> None: - self._validate_aasd_131(self.global_asset_id, len(old_list) > 1) + def _check_constraint_del_spec_asset_id(self, _item_to_del: model.SpecificAssetId, + old_list: List[model.SpecificAssetId]) -> None: + model.AssetInformation._validate_aasd_131(self.global_asset_id, len(old_list) > 1) From b65c420f27f4d791e0a640e8122bef1f2465f09b Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 17 Apr 2025 09:36:52 +0200 Subject: [PATCH 24/52] Refactor --- Discovery Server/README.md | 12 +-- server/app/interfaces/discovery.py | 138 +++++++++++++++++------------ 2 files changed, 85 insertions(+), 65 deletions(-) diff --git a/Discovery Server/README.md b/Discovery Server/README.md 
index fe66542ce..0b0938167 100644 --- a/Discovery Server/README.md +++ b/Discovery Server/README.md @@ -17,12 +17,12 @@ This implementation supports: ## Features -| Feature | Description | -|---------------------------------------------|-----------------------------------------------------------------------------| -| `add_asset_links` | Register specific asset identifiers linked to an AAS | -| `get_asset_links_by_aas` | Retrieve asset links associated with an AAS | -| `search_aas_by_asset_link` | Find AAS identifiers by providing asset link values | -| `remove_asset_links_for_aas` | Delete all asset links associated with a specific AAS | +| Feature | Description | +|---------------------------------------------|-------------------------------------------------------| +| `add_asset_links` | Register specific asset ids linked to an AAS | +| `get_all_specific_asset_ids_by_aas_id` | Retrieve specific asset ids associated with an AAS | +| `search_aas_by_asset_link` | Find AAS identifiers by providing asset link values | +| `remove_asset_links_for_aas` | Delete all asset links associated with a specific AAS | ## Specification Compliance diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index d96a224e5..da7b0bfb5 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -1,6 +1,6 @@ import abc import json -from typing import Dict, List, Set +from typing import Dict, List, Set, Any import werkzeug.exceptions from pymongo import MongoClient @@ -14,64 +14,85 @@ from .. import server_model from ..adapter.jsonization import ServerAASToJsonEncoder +class AbstractDiscoveryStore(metaclass=abc.ABCMeta): + aas_id_to_asset_ids: Any + asset_id_to_aas_ids: Any -def specific_asset_to_json_obj(asset_id: model.SpecificAssetId) -> dict: - # Encode the asset to a JSON string and then decode to a dict. 
- json_str = ServerAASToJsonEncoder().encode(asset_id) - return json.loads(json_str) + @abc.abstractmethod + def __init__(self): + pass + @abc.abstractmethod + def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: + pass + + @abc.abstractmethod + def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: + pass + + @abc.abstractmethod + def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: + pass + + @abc.abstractmethod + def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: + pass + + @abc.abstractmethod + def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + pass -class AbstractDiscoveryStore(metaclass=abc.ABCMeta): @abc.abstractmethod - def __init__(self): + def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: pass + class InMemoryDiscoveryStore(AbstractDiscoveryStore): def __init__(self): - self.aas_to_assets: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} - self.asset_to_aas: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} + self.aas_id_to_asset_ids: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} + self.asset_id_to_aas_ids: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} - def get_asset_links_by_aas(self, aas_identifier: model.Identifier) -> List[dict]: - key = aas_identifier - return list(self.aas_to_assets.get(key, set())) + def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: + return list(self.aas_id_to_asset_ids.get(aas_id, set())) - def add_asset_links(self, aas_identifier: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: - key = aas_identifier - serialized_assets = [specific_asset_to_json_obj(aid) for aid in asset_ids] - if key in self.aas_to_assets: + def 
add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, + asset_ids: List[model.SpecificAssetId]) -> None: + serialized_assets = [ServerAASToJsonEncoder.default(asset_id) for asset_id in asset_ids] + if aas_id in self.aas_id_to_asset_ids: for asset in serialized_assets: - if asset not in self.aas_to_assets[key]: - self.aas_to_assets[key].append(asset) + if asset not in self.aas_id_to_asset_ids[aas_id]: + self.aas_id_to_asset_ids[aas_id].append(asset) else: - self.aas_to_assets[key] = serialized_assets[:] + self.aas_id_to_asset_ids[aas_id] = serialized_assets[:] - def delete_asset_links_by_aas(self, aas_identifier: model.Identifier) -> None: - key = aas_identifier - if key in self.aas_to_assets: - del self.aas_to_assets[key] + def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: + key = aas_id + if key in self.aas_id_to_asset_ids: + del self.aas_id_to_asset_ids[key] - def search_aas_by_asset_link(self, asset_link: server_model.AssetLink) -> List[str]: + def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: result = [] - for asset_key, aas_ids in self.asset_to_aas.items(): + for asset_key, aas_ids in self.asset_id_to_aas_ids.items(): expected_key = f"{asset_link.name}:{asset_link.value}" if asset_key == expected_key: result.extend(list(aas_ids)) return result - def add_aas_for_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: asset_key = f"{asset_id.name}:{asset_id.value}" - aas_key = aas_identifier - if asset_key in self.asset_to_aas: - self.asset_to_aas[asset_key].add(aas_key) + aas_key = aas_id + # FIXME + if asset_key in self.asset_id_to_aas_ids: + self.asset_id_to_aas_ids[asset_key].add(aas_key) else: - self.asset_to_aas[asset_key] = {aas_key} + self.asset_id_to_aas_ids[asset_key] = {aas_key} - def 
remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: asset_key = f"{asset_id.name}:{asset_id.value}" - aas_key = aas_identifier - if asset_key in self.asset_to_aas: - self.asset_to_aas[asset_key].discard(aas_key) + aas_key = aas_id + if asset_key in self.asset_id_to_aas_ids: + self.asset_id_to_aas_ids[asset_key].discard(aas_key) class MongoDiscoveryStore(AbstractDiscoveryStore): @@ -87,26 +108,26 @@ def __init__(self, # Create an index for fast asset reverse lookups. self.coll_asset_to_aas.create_index("_id") - def get_asset_links_by_aas(self, aas_identifier: model.Identifier) -> List[dict]: - key = aas_identifier + def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: + key = aas_id doc = self.coll_aas_to_assets.find_one({"_id": key}) return doc["asset_ids"] if doc and "asset_ids" in doc else [] - def add_asset_links(self, aas_identifier: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: - key = aas_identifier + def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: + key = aas_id # Convert each SpecificAssetId using the serialization helper. 
- serializable_assets = [specific_asset_to_json_obj(aid) for aid in asset_ids] + serializable_assets = [ServerAASToJsonEncoder.default(asset_id) for asset_id in asset_ids] self.coll_aas_to_assets.update_one( {"_id": key}, {"$addToSet": {"asset_ids": {"$each": serializable_assets}}}, upsert=True ) - def delete_asset_links_by_aas(self, aas_identifier: model.Identifier) -> None: - key = aas_identifier + def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: + key = aas_id self.coll_aas_to_assets.delete_one({"_id": key}) - def search_aas_by_asset_link(self, asset_link: server_model.AssetLink) -> List[str]: + def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: # Query MongoDB for specificAssetIds where 'name' and 'value' match doc = self.coll_asset_to_aas.find_one({ "name": asset_link.name, @@ -114,18 +135,17 @@ def search_aas_by_asset_link(self, asset_link: server_model.AssetLink) -> List[s }) return doc["aas_ids"] if doc and "aas_ids" in doc else [] - def add_aas_for_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: - asset_key = str(specific_asset_to_json_obj(asset_id)) - aas_key = aas_identifier + def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = str(ServerAASToJsonEncoder.default(asset_id)) self.coll_asset_to_aas.update_one( {"_id": asset_key}, - {"$addToSet": {"aas_ids": aas_key}}, + {"$addToSet": {"aas_ids": aas_id}}, upsert=True ) - def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: - asset_key = str(specific_asset_to_json_obj(asset_id)) - aas_key = aas_identifier + def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = str(ServerAASToJsonEncoder.default(asset_id)) + aas_key = aas_id self.coll_asset_to_aas.update_one( {"_id": asset_key}, {"$pull": 
{"aas_ids": aas_key}} @@ -142,7 +162,7 @@ def __init__(self, endpoint=self.search_all_aas_ids_by_asset_link), Submount("/lookup/shells", [ Rule("/", methods=["GET"], - endpoint=self.get_all_asset_links_by_id), + endpoint=self.get_all_specific_asset_ids_by_aas_id), Rule("/", methods=["POST"], endpoint=self.post_all_asset_links_by_id), Rule("/", methods=["DELETE"], @@ -158,31 +178,31 @@ def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, res asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False) matching_aas_keys = set() for asset_link in asset_links: - aas_keys = self.persistent_store.search_aas_by_asset_link(asset_link) + aas_keys = self.persistent_store.search_aas_ids_by_asset_link(asset_link) matching_aas_keys.update(aas_keys) matching_aas_keys = list(matching_aas_keys) paginated_slice, cursor = self._get_slice(request, matching_aas_keys) return response_t(list(paginated_slice), cursor=cursor) - def get_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + def get_all_specific_asset_ids_by_aas_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: aas_identifier = url_args.get("aas_id") - asset_ids = self.persistent_store.get_asset_links_by_aas(aas_identifier) + asset_ids = self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier) return response_t(asset_ids) def post_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: aas_identifier = url_args.get("aas_id") specific_asset_ids = HTTPApiDecoder.request_body_list(request, model.SpecificAssetId, False) - self.persistent_store.add_asset_links(aas_identifier, specific_asset_ids) + self.persistent_store.add_specific_asset_ids_to_aas(aas_identifier, specific_asset_ids) for asset_id in specific_asset_ids: - self.persistent_store.add_aas_for_asset_link(asset_id, aas_identifier) - updated = {aas_identifier: 
self.persistent_store.get_asset_links_by_aas(aas_identifier)} + self.persistent_store._add_aas_id_to_specific_asset_id(asset_id, aas_identifier) + updated = {aas_identifier: self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier)} return response_t(updated) def delete_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: aas_identifier = url_args.get("aas_id") - self.persistent_store.delete_asset_links_by_aas(aas_identifier) - for key in list(self.persistent_store.asset_to_aas.keys()): - self.persistent_store.asset_to_aas[key].discard(aas_identifier) + self.persistent_store.delete_specific_asset_ids_by_aas_id(aas_identifier) + for key in list(self.persistent_store.asset_id_to_aas_ids.keys()): + self.persistent_store.asset_id_to_aas_ids[key].discard(aas_identifier) return response_t() From b0f79d698eb66ff3c1b2ba14dc985c9cef329eac Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Thu, 17 Apr 2025 12:36:31 +0200 Subject: [PATCH 25/52] Refactor some methods in registry.py and fix some typos --- server/app/interfaces/discovery.py | 4 ++ server/app/interfaces/registry.py | 70 ++++++++++++--------------- server/app/server_model/descriptor.py | 2 +- 3 files changed, 36 insertions(+), 40 deletions(-) diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index da7b0bfb5..639925175 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -1,3 +1,7 @@ +""" +This module implements the Discovery interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'. 
+""" + import abc import json from typing import Dict, List, Set, Any diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index bd988e465..24fd19cca 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -1,11 +1,5 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. -# -# SPDX-License-Identifier: MIT """ -This module implements the "Specification of the Asset Administration Shell Part 2 Application Programming Interfaces". +This module implements the Registry interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'. """ from typing import Dict, Iterator, List, Type, Tuple @@ -30,12 +24,12 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a self.object_store: model.AbstractObjectStore = object_store self.url_map = werkzeug.routing.Map([ Submount(base_path, [ - Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_aas_descriptors_all), + Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_all_aas_descriptors), Rule("/shell-descriptors", methods=["POST"], endpoint=self.post_aas_descriptor), Submount("/shell-descriptors", [ - Rule("/", methods=["GET"], endpoint=self.get_aas_descriptor), - Rule("/", methods=["PUT"], endpoint=self.put_aas_descriptor), - Rule("/", methods=["DELETE"], endpoint=self.delete_aas_descriptor), + Rule("/", methods=["GET"], endpoint=self.get_aas_descriptor_by_id), + Rule("/", methods=["PUT"], endpoint=self.put_aas_descriptor_by_id), + Rule("/", methods=["DELETE"], endpoint=self.delete_aas_descriptor_by_id), Submount("/", [ Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors_through_superpath), @@ -64,11 +58,9 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: 
str = "/a "base64url": Base64URLConverter }, strict_slashes=False) - def _get_descriptors(self, request: "Request") -> Tuple[ + def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ Iterator[server_model.AssetAdministrationShellDescriptor], int]: - """ - Returns all Asset Administration Shell Descriptors - """ + descriptors: Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( server_model.AssetAdministrationShellDescriptor ) @@ -78,14 +70,14 @@ def _get_descriptors(self, request: "Request") -> Tuple[ descriptors = filter(lambda desc: desc.id_short == id_short, descriptors) asset_ids = request.args.getlist("assetIds") - if asset_ids: - # Decode und Instanziierung der SpecificAssetIds + if asset_ids is not None: + # Decode and instantiate SpecificAssetIds specific_asset_ids: List[model.SpecificAssetId] = list( map(lambda asset_id: HTTPApiDecoder.base64url_json(asset_id, model.SpecificAssetId, False), asset_ids) ) - # Filtere anhand der übergebenen SpecificAssetIds + # Filter AAS based on these SpecificAssetIds descriptors = filter( - lambda desc: all(specific_asset_id in desc.asset_information.specific_asset_id + lambda desc: all(specific_asset_id in desc.specific_asset_id for specific_asset_id in specific_asset_ids), descriptors ) @@ -93,10 +85,10 @@ def _get_descriptors(self, request: "Request") -> Tuple[ paginated_descriptors, end_index = self._get_slice(request, descriptors) return paginated_descriptors, end_index - def _get_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: + def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor) - def _get_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: + def _get_all_submodel_descriptors(self, request: Request) -> 
Tuple[Iterator[server_model.SubmodelDescriptor], int]: submodel_descriptors: Iterator[model.Submodel] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) id_short = request.args.get("idShort") if id_short is not None: @@ -113,9 +105,9 @@ def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescr return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) # ------ AAS REGISTRY ROUTES ------- - def get_aas_descriptors_all(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: - aas_descriptors, cursor = self._get_descriptors(request) + aas_descriptors, cursor = self._get_all_aas_descriptors(request) return response_t(list(aas_descriptors), cursor=cursor) def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -126,27 +118,27 @@ def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type except KeyError as e: raise Conflict(f"AssetAdministrationShellDescriptor with Identifier {descriptor.id} already exists!") from e descriptor.commit() - created_resource_url = map_adapter.build(self.get_aas_descriptor, { + created_resource_url = map_adapter.build(self.get_aas_descriptor_by_id, { "aas_id": descriptor.id }, force_external=True) return response_t(descriptor, status=201, headers={"Location": created_resource_url}) - def get_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - descriptor = self._get_descriptor(url_args) + def get_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + descriptor = self._get_aas_descriptor(url_args) return response_t(descriptor) - def put_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - descriptor = 
self._get_descriptor(url_args) + def put_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + descriptor = self._get_aas_descriptor(url_args) descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, is_stripped_request(request))) descriptor.commit() return response_t() - def delete_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - descriptor = self._get_descriptor(url_args) + def delete_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + descriptor = self._get_aas_descriptor(url_args) self.object_store.remove(descriptor) return response_t() @@ -156,7 +148,7 @@ def get_all_submodel_descriptors_through_superpath(self, response_t: Type[ APIResponse], **_kwargs) -> Response: - aas_descriptor = self._get_descriptor(url_args) + aas_descriptor = self._get_aas_descriptor(url_args) submodel_descriptors, cursor = self._get_slice(request, aas_descriptor.submodel_descriptors) return response_t(list(submodel_descriptors), cursor=cursor) @@ -168,7 +160,7 @@ def get_submodel_descriptor_by_id_through_superpath(self, Type[ APIResponse], **_kwargs) -> Response: - aas_descriptor = self._get_descriptor(url_args) + aas_descriptor = self._get_aas_descriptor(url_args) submodel_id = url_args["submodel_id"] submodel_descriptor = next( (sd for sd in aas_descriptor.submodel_descriptors if @@ -184,7 +176,7 @@ def post_submodel_descriptor_through_superpath(self, response_t: Type[ APIResponse], map_adapter: MapAdapter) -> Response: - aas_descriptor = self._get_descriptor(url_args) + aas_descriptor = self._get_aas_descriptor(url_args) submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request( @@ -210,7 +202,7 @@ def put_submodel_descriptor_by_id_through_superpath(self, Type[ APIResponse], 
**_kwargs) -> Response: - aas_descriptor = self._get_descriptor(url_args) + aas_descriptor = self._get_aas_descriptor(url_args) submodel_id = url_args["submodel_id"] submodel_descriptor = next( (sd for sd in aas_descriptor.submodel_descriptors if @@ -232,7 +224,7 @@ def delete_submodel_descriptor_by_id_through_superpath(self, Type[ APIResponse], **_kwargs) -> Response: - aas_descriptor = self._get_descriptor(url_args) + aas_descriptor = self._get_aas_descriptor(url_args) submodel_id = url_args["submodel_id"] submodel_descriptor = next( (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None) @@ -245,7 +237,7 @@ def delete_submodel_descriptor_by_id_through_superpath(self, # ------ Submodel REGISTRY ROUTES ------- def get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: - submodel_descriptors, cursor = self._get_submodel_descriptors(request) + submodel_descriptors, cursor = self._get_all_submodel_descriptors(request) return response_t(list(submodel_descriptors), cursor=cursor, stripped=is_stripped_request(request)) def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], diff --git a/server/app/server_model/descriptor.py b/server/app/server_model/descriptor.py index 5bbc5996a..6b57bf63f 100644 --- a/server/app/server_model/descriptor.py +++ b/server/app/server_model/descriptor.py @@ -4,7 +4,7 @@ from typing import Optional, Iterable, List from basyx.aas import model -from . 
import Endpoint +from server.app.server_model.endpoint import Endpoint class Descriptor(model.HasExtension, metaclass=abc.ABCMeta): From 7c8fbe2582e7d5a1177557882bae8685b26ddc55 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Thu, 17 Apr 2025 14:26:38 +0200 Subject: [PATCH 26/52] remove xmlization for Registry and Discovery classes --- server/app/adapter/xmlization.py | 162 ------------------------------- 1 file changed, 162 deletions(-) delete mode 100644 server/app/adapter/xmlization.py diff --git a/server/app/adapter/xmlization.py b/server/app/adapter/xmlization.py deleted file mode 100644 index 76e9b5056..000000000 --- a/server/app/adapter/xmlization.py +++ /dev/null @@ -1,162 +0,0 @@ -import enum -from typing import Optional, Type, Callable, Any, List, Dict -from lxml import etree - -from basyx.aas.adapter._generic import PathOrIO -from basyx.aas.adapter.xml import XMLConstructables, AASFromXmlDecoder -from basyx.aas.adapter.xml.xml_deserialization import _parse_xml_document, _failsafe_construct, \ - _child_text_mandatory, NS_AAS, read_aas_xml_element -import server.app.server_model as server_model - - -class ServerAASFromXmlDecoder(AASFromXmlDecoder): - - @classmethod - def construct_asset_administration_shell_descriptor(cls, element: etree._Element, - object_class=server_model.AssetAdministrationShellDescriptor, - **_kwargs: Any) -> server_model.AssetAdministrationShellDescriptor: - id_value = _child_text_mandatory(element, NS_AAS + "id") - id_short = _child_text_mandatory(element, NS_AAS + "idShort") - endpoints_elem = element.find(NS_AAS + "endpoints") - endpoints: List[str] = [] - if endpoints_elem is not None: - endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] - - asset_kind = _child_text_mandatory(element, NS_AAS + "assetKind") - - specific_asset_ids_elem = element.find(NS_AAS + "specificAssetIds") - specific_asset_ids: List[Dict[str, Any]] = [] - if specific_asset_ids_elem is not None: - for 
sid_elem in specific_asset_ids_elem.findall(NS_AAS + "specificAssetId"): - name = sid_elem.findtext(NS_AAS + "name") - value = sid_elem.findtext(NS_AAS + "value") - if name is not None and value is not None: - specific_asset_ids.append({"name": name.strip(), "value": value.strip()}) - - descriptor = object_class( - id_=id_value, - id_short=id_short, - endpoints=endpoints, - asset_kind=asset_kind, - specific_asset_id=specific_asset_ids - ) - - cls._amend_abstract_attributes(descriptor, element) - return descriptor - - @classmethod - def construct_submodel_descriptor(cls, element: etree._Element, object_class=server_model.SubmodelDescriptor, - **_kwargs: Any) -> server_model.SubmodelDescriptor: - submodel_id = _child_text_mandatory(element, NS_AAS + "id") - id_short = _child_text_mandatory(element, NS_AAS + "idShort") - - endpoints_elem = element.find(NS_AAS + "endpoints") - endpoints: List[str] = [] - if endpoints_elem is not None: - endpoints = [child.text.strip() for child in endpoints_elem.findall(NS_AAS + "endpoint") if child.text] - - # Hier können weitere optionale Felder verarbeitet werden, z.B. semanticId, etc. - - submodel_descriptor = object_class( - id_=submodel_id, - id_short=id_short, - endpoints=endpoints - ) - - cls._amend_abstract_attributes(submodel_descriptor, element) - return submodel_descriptor - - -class ServerStrictAASFromXmlDecoder(ServerAASFromXmlDecoder): - """ - Non-failsafe XML decoder. Encountered errors won't be caught and abort parsing. - """ - failsafe = False - - -class ServerStrippedAASFromXmlDecoder(ServerAASFromXmlDecoder): - """ - Decoder for stripped XML elements. Used in the HTTP adapter. - """ - stripped = True - - -class ServerStrictStrippedAASFromXmlDecoder(ServerStrictAASFromXmlDecoder, ServerStrippedAASFromXmlDecoder): - """ - Non-failsafe decoder for stripped XML elements. 
- """ - pass - - -@enum.unique -class ServerXMLConstructables(enum.Enum): - ASSET_ADMINISTRATION_SHELL_DESCRIPTOR = enum.auto() - SUBMODEL_DESCRIPTOR = enum.auto() - ASSET_LINK = enum.auto() - - -def _select_server_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[ServerAASFromXmlDecoder]]) \ - -> Type[ServerAASFromXmlDecoder]: - """ - Returns the correct decoder based on the parameters failsafe and stripped. If a decoder class is given, failsafe - and stripped are ignored. - - :param failsafe: If true, a failsafe decoder is selected. Ignored if a decoder class is specified. - :param stripped: If true, a decoder for parsing stripped XML elements is selected. Ignored if a decoder class is - specified. - :param decoder: Is returned, if specified. - :return: A AASFromXmlDecoder (sub)class. - """ - if decoder is not None: - return decoder - if failsafe: - if stripped: - return ServerStrippedAASFromXmlDecoder - return ServerAASFromXmlDecoder - else: - if stripped: - return ServerStrictStrippedAASFromXmlDecoder - return ServerStrictAASFromXmlDecoder - - -def read_server_aas_xml_element(file: PathOrIO, construct: XMLConstructables, failsafe: bool = True, - stripped: bool = False, - decoder: Optional[Type[AASFromXmlDecoder]] = None, **constructor_kwargs) -> Optional[ - object]: - """ - Construct a single object from an XML string. The namespaces have to be declared on the object itself, since there - is no surrounding environment element. - - :param file: A filename or file-like object to read the XML-serialized data from - :param construct: A member of the enum :class:`~.XMLConstructables`, specifying which type to construct. - :param failsafe: If true, the document is parsed in a failsafe way: missing attributes and elements are logged - instead of causing exceptions. Defect objects are skipped. - This parameter is ignored if a decoder class is specified. - :param stripped: If true, stripped XML elements are parsed. 
- See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 - This parameter is ignored if a decoder class is specified. - :param decoder: The decoder class used to decode the XML elements - :param constructor_kwargs: Keyword arguments passed to the constructor function - :raises ~lxml.etree.XMLSyntaxError: **Non-failsafe**: If the given file(-handle) has invalid XML - :raises KeyError: **Non-failsafe**: If a required namespace has not been declared on the XML document - :raises (~basyx.aas.model.base.AASConstraintViolation, KeyError, ValueError): **Non-failsafe**: Errors during - construction of the objects - :return: The constructed object or None, if an error occurred in failsafe mode. - """ - - try: - return read_aas_xml_element(file, construct, failsafe=failsafe, stripped=stripped, decoder=decoder, - **constructor_kwargs) - except ValueError: - decoder_ = _select_server_decoder(failsafe, stripped, decoder) - constructor: Callable[..., object] - - if construct == ServerXMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR: - constructor = decoder_.construct_asset_administration_shell_descriptor - elif construct == ServerXMLConstructables.SUBMODEL_DESCRIPTOR: - constructor = decoder_.construct_submodel_descriptor - else: - raise ValueError(f"{construct.name} cannot be constructed!") - - element = _parse_xml_document(file, failsafe=decoder_.failsafe) - return _failsafe_construct(element, constructor, decoder_.failsafe, **constructor_kwargs) From eb44e8a01c8dcd0e1aa6f6cd9125d40f5ab24d15 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Thu, 17 Apr 2025 14:31:24 +0200 Subject: [PATCH 27/52] change according to xmlization removal for registry and discovery classes --- server/app/api_utils/http_api_helpers.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/server/app/api_utils/http_api_helpers.py b/server/app/api_utils/http_api_helpers.py index 276e49d3d..8c65f9c17 100644 --- a/server/app/api_utils/http_api_helpers.py +++ 
b/server/app/api_utils/http_api_helpers.py @@ -19,10 +19,9 @@ from basyx.aas import model -from basyx.aas.adapter.xml import XMLConstructables +from basyx.aas.adapter.xml import XMLConstructables, read_aas_xml_element from server.app import server_model -from server.app.adapter.xmlization import ServerXMLConstructables, read_server_aas_xml_element from server.app.adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder from typing import Callable, List, Optional, Type, TypeVar, Union @@ -67,15 +66,16 @@ class HTTPApiDecoder: model.Submodel: XMLConstructables.SUBMODEL, model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, model.Reference: XMLConstructables.REFERENCE, - - server_model.AssetAdministrationShellDescriptor: ServerXMLConstructables.ASSET_ADMINISTRATION_SHELL_DESCRIPTOR, - server_model.SubmodelDescriptor: ServerXMLConstructables.SUBMODEL_DESCRIPTOR, - server_model.AssetLink: ServerXMLConstructables.ASSET_LINK, } @classmethod def check_type_support(cls, type_: type): - if type_ not in cls.type_constructables_map: + tolerated_types = ( + server_model.AssetAdministrationShellDescriptor, + server_model.SubmodelDescriptor, + server_model.AssetLink, + ) + if type_ not in cls.type_constructables_map and type_ not in tolerated_types: raise TypeError(f"Parsing {type_} is not supported!") @classmethod @@ -146,7 +146,7 @@ def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: cls.check_type_support(expect_type) try: xml_data = io.BytesIO(data) - rv = read_server_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], + rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], stripped=stripped, failsafe=False) except (KeyError, ValueError) as e: # xml deserialization creates an error chain. 
since we only return one error, return the root cause From d608409c7cd70e4e63b29293d0d379b1ae387b42 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 22 Apr 2025 14:58:44 +0200 Subject: [PATCH 28/52] fix error with ServerAASToJSONEncoder --- server/app/interfaces/discovery.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index 639925175..9731bc333 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -18,6 +18,8 @@ from .. import server_model from ..adapter.jsonization import ServerAASToJsonEncoder +encoder=ServerAASToJsonEncoder() + class AbstractDiscoveryStore(metaclass=abc.ABCMeta): aas_id_to_asset_ids: Any asset_id_to_aas_ids: Any @@ -47,7 +49,7 @@ def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_ pass @abc.abstractmethod - def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: pass @@ -62,7 +64,7 @@ def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: - serialized_assets = [ServerAASToJsonEncoder.default(asset_id) for asset_id in asset_ids] + serialized_assets = [encoder.default(asset_id) for asset_id in asset_ids] if aas_id in self.aas_id_to_asset_ids: for asset in serialized_assets: if asset not in self.aas_id_to_asset_ids[aas_id]: @@ -85,18 +87,15 @@ def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> Li def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: asset_key = f"{asset_id.name}:{asset_id.value}" - aas_key = aas_id - # FIXME if asset_key in self.asset_id_to_aas_ids: - 
self.asset_id_to_aas_ids[asset_key].add(aas_key) + self.asset_id_to_aas_ids[asset_key].add(aas_id) else: - self.asset_id_to_aas_ids[asset_key] = {aas_key} + self.asset_id_to_aas_ids[asset_key] = {aas_id} - def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: asset_key = f"{asset_id.name}:{asset_id.value}" - aas_key = aas_id if asset_key in self.asset_id_to_aas_ids: - self.asset_id_to_aas_ids[asset_key].discard(aas_key) + self.asset_id_to_aas_ids[asset_key].discard(aas_id) class MongoDiscoveryStore(AbstractDiscoveryStore): @@ -120,7 +119,7 @@ def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: key = aas_id # Convert each SpecificAssetId using the serialization helper. - serializable_assets = [ServerAASToJsonEncoder.default(asset_id) for asset_id in asset_ids] + serializable_assets = [encoder.default(asset_id) for asset_id in asset_ids] self.coll_aas_to_assets.update_one( {"_id": key}, {"$addToSet": {"asset_ids": {"$each": serializable_assets}}}, @@ -140,19 +139,18 @@ def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> Li return doc["aas_ids"] if doc and "aas_ids" in doc else [] def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - asset_key = str(ServerAASToJsonEncoder.default(asset_id)) + asset_key = str(encoder.default(asset_id)) self.coll_asset_to_aas.update_one( {"_id": asset_key}, {"$addToSet": {"aas_ids": aas_id}}, upsert=True ) - def remove_aas_from_asset_link(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - asset_key = str(ServerAASToJsonEncoder.default(asset_id)) - aas_key = aas_id + def _delete_aas_id_from_specific_asset_ids(self, 
asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = str(encoder.default(asset_id)) self.coll_asset_to_aas.update_one( {"_id": asset_key}, - {"$pull": {"aas_ids": aas_key}} + {"$pull": {"aas_ids": aas_id}} ) From dde249976c81d96ed10bf734ab7e43d4f3759bd1 Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 24 Apr 2025 01:09:16 +0200 Subject: [PATCH 29/52] Refactor `response.py` Move all classes from `response.py` to `base.py`. --- server/app/api_utils/response.py | 202 --------------------------- server/app/interfaces/base.py | 205 +++++++++++++++++++++++++++- server/app/interfaces/registry.py | 3 +- server/app/interfaces/repository.py | 3 +- 4 files changed, 203 insertions(+), 210 deletions(-) delete mode 100644 server/app/api_utils/response.py diff --git a/server/app/api_utils/response.py b/server/app/api_utils/response.py deleted file mode 100644 index 73c16887d..000000000 --- a/server/app/api_utils/response.py +++ /dev/null @@ -1,202 +0,0 @@ -import abc -import datetime -import enum -import json -from typing import Union, List, Optional, Type, Dict - -import werkzeug.exceptions -from lxml import etree -from werkzeug import Response, Request - -from basyx.aas.adapter._generic import XML_NS_MAP -from basyx.aas.adapter.xml import xml_serialization -from server.app.adapter.jsonization import ServerAASToJsonEncoder - - -@enum.unique -class MessageType(enum.Enum): - UNDEFINED = enum.auto() - INFO = enum.auto() - WARNING = enum.auto() - ERROR = enum.auto() - EXCEPTION = enum.auto() - - def __str__(self): - return self.name.capitalize() - - -class Message: - def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, - timestamp: Optional[datetime.datetime] = None): - self.code: str = code - self.text: str = text - self.message_type: MessageType = message_type - self.timestamp: datetime.datetime = timestamp if timestamp is not None \ - else datetime.datetime.now(datetime.timezone.utc) - - -class Result: - def 
__init__(self, success: bool, messages: Optional[List[Message]] = None): - if messages is None: - messages = [] - self.success: bool = success - self.messages: List[Message] = messages - - -ResponseData = Union[Result, object, List[object]] - - -class APIResponse(abc.ABC, Response): - @abc.abstractmethod - def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, - stripped: bool = False, *args, **kwargs): - super().__init__(*args, **kwargs) - if obj is None: - self.status_code = 204 - else: - self.data = self.serialize(obj, cursor, stripped) - - @abc.abstractmethod - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - pass - - -class JsonResponse(APIResponse): - def __init__(self, *args, content_type="application/json", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - if cursor is None: - data = obj - else: - data = { - "paging_metadata": {"cursor": str(cursor)}, - "result": obj - } - return json.dumps( - data, - cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, - separators=(",", ":") - ) - - -class XmlResponse(APIResponse): - def __init__(self, *args, content_type="application/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - root_elem = etree.Element("response", nsmap=XML_NS_MAP) - if cursor is not None: - root_elem.set("cursor", str(cursor)) - if isinstance(obj, Result): - result_elem = self.result_to_xml(obj, **XML_NS_MAP) - for child in result_elem: - root_elem.append(child) - elif isinstance(obj, list): - for item in obj: - item_elem = xml_serialization.object_to_xml_element(item) - root_elem.append(item_elem) - else: - obj_elem = xml_serialization.object_to_xml_element(obj) - for child in obj_elem: - root_elem.append(child) - 
etree.cleanup_namespaces(root_elem) - xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") - return xml_str # type: ignore[return-value] - - @classmethod - def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: - result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(cls.message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - @classmethod - def message_to_xml(cls, message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - -class XmlResponseAlt(XmlResponse): - def __init__(self, *args, content_type="text/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - -class ResultToJsonEncoder(ServerAASToJsonEncoder): - @classmethod - def _result_to_json(cls, result: Result) -> Dict[str, object]: - return { - "success": result.success, - "messages": result.messages - } - - @classmethod - def _message_to_json(cls, message: Message) -> Dict[str, object]: - return { - "messageType": message.message_type, - "text": message.text, - "code": message.code, - "timestamp": message.timestamp.isoformat() - } - - def default(self, obj: object) -> object: - if isinstance(obj, Result): - return self._result_to_json(obj) - if 
isinstance(obj, Message): - return self._message_to_json(obj) - if isinstance(obj, MessageType): - return str(obj) - return super().default(obj) - - -class StrippedResultToJsonEncoder(ResultToJsonEncoder): - stripped = True - - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if exception.description is not None else "", - MessageType.ERROR) - result = Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) - - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - "text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index 88f244664..36b77c6d6 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -1,17 +1,183 @@ +import abc +import datetime +import enum import itertools -from typing import Iterable, Type, Iterator, Tuple +import json +from typing import Iterable, Type, Iterator, Tuple, Optional, List, Union, Dict import werkzeug.exceptions import werkzeug.routing import werkzeug.utils +from lxml import etree from werkzeug import Response, Request from werkzeug.exceptions import 
NotFound, BadRequest from werkzeug.routing import MapAdapter from basyx.aas import model +from basyx.aas.adapter._generic import XML_NS_MAP +from basyx.aas.adapter.xml import xml_serialization from basyx.aas.model import AbstractObjectStore +from server.app.adapter.jsonization import ServerAASToJsonEncoder from server.app.api_utils.http_api_helpers import T -from server.app.api_utils.response import get_response_type, http_exception_to_response + + +@enum.unique +class MessageType(enum.Enum): + UNDEFINED = enum.auto() + INFO = enum.auto() + WARNING = enum.auto() + ERROR = enum.auto() + EXCEPTION = enum.auto() + + def __str__(self): + return self.name.capitalize() + + +class Message: + def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, + timestamp: Optional[datetime.datetime] = None): + self.code: str = code + self.text: str = text + self.message_type: MessageType = message_type + self.timestamp: datetime.datetime = timestamp if timestamp is not None \ + else datetime.datetime.now(datetime.timezone.utc) + + +class Result: + def __init__(self, success: bool, messages: Optional[List[Message]] = None): + if messages is None: + messages = [] + self.success: bool = success + self.messages: List[Message] = messages + + +ResponseData = Union[Result, object, List[object]] + + +class APIResponse(abc.ABC, Response): + @abc.abstractmethod + def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, + stripped: bool = False, *args, **kwargs): + super().__init__(*args, **kwargs) + if obj is None: + self.status_code = 204 + else: + self.data = self.serialize(obj, cursor, stripped) + + @abc.abstractmethod + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + pass + + +class JsonResponse(APIResponse): + def __init__(self, *args, content_type="application/json", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, 
cursor: Optional[int], stripped: bool) -> str: + if cursor is None: + data = obj + else: + data = { + "paging_metadata": {"cursor": str(cursor)}, + "result": obj + } + return json.dumps( + data, + cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, + separators=(",", ":") + ) + + +class XmlResponse(APIResponse): + def __init__(self, *args, content_type="application/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + root_elem = etree.Element("response", nsmap=XML_NS_MAP) + if cursor is not None: + root_elem.set("cursor", str(cursor)) + if isinstance(obj, Result): + result_elem = self.result_to_xml(obj, **XML_NS_MAP) + for child in result_elem: + root_elem.append(child) + elif isinstance(obj, list): + for item in obj: + item_elem = xml_serialization.object_to_xml_element(item) + root_elem.append(item_elem) + else: + obj_elem = xml_serialization.object_to_xml_element(obj) + for child in obj_elem: + root_elem.append(child) + etree.cleanup_namespaces(root_elem) + xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") + return xml_str # type: ignore[return-value] + + @classmethod + def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(cls.message_to_xml(message)) + + result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + @classmethod + def message_to_xml(cls, message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = 
message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + + +class XmlResponseAlt(XmlResponse): + def __init__(self, *args, content_type="text/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + +class ResultToJsonEncoder(ServerAASToJsonEncoder): + @classmethod + def _result_to_json(cls, result: Result) -> Dict[str, object]: + return { + "success": result.success, + "messages": result.messages + } + + @classmethod + def _message_to_json(cls, message: Message) -> Dict[str, object]: + return { + "messageType": message.message_type, + "text": message.text, + "code": message.code, + "timestamp": message.timestamp.isoformat() + } + + def default(self, obj: object) -> object: + if isinstance(obj, Result): + return self._result_to_json(obj) + if isinstance(obj, Message): + return self._message_to_json(obj) + if isinstance(obj, MessageType): + return str(obj) + return super().default(obj) + + +class StrippedResultToJsonEncoder(ResultToJsonEncoder): + stripped = True class BaseWSGIApp: @@ -40,7 +206,7 @@ def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T def handle_request(self, request: Request): map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) try: - response_t = get_response_type(request) + response_t = self.get_response_type(request) except werkzeug.exceptions.NotAcceptable as e: return e @@ -51,7 +217,38 @@ def handle_request(self, request: Request): # any raised error that leaves this function will cause a 500 internal server error # so catch raised http exceptions and return them except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) + return 
self.http_exception_to_response(e, response_t) + + @staticmethod + def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] + + @staticmethod + def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 400: + message = Message(type(exception).__name__, + exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) class ObjectStoreWSGIApp(BaseWSGIApp): diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 24fd19cca..348def865 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -15,8 +15,7 @@ import server.app.server_model as server_model from basyx.aas import model from server.app.api_utils.http_api_helpers import HTTPApiDecoder, Base64URLConverter, is_stripped_request -from server.app.api_utils.response import APIResponse -from server.app.interfaces.base import ObjectStoreWSGIApp +from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse class RegistryAPI(ObjectStoreWSGIApp): diff --git a/server/app/interfaces/repository.py 
b/server/app/interfaces/repository.py index 49663bbdf..e0310e320 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -43,8 +43,7 @@ from basyx.aas.adapter import aasx from server.app.api_utils.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, HTTPApiDecoder, \ is_stripped_request -from server.app.api_utils.response import APIResponse -from .base import ObjectStoreWSGIApp +from .base import ObjectStoreWSGIApp, APIResponse class WSGIApp(ObjectStoreWSGIApp): From df38540815455e959b02679079c6b5a079ac834f Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 24 Apr 2025 01:20:56 +0200 Subject: [PATCH 30/52] Refactor utils --- server/app/api_utils/http_api_helpers.py | 245 --------------------- server/app/interfaces/base.py | 183 ++++++++++++++- server/app/interfaces/discovery.py | 6 +- server/app/interfaces/registry.py | 4 +- server/app/interfaces/repository.py | 5 +- server/app/{api_utils => util}/__init__.py | 0 server/app/util/converters.py | 63 ++++++ 7 files changed, 248 insertions(+), 258 deletions(-) delete mode 100644 server/app/api_utils/http_api_helpers.py rename server/app/{api_utils => util}/__init__.py (100%) create mode 100644 server/app/util/converters.py diff --git a/server/app/api_utils/http_api_helpers.py b/server/app/api_utils/http_api_helpers.py deleted file mode 100644 index 8c65f9c17..000000000 --- a/server/app/api_utils/http_api_helpers.py +++ /dev/null @@ -1,245 +0,0 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. 
-# -# SPDX-License-Identifier: MIT -import base64 -import binascii -import io -import json - -from lxml import etree -import werkzeug.exceptions -import werkzeug.routing -import werkzeug.urls -import werkzeug.utils -from werkzeug.exceptions import BadRequest, UnprocessableEntity -from werkzeug.wrappers import Request - -from basyx.aas import model - -from basyx.aas.adapter.xml import XMLConstructables, read_aas_xml_element - -from server.app import server_model -from server.app.adapter.jsonization import ServerStrictAASFromJsonDecoder, ServerStrictStrippedAASFromJsonDecoder - -from typing import Callable, List, Optional, Type, TypeVar, Union - - -def is_stripped_request(request: Request) -> bool: - return request.args.get("level") == "core" - - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - - -def base64url_decode(data: str) -> str: - try: - # If the requester omits the base64 padding, an exception will be raised. - # However, Python doesn't complain about too much padding, - # thus we simply always append two padding characters (==). 
- # See also: https://stackoverflow.com/a/49459036/4780052 - decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) - except binascii.Error: - raise BadRequest(f"Encoded data {data} is invalid base64url!") - except UnicodeDecodeError: - raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") - return decoded - - -def base64url_encode(data: str) -> str: - encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") - return encoded - - -class HTTPApiDecoder: - # these are the types we can construct (well, only the ones we need) - type_constructables_map = { - model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, - model.AssetInformation: XMLConstructables.ASSET_INFORMATION, - model.ModelReference: XMLConstructables.MODEL_REFERENCE, - model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, - model.Qualifier: XMLConstructables.QUALIFIER, - model.Submodel: XMLConstructables.SUBMODEL, - model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, - model.Reference: XMLConstructables.REFERENCE, - } - - @classmethod - def check_type_support(cls, type_: type): - tolerated_types = ( - server_model.AssetAdministrationShellDescriptor, - server_model.SubmodelDescriptor, - server_model.AssetLink, - ) - if type_ not in cls.type_constructables_map and type_ not in tolerated_types: - raise TypeError(f"Parsing {type_} is not supported!") - - @classmethod - def assert_type(cls, obj: object, type_: Type[T]) -> T: - if not isinstance(obj, type_): - raise UnprocessableEntity(f"Object {obj!r} is not of type {type_.__name__}!") - return obj - - @classmethod - def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - cls.check_type_support(expect_type) - decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \ - else ServerStrictAASFromJsonDecoder - try: - parsed = json.loads(data, 
cls=decoder) - if isinstance(parsed, list) and expect_single: - raise UnprocessableEntity(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") - if not isinstance(parsed, list) and not expect_single: - raise UnprocessableEntity(f"Expected List[{expect_type.__name__}], got {parsed!r}!") - parsed = [parsed] if not isinstance(parsed, list) else parsed - - # TODO: the following is ugly, but necessary because references aren't self-identified objects - # in the json schema - # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines - # that automatically - mapping = { - model.ModelReference: decoder._construct_model_reference, # type: ignore[assignment] - model.AssetInformation: decoder._construct_asset_information, # type: ignore[assignment] - model.SpecificAssetId: decoder._construct_specific_asset_id, # type: ignore[assignment] - model.Reference: decoder._construct_reference, # type: ignore[assignment] - model.Qualifier: decoder._construct_qualifier, # type: ignore[assignment] - server_model.AssetAdministrationShellDescriptor: - decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] - server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor, # type: ignore[assignment] - server_model.AssetLink: decoder._construct_asset_link, # type: ignore[assignment] - } - - constructor: Optional[Callable[..., T]] = mapping.get(expect_type) - args = [] - if expect_type is model.ModelReference: - args.append(model.Submodel) - - if constructor is not None: - # construct elements that aren't self-identified - return [constructor(obj, *args) for obj in parsed] - - except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: - raise UnprocessableEntity(str(e)) from e - - return [cls.assert_type(obj, expect_type) for obj in parsed] - - @classmethod - def base64url_json_list(cls, data: str, expect_type: Type[T], stripped: bool, 
expect_single: bool) -> List[T]: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, expect_single) - - @classmethod - def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def base64url_json(cls, data: str, expect_type: Type[T], stripped: bool) -> T: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: - cls.check_type_support(expect_type) - try: - xml_data = io.BytesIO(data) - rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], - stripped=stripped, failsafe=False) - except (KeyError, ValueError) as e: - # xml deserialization creates an error chain. since we only return one error, return the root cause - f: BaseException = e - while f.__cause__ is not None: - f = f.__cause__ - raise UnprocessableEntity(str(f)) from e - except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: - raise UnprocessableEntity(str(e)) from e - return cls.assert_type(rv, expect_type) - - @classmethod - def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: - """ - TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent - running out of memory. but it doesn't state how to check the content length - also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json - schema - In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, - which should limit the maximum content length. - """ - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! 
Supported types: " - + ", ".join(valid_content_types)) - - if request.mimetype == "application/json": - return cls.json(request.get_data(), expect_type, stripped) - return cls.xml(request.get_data(), expect_type, stripped) - - @classmethod - def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> List[T]: - """ - Deserializes the request body to an instance (or list of instances) - of the expected type. - """ - # TODO: Refactor this method and request_body to avoid code duplication - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! Supported types: " + ", ".join(valid_content_types) - ) - - if request.mimetype == "application/json": - raw_data = request.get_data() - try: - parsed = json.loads(raw_data) - except Exception as e: - raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") - # Prüfe, ob parsed ein Array ist: - if isinstance(parsed, list): - # Für jedes Element wird die Konvertierung angewandt. - return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed] # type: ignore - else: - return cls._convert_single_json_item(parsed, expect_type, stripped) - else: - return cls.xml(request.get_data(), expect_type, stripped) - - @classmethod - def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bool) -> T: - """ - Konvertiert ein einzelnes JSON-Objekt (als Python-Dict) in ein Objekt vom Typ expect_type. - Hierbei wird das Dictionary zuerst wieder in einen JSON-String serialisiert und als Bytes übergeben. 
- """ - json_bytes = json.dumps(data).encode("utf-8") - return cls.json(json_bytes, expect_type, stripped) - - -class Base64URLConverter(werkzeug.routing.UnicodeConverter): - - def to_url(self, value: model.Identifier) -> str: - return super().to_url(base64url_encode(value)) - - def to_python(self, value: str) -> model.Identifier: - value = super().to_python(value) - decoded = base64url_decode(super().to_python(value)) - return decoded - - -class IdShortPathConverter(werkzeug.routing.UnicodeConverter): - id_short_sep = "." - - def to_url(self, value: List[str]) -> str: - return super().to_url(self.id_short_sep.join(value)) - - def to_python(self, value: str) -> List[str]: - id_shorts = super().to_python(value).split(self.id_short_sep) - for id_short in id_shorts: - try: - model.Referable.validate_id_short(id_short) - except (ValueError, model.AASConstraintViolation): - raise BadRequest(f"{id_short} is not a valid id_short!") - return id_shorts diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index 36b77c6d6..23d763361 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -1,24 +1,30 @@ import abc import datetime import enum +import io import itertools import json -from typing import Iterable, Type, Iterator, Tuple, Optional, List, Union, Dict +from typing import Iterable, Type, Iterator, Tuple, Optional, List, Union, Dict, Callable, TypeVar import werkzeug.exceptions import werkzeug.routing import werkzeug.utils from lxml import etree from werkzeug import Response, Request -from werkzeug.exceptions import NotFound, BadRequest +from werkzeug.exceptions import NotFound, BadRequest, UnprocessableEntity from werkzeug.routing import MapAdapter from basyx.aas import model from basyx.aas.adapter._generic import XML_NS_MAP -from basyx.aas.adapter.xml import xml_serialization +from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element from basyx.aas.model import AbstractObjectStore -from 
server.app.adapter.jsonization import ServerAASToJsonEncoder -from server.app.api_utils.http_api_helpers import T +from server.app import server_model +from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, \ + ServerStrictStrippedAASFromJsonDecoder +from server.app.util.converters import base64url_decode + + +T = TypeVar("T") @enum.unique @@ -266,3 +272,170 @@ def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._ raise NotFound(f"No {type_.__name__} with {identifier} found!") identifiable.update() return identifiable + +class HTTPApiDecoder: + # these are the types we can construct (well, only the ones we need) + type_constructables_map = { + model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, + model.AssetInformation: XMLConstructables.ASSET_INFORMATION, + model.ModelReference: XMLConstructables.MODEL_REFERENCE, + model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, + model.Qualifier: XMLConstructables.QUALIFIER, + model.Submodel: XMLConstructables.SUBMODEL, + model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, + model.Reference: XMLConstructables.REFERENCE, + } + + @classmethod + def check_type_support(cls, type_: type): + tolerated_types = ( + server_model.AssetAdministrationShellDescriptor, + server_model.SubmodelDescriptor, + server_model.AssetLink, + ) + if type_ not in cls.type_constructables_map and type_ not in tolerated_types: + raise TypeError(f"Parsing {type_} is not supported!") + + @classmethod + def assert_type(cls, obj: object, type_: Type[T]) -> T: + if not isinstance(obj, type_): + raise UnprocessableEntity(f"Object {obj!r} is not of type {type_.__name__}!") + return obj + + @classmethod + def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + cls.check_type_support(expect_type) + decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if 
stripped \ + else ServerStrictAASFromJsonDecoder + try: + parsed = json.loads(data, cls=decoder) + if isinstance(parsed, list) and expect_single: + raise UnprocessableEntity(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") + if not isinstance(parsed, list) and not expect_single: + raise UnprocessableEntity(f"Expected List[{expect_type.__name__}], got {parsed!r}!") + parsed = [parsed] if not isinstance(parsed, list) else parsed + + # TODO: the following is ugly, but necessary because references aren't self-identified objects + # in the json schema + # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines + # that automatically + mapping = { + model.ModelReference: decoder._construct_model_reference, # type: ignore[assignment] + model.AssetInformation: decoder._construct_asset_information, # type: ignore[assignment] + model.SpecificAssetId: decoder._construct_specific_asset_id, # type: ignore[assignment] + model.Reference: decoder._construct_reference, # type: ignore[assignment] + model.Qualifier: decoder._construct_qualifier, # type: ignore[assignment] + server_model.AssetAdministrationShellDescriptor: + decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] + server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor, # type: ignore[assignment] + server_model.AssetLink: decoder._construct_asset_link, # type: ignore[assignment] + } + + constructor: Optional[Callable[..., T]] = mapping.get(expect_type) + args = [] + if expect_type is model.ModelReference: + args.append(model.Submodel) + + if constructor is not None: + # construct elements that aren't self-identified + return [constructor(obj, *args) for obj in parsed] + + except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: + raise UnprocessableEntity(str(e)) from e + + return [cls.assert_type(obj, expect_type) for obj in parsed] + + @classmethod + 
def base64url_json_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, expect_single) + + @classmethod + def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def base64url_json(cls, data: str, expect_type: Type[T], stripped: bool) -> T: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: + cls.check_type_support(expect_type) + try: + xml_data = io.BytesIO(data) + rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], + stripped=stripped, failsafe=False) + except (KeyError, ValueError) as e: + # xml deserialization creates an error chain. since we only return one error, return the root cause + f: BaseException = e + while f.__cause__ is not None: + f = f.__cause__ + raise UnprocessableEntity(str(f)) from e + except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: + raise UnprocessableEntity(str(e)) from e + return cls.assert_type(rv, expect_type) + + @classmethod + def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: + """ + TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent + running out of memory. but it doesn't state how to check the content length + also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json + schema + In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, + which should limit the maximum content length. 
+ """ + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! Supported types: " + + ", ".join(valid_content_types)) + + if request.mimetype == "application/json": + return cls.json(request.get_data(), expect_type, stripped) + return cls.xml(request.get_data(), expect_type, stripped) + + @classmethod + def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> List[T]: + """ + Deserializes the request body to an instance (or list of instances) + of the expected type. + """ + # TODO: Refactor this method and request_body to avoid code duplication + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! Supported types: " + ", ".join(valid_content_types) + ) + + if request.mimetype == "application/json": + raw_data = request.get_data() + try: + parsed = json.loads(raw_data) + except Exception as e: + raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") + # Prüfe, ob parsed ein Array ist: + if isinstance(parsed, list): + # Für jedes Element wird die Konvertierung angewandt. + return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed] # type: ignore + else: + return cls._convert_single_json_item(parsed, expect_type, stripped) + else: + return cls.xml(request.get_data(), expect_type, stripped) + + @classmethod + def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bool) -> T: + """ + Konvertiert ein einzelnes JSON-Objekt (als Python-Dict) in ein Objekt vom Typ expect_type. + Hierbei wird das Dictionary zuerst wieder in einen JSON-String serialisiert und als Bytes übergeben. 
+ """ + json_bytes = json.dumps(data).encode("utf-8") + return cls.json(json_bytes, expect_type, stripped) + + +def is_stripped_request(request: Request) -> bool: + return request.args.get("level") == "core" + diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index 9731bc333..8f5810b10 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -13,9 +13,9 @@ from werkzeug.wrappers import Request, Response from basyx.aas import model -from server.app.api_utils.http_api_helpers import Base64URLConverter, HTTPApiDecoder -from server.app.interfaces.base import BaseWSGIApp -from .. import server_model +from server.app.util.converters import Base64URLConverter +from server.app.interfaces.base import BaseWSGIApp, HTTPApiDecoder +from .. import server_model as server_model from ..adapter.jsonization import ServerAASToJsonEncoder encoder=ServerAASToJsonEncoder() diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 348def865..099d3d093 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -14,8 +14,8 @@ import server.app.server_model as server_model from basyx.aas import model -from server.app.api_utils.http_api_helpers import HTTPApiDecoder, Base64URLConverter, is_stripped_request -from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse +from server.app.util.converters import Base64URLConverter +from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder class RegistryAPI(ObjectStoreWSGIApp): diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index e0310e320..d6abf17e6 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -41,9 +41,8 @@ from basyx.aas import model from basyx.aas.adapter import aasx -from server.app.api_utils.http_api_helpers import Base64URLConverter, IdShortPathConverter, T, 
HTTPApiDecoder, \ - is_stripped_request -from .base import ObjectStoreWSGIApp, APIResponse +from server.app.util.converters import Base64URLConverter, IdShortPathConverter +from .base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder, T class WSGIApp(ObjectStoreWSGIApp): diff --git a/server/app/api_utils/__init__.py b/server/app/util/__init__.py similarity index 100% rename from server/app/api_utils/__init__.py rename to server/app/util/__init__.py diff --git a/server/app/util/converters.py b/server/app/util/converters.py new file mode 100644 index 000000000..47e1ed645 --- /dev/null +++ b/server/app/util/converters.py @@ -0,0 +1,63 @@ +# Copyright (c) 2024 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +import base64 +import binascii + +import werkzeug.routing +import werkzeug.utils +from werkzeug.exceptions import BadRequest + +from basyx.aas import model + +from typing import List + +BASE64URL_ENCODING = "utf-8" + + +def base64url_decode(data: str) -> str: + try: + # If the requester omits the base64 padding, an exception will be raised. + # However, Python doesn't complain about too much padding, + # thus we simply always append two padding characters (==). 
+ # See also: https://stackoverflow.com/a/49459036/4780052 + decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) + except binascii.Error: + raise BadRequest(f"Encoded data {data} is invalid base64url!") + except UnicodeDecodeError: + raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") + return decoded + + +def base64url_encode(data: str) -> str: + encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") + return encoded + + +class Base64URLConverter(werkzeug.routing.UnicodeConverter): + def to_url(self, value: model.Identifier) -> str: + return super().to_url(base64url_encode(value)) + + def to_python(self, value: str) -> model.Identifier: + value = super().to_python(value) + decoded = base64url_decode(super().to_python(value)) + return decoded + + +class IdShortPathConverter(werkzeug.routing.UnicodeConverter): + id_short_sep = "." + + def to_url(self, value: List[str]) -> str: + return super().to_url(self.id_short_sep.join(value)) + + def to_python(self, value: str) -> List[str]: + id_shorts = super().to_python(value).split(self.id_short_sep) + for id_short in id_shorts: + try: + model.Referable.validate_id_short(id_short) + except (ValueError, model.AASConstraintViolation): + raise BadRequest(f"{id_short} is not a valid id_short!") + return id_shorts From 1da157fb5d0743c2e7fa31bed444ffc4f6e02b70 Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 24 Apr 2025 01:24:41 +0200 Subject: [PATCH 31/52] Rename `server_model` to `model` similiar as in sdk --- server/app/adapter/jsonization.py | 56 +++++++++---------- server/app/interfaces/base.py | 2 +- server/app/interfaces/discovery.py | 2 +- server/app/interfaces/registry.py | 34 +++++------ .../app/{server_model => model}/__init__.py | 0 .../app/{server_model => model}/descriptor.py | 2 +- .../app/{server_model => model}/endpoint.py | 0 7 files changed, 48 insertions(+), 48 deletions(-) rename server/app/{server_model => 
model}/__init__.py (100%) rename server/app/{server_model => model}/descriptor.py (99%) rename server/app/{server_model => model}/endpoint.py (100%) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 38601f268..276f6c31b 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -1,6 +1,6 @@ from typing import Dict, Set, Optional, Type -import server.app.server_model as server_model +import server.app.model as server_model from basyx.aas import model from basyx.aas.adapter import _generic from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO @@ -14,8 +14,8 @@ logger = logging.getLogger(__name__) JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( - ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), - ('submodelDescriptors', server_model.SubmodelDescriptor) + ('assetAdministrationShellDescriptors', model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', model.SubmodelDescriptor) ) @@ -40,7 +40,7 @@ def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], objec def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None: super()._amend_abstract_attributes(obj, dct) - if isinstance(obj, server_model.Descriptor): + if isinstance(obj, model.Descriptor): if 'description' in dct: obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), model.MultiLanguageTextType) @@ -54,7 +54,7 @@ def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None @classmethod def _construct_asset_administration_shell_descriptor( cls, dct: Dict[str, object], - object_class=server_model.AssetAdministrationShellDescriptor) -> server_model.AssetAdministrationShellDescriptor: + object_class=model.AssetAdministrationShellDescriptor) -> model.AssetAdministrationShellDescriptor: ret = object_class(id_=_get_ts(dct, 'id', str)) 
cls._amend_abstract_attributes(ret, dct) if 'administration' in dct: @@ -75,9 +75,9 @@ def _construct_asset_administration_shell_descriptor( if 'protocolInformation' in endpoint_dct: ret.endpoints.append( cls._construct_endpoint(endpoint_dct, - server_model.Endpoint)) + model.Endpoint)) elif 'href' in endpoint_dct: - protocol_info = server_model.ProtocolInformation( + protocol_info = model.ProtocolInformation( href=_get_ts(endpoint_dct['href'], 'href', str), endpoint_protocol=_get_ts(endpoint_dct['href'], 'endpointProtocol', @@ -90,7 +90,7 @@ def _construct_asset_administration_shell_descriptor( list) if 'endpointProtocolVersion' in endpoint_dct['href'] else None ) - ret.endpoints.append(server_model.Endpoint( + ret.endpoints.append(model.Endpoint( protocol_information=protocol_info, interface=_get_ts(endpoint_dct, 'interface', str))) @@ -98,12 +98,12 @@ def _construct_asset_administration_shell_descriptor( ret.id_short = _get_ts(dct, 'idShort', str) if 'submodelDescriptors' in dct: ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), - server_model.SubmodelDescriptor) + model.SubmodelDescriptor) return ret @classmethod def _construct_protocol_information(cls, dct: Dict[str, object], - object_class=server_model.ProtocolInformation) -> server_model.ProtocolInformation: + object_class=model.ProtocolInformation) -> model.ProtocolInformation: ret = object_class( href=_get_ts(dct, 'href', str), endpoint_protocol=_get_ts(dct, 'endpointProtocol', @@ -123,11 +123,11 @@ def _construct_protocol_information(cls, dct: Dict[str, object], @classmethod def _construct_endpoint(cls, dct: Dict[str, object], - object_class=server_model.Endpoint) -> server_model.Endpoint: + object_class=model.Endpoint) -> model.Endpoint: ret = object_class( protocol_information=cls._construct_protocol_information( _get_ts(dct, 'protocolInformation', dict), - server_model.ProtocolInformation + model.ProtocolInformation ), interface=_get_ts(dct, 
'interface', str) @@ -138,7 +138,7 @@ def _construct_endpoint(cls, dct: Dict[str, object], @classmethod def _construct_submodel_descriptor( cls, dct: Dict[str, object], - object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: + object_class=model.SubmodelDescriptor) -> model.SubmodelDescriptor: ret = object_class(id_=_get_ts(dct, 'id', str), endpoints=[]) cls._amend_abstract_attributes(ret, dct) @@ -146,9 +146,9 @@ def _construct_submodel_descriptor( if 'protocolInformation' in endpoint_dct: ret.endpoints.append( cls._construct_endpoint(endpoint_dct, - server_model.Endpoint)) + model.Endpoint)) elif 'href' in endpoint_dct: - protocol_info = server_model.ProtocolInformation( + protocol_info = model.ProtocolInformation( href=_get_ts(endpoint_dct['href'], 'href', str), endpoint_protocol=_get_ts(endpoint_dct['href'], 'endpointProtocol', @@ -161,7 +161,7 @@ def _construct_submodel_descriptor( list) if 'endpointProtocolVersion' in endpoint_dct['href'] else None ) - ret.endpoints.append(server_model.Endpoint( + ret.endpoints.append(model.Endpoint( protocol_information=protocol_info, interface=_get_ts(endpoint_dct, 'interface', str))) @@ -179,7 +179,7 @@ def _construct_submodel_descriptor( @classmethod def _construct_asset_link( - cls, dct: Dict[str, object], object_class=server_model.AssetLink) -> server_model.AssetLink: + cls, dct: Dict[str, object], object_class=model.AssetLink) -> model.AssetLink: ret = object_class(name=_get_ts(dct, 'name', str), value=_get_ts(dct, 'value', str)) return ret @@ -225,18 +225,18 @@ class ServerAASToJsonEncoder(AASToJsonEncoder): def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: serializers = super()._get_aas_class_serializers() serializers.update({ - server_model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, - server_model.SubmodelDescriptor: cls._submodel_descriptor_to_json, - server_model.Endpoint: cls._endpoint_to_json, - server_model.ProtocolInformation: 
cls._protocol_information_to_json, - server_model.AssetLink: cls._asset_link_to_json + model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, + model.SubmodelDescriptor: cls._submodel_descriptor_to_json, + model.Endpoint: cls._endpoint_to_json, + model.ProtocolInformation: cls._protocol_information_to_json, + model.AssetLink: cls._asset_link_to_json }) return serializers @classmethod def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: data: Dict[str, object] = super()._abstract_classes_to_json(obj) - if isinstance(obj, server_model.Descriptor): + if isinstance(obj, model.Descriptor): if obj.description: data['description'] = obj.description if obj.display_name: @@ -246,7 +246,7 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: return data @classmethod - def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> \ + def _asset_administration_shell_descriptor_to_json(cls, obj: model.AssetAdministrationShellDescriptor) -> \ Dict[str, object]: """ serialization of an object from class AssetAdministrationShell to json @@ -277,7 +277,7 @@ def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetA @classmethod def _protocol_information_to_json(cls, - obj: server_model.ProtocolInformation) -> \ + obj: model.ProtocolInformation) -> \ Dict[str, object]: data = cls._abstract_classes_to_json(obj) @@ -295,7 +295,7 @@ def _protocol_information_to_json(cls, return data @classmethod - def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: + def _endpoint_to_json(cls, obj: model.Endpoint) -> Dict[str, object]: data = cls._abstract_classes_to_json(obj) data['protocolInformation'] = cls._protocol_information_to_json( obj.protocol_information) @@ -303,7 +303,7 @@ def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: return data @classmethod - def _submodel_descriptor_to_json(cls, obj: 
server_model.SubmodelDescriptor) -> Dict[str, object]: + def _submodel_descriptor_to_json(cls, obj: model.SubmodelDescriptor) -> Dict[str, object]: """ serialization of an object from class Submodel to json @@ -325,7 +325,7 @@ def _submodel_descriptor_to_json(cls, obj: server_model.SubmodelDescriptor) -> D return data @classmethod - def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: + def _asset_link_to_json(cls, obj: model.AssetLink) -> Dict[str, object]: data = cls._abstract_classes_to_json(obj) data['name'] = obj.name data['value'] = obj.value diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index 23d763361..cc2ac9a4a 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -18,7 +18,7 @@ from basyx.aas.adapter._generic import XML_NS_MAP from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element from basyx.aas.model import AbstractObjectStore -from server.app import server_model +from server.app import model as server_model from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, \ ServerStrictStrippedAASFromJsonDecoder from server.app.util.converters import base64url_decode diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index 8f5810b10..b08448b06 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -15,7 +15,7 @@ from basyx.aas import model from server.app.util.converters import Base64URLConverter from server.app.interfaces.base import BaseWSGIApp, HTTPApiDecoder -from .. import server_model as server_model +from .. 
import model as server_model from ..adapter.jsonization import ServerAASToJsonEncoder encoder=ServerAASToJsonEncoder() diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 099d3d093..52301b506 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -12,7 +12,7 @@ from werkzeug.routing import MapAdapter, Rule, Submount from werkzeug.wrappers import Request, Response -import server.app.server_model as server_model +import server.app.model as server_model from basyx.aas import model from server.app.util.converters import Base64URLConverter from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder @@ -58,10 +58,10 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a }, strict_slashes=False) def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ - Iterator[server_model.AssetAdministrationShellDescriptor], int]: + Iterator[model.AssetAdministrationShellDescriptor], int]: - descriptors: Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( - server_model.AssetAdministrationShellDescriptor + descriptors: Iterator[model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( + model.AssetAdministrationShellDescriptor ) id_short = request.args.get("idShort") @@ -84,11 +84,11 @@ def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ paginated_descriptors, end_index = self._get_slice(request, descriptors) return paginated_descriptors, end_index - def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: - return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor) + def _get_aas_descriptor(self, url_args: Dict) -> model.AssetAdministrationShellDescriptor: + return self._get_obj_ts(url_args["aas_id"], model.AssetAdministrationShellDescriptor) - def _get_all_submodel_descriptors(self, request: 
Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: - submodel_descriptors: Iterator[model.Submodel] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) + def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[model.SubmodelDescriptor], int]: + submodel_descriptors: Iterator[model.Submodel] = self._get_all_obj_of_type(model.SubmodelDescriptor) id_short = request.args.get("idShort") if id_short is not None: submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) @@ -100,8 +100,8 @@ def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[serv paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) return paginated_submodel_descriptors, end_index - def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor: - return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) + def _get_submodel_descriptor(self, url_args: Dict) -> model.SubmodelDescriptor: + return self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor) # ------ AAS REGISTRY ROUTES ------- def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -111,7 +111,7 @@ def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - descriptor = HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False) + descriptor = HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, False) try: self.object_store.add(descriptor) except KeyError as e: @@ -130,7 +130,7 @@ def get_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: def put_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: descriptor = 
self._get_aas_descriptor(url_args) - descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, + descriptor.update_from(HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, is_stripped_request(request))) descriptor.commit() return response_t() @@ -177,7 +177,7 @@ def post_submodel_descriptor_through_superpath(self, map_adapter: MapAdapter) -> Response: aas_descriptor = self._get_aas_descriptor(url_args) submodel_descriptor = HTTPApiDecoder.request_body(request, - server_model.SubmodelDescriptor, + model.SubmodelDescriptor, is_stripped_request( request)) if any(sd.id == submodel_descriptor.id for sd in @@ -211,7 +211,7 @@ def put_submodel_descriptor_by_id_through_superpath(self, f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") submodel_descriptor.update_from( HTTPApiDecoder.request_body(request, - server_model.SubmodelDescriptor, + model.SubmodelDescriptor, is_stripped_request(request))) aas_descriptor.commit() return response_t() @@ -246,7 +246,7 @@ def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, respon def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, + submodel_descriptor = HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request)) try: self.object_store.add(submodel_descriptor) @@ -262,13 +262,13 @@ def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, respon **_kwargs) -> Response: submodel_descriptor = self._get_submodel_descriptor(url_args) submodel_descriptor.update_from( - HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) + HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request))) 
submodel_descriptor.commit() return response_t() def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: - self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)) + self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor)) return response_t() diff --git a/server/app/server_model/__init__.py b/server/app/model/__init__.py similarity index 100% rename from server/app/server_model/__init__.py rename to server/app/model/__init__.py diff --git a/server/app/server_model/descriptor.py b/server/app/model/descriptor.py similarity index 99% rename from server/app/server_model/descriptor.py rename to server/app/model/descriptor.py index 6b57bf63f..38276cd2d 100644 --- a/server/app/server_model/descriptor.py +++ b/server/app/model/descriptor.py @@ -4,7 +4,7 @@ from typing import Optional, Iterable, List from basyx.aas import model -from server.app.server_model.endpoint import Endpoint +from server.app.model.endpoint import Endpoint class Descriptor(model.HasExtension, metaclass=abc.ABCMeta): diff --git a/server/app/server_model/endpoint.py b/server/app/model/endpoint.py similarity index 100% rename from server/app/server_model/endpoint.py rename to server/app/model/endpoint.py From a96da478a8be3e7c36b93ceab0f8f60a32a99906 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Thu, 24 Apr 2025 07:35:25 +0200 Subject: [PATCH 32/52] correct typos from renaming server_model to model --- server/app/adapter/jsonization.py | 54 +++++++++++++++---------------- server/app/interfaces/registry.py | 32 +++++++++--------- 2 files changed, 43 insertions(+), 43 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 276f6c31b..cd5f01a76 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -14,8 +14,8 @@ logger = logging.getLogger(__name__) 
JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( - ('assetAdministrationShellDescriptors', model.AssetAdministrationShellDescriptor), - ('submodelDescriptors', model.SubmodelDescriptor) + ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), + ('submodelDescriptors', server_model.SubmodelDescriptor) ) @@ -40,7 +40,7 @@ def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], objec def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None: super()._amend_abstract_attributes(obj, dct) - if isinstance(obj, model.Descriptor): + if isinstance(obj, server_model.Descriptor): if 'description' in dct: obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), model.MultiLanguageTextType) @@ -54,7 +54,7 @@ def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None @classmethod def _construct_asset_administration_shell_descriptor( cls, dct: Dict[str, object], - object_class=model.AssetAdministrationShellDescriptor) -> model.AssetAdministrationShellDescriptor: + object_class=server_model.AssetAdministrationShellDescriptor) -> server_model.AssetAdministrationShellDescriptor: ret = object_class(id_=_get_ts(dct, 'id', str)) cls._amend_abstract_attributes(ret, dct) if 'administration' in dct: @@ -75,9 +75,9 @@ def _construct_asset_administration_shell_descriptor( if 'protocolInformation' in endpoint_dct: ret.endpoints.append( cls._construct_endpoint(endpoint_dct, - model.Endpoint)) + server_model.Endpoint)) elif 'href' in endpoint_dct: - protocol_info = model.ProtocolInformation( + protocol_info = server_model.ProtocolInformation( href=_get_ts(endpoint_dct['href'], 'href', str), endpoint_protocol=_get_ts(endpoint_dct['href'], 'endpointProtocol', @@ -90,7 +90,7 @@ def _construct_asset_administration_shell_descriptor( list) if 'endpointProtocolVersion' in endpoint_dct['href'] else None ) - ret.endpoints.append(model.Endpoint( + 
ret.endpoints.append(server_model.Endpoint( protocol_information=protocol_info, interface=_get_ts(endpoint_dct, 'interface', str))) @@ -98,12 +98,12 @@ def _construct_asset_administration_shell_descriptor( ret.id_short = _get_ts(dct, 'idShort', str) if 'submodelDescriptors' in dct: ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), - model.SubmodelDescriptor) + server_model.SubmodelDescriptor) return ret @classmethod def _construct_protocol_information(cls, dct: Dict[str, object], - object_class=model.ProtocolInformation) -> model.ProtocolInformation: + object_class=server_model.ProtocolInformation) -> server_model.ProtocolInformation: ret = object_class( href=_get_ts(dct, 'href', str), endpoint_protocol=_get_ts(dct, 'endpointProtocol', @@ -123,11 +123,11 @@ def _construct_protocol_information(cls, dct: Dict[str, object], @classmethod def _construct_endpoint(cls, dct: Dict[str, object], - object_class=model.Endpoint) -> model.Endpoint: + object_class=server_model.Endpoint) -> server_model.Endpoint: ret = object_class( protocol_information=cls._construct_protocol_information( _get_ts(dct, 'protocolInformation', dict), - model.ProtocolInformation + server_model.ProtocolInformation ), interface=_get_ts(dct, 'interface', str) @@ -138,7 +138,7 @@ def _construct_endpoint(cls, dct: Dict[str, object], @classmethod def _construct_submodel_descriptor( cls, dct: Dict[str, object], - object_class=model.SubmodelDescriptor) -> model.SubmodelDescriptor: + object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: ret = object_class(id_=_get_ts(dct, 'id', str), endpoints=[]) cls._amend_abstract_attributes(ret, dct) @@ -146,9 +146,9 @@ def _construct_submodel_descriptor( if 'protocolInformation' in endpoint_dct: ret.endpoints.append( cls._construct_endpoint(endpoint_dct, - model.Endpoint)) + server_model.Endpoint)) elif 'href' in endpoint_dct: - protocol_info = model.ProtocolInformation( + protocol_info = 
server_model.ProtocolInformation( href=_get_ts(endpoint_dct['href'], 'href', str), endpoint_protocol=_get_ts(endpoint_dct['href'], 'endpointProtocol', @@ -161,7 +161,7 @@ def _construct_submodel_descriptor( list) if 'endpointProtocolVersion' in endpoint_dct['href'] else None ) - ret.endpoints.append(model.Endpoint( + ret.endpoints.append(server_model.Endpoint( protocol_information=protocol_info, interface=_get_ts(endpoint_dct, 'interface', str))) @@ -179,7 +179,7 @@ def _construct_submodel_descriptor( @classmethod def _construct_asset_link( - cls, dct: Dict[str, object], object_class=model.AssetLink) -> model.AssetLink: + cls, dct: Dict[str, object], object_class=server_model.AssetLink) -> server_model.AssetLink: ret = object_class(name=_get_ts(dct, 'name', str), value=_get_ts(dct, 'value', str)) return ret @@ -225,18 +225,18 @@ class ServerAASToJsonEncoder(AASToJsonEncoder): def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: serializers = super()._get_aas_class_serializers() serializers.update({ - model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, - model.SubmodelDescriptor: cls._submodel_descriptor_to_json, - model.Endpoint: cls._endpoint_to_json, - model.ProtocolInformation: cls._protocol_information_to_json, - model.AssetLink: cls._asset_link_to_json + server_model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, + server_model.SubmodelDescriptor: cls._submodel_descriptor_to_json, + server_model.Endpoint: cls._endpoint_to_json, + server_model.ProtocolInformation: cls._protocol_information_to_json, + server_model.AssetLink: cls._asset_link_to_json }) return serializers @classmethod def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: data: Dict[str, object] = super()._abstract_classes_to_json(obj) - if isinstance(obj, model.Descriptor): + if isinstance(obj, server_model.Descriptor): if obj.description: data['description'] = obj.description if 
obj.display_name: @@ -246,7 +246,7 @@ def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: return data @classmethod - def _asset_administration_shell_descriptor_to_json(cls, obj: model.AssetAdministrationShellDescriptor) -> \ + def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> \ Dict[str, object]: """ serialization of an object from class AssetAdministrationShell to json @@ -277,7 +277,7 @@ def _asset_administration_shell_descriptor_to_json(cls, obj: model.AssetAdminist @classmethod def _protocol_information_to_json(cls, - obj: model.ProtocolInformation) -> \ + obj: server_model.ProtocolInformation) -> \ Dict[str, object]: data = cls._abstract_classes_to_json(obj) @@ -295,7 +295,7 @@ def _protocol_information_to_json(cls, return data @classmethod - def _endpoint_to_json(cls, obj: model.Endpoint) -> Dict[str, object]: + def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: data = cls._abstract_classes_to_json(obj) data['protocolInformation'] = cls._protocol_information_to_json( obj.protocol_information) @@ -303,7 +303,7 @@ def _endpoint_to_json(cls, obj: model.Endpoint) -> Dict[str, object]: return data @classmethod - def _submodel_descriptor_to_json(cls, obj: model.SubmodelDescriptor) -> Dict[str, object]: + def _submodel_descriptor_to_json(cls, obj: server_model.SubmodelDescriptor) -> Dict[str, object]: """ serialization of an object from class Submodel to json @@ -325,7 +325,7 @@ def _submodel_descriptor_to_json(cls, obj: model.SubmodelDescriptor) -> Dict[str return data @classmethod - def _asset_link_to_json(cls, obj: model.AssetLink) -> Dict[str, object]: + def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: data = cls._abstract_classes_to_json(obj) data['name'] = obj.name data['value'] = obj.value diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index 52301b506..f33ab1651 100644 --- 
a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -58,10 +58,10 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a }, strict_slashes=False) def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ - Iterator[model.AssetAdministrationShellDescriptor], int]: + Iterator[server_model.AssetAdministrationShellDescriptor], int]: - descriptors: Iterator[model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( - model.AssetAdministrationShellDescriptor + descriptors: Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( + server_model.AssetAdministrationShellDescriptor ) id_short = request.args.get("idShort") @@ -84,11 +84,11 @@ def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ paginated_descriptors, end_index = self._get_slice(request, descriptors) return paginated_descriptors, end_index - def _get_aas_descriptor(self, url_args: Dict) -> model.AssetAdministrationShellDescriptor: - return self._get_obj_ts(url_args["aas_id"], model.AssetAdministrationShellDescriptor) + def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: + return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor) - def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[model.SubmodelDescriptor], int]: - submodel_descriptors: Iterator[model.Submodel] = self._get_all_obj_of_type(model.SubmodelDescriptor) + def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: + submodel_descriptors: Iterator[server_model.SubmodelDescriptor] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) id_short = request.args.get("idShort") if id_short is not None: submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) @@ -100,8 +100,8 @@ def _get_all_submodel_descriptors(self, request: Request) -> 
Tuple[Iterator[mode paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) return paginated_submodel_descriptors, end_index - def _get_submodel_descriptor(self, url_args: Dict) -> model.SubmodelDescriptor: - return self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor) + def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor: + return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) # ------ AAS REGISTRY ROUTES ------- def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], @@ -111,7 +111,7 @@ def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - descriptor = HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, False) + descriptor = HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False) try: self.object_store.add(descriptor) except KeyError as e: @@ -130,7 +130,7 @@ def get_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: def put_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: descriptor = self._get_aas_descriptor(url_args) - descriptor.update_from(HTTPApiDecoder.request_body(request, model.AssetAdministrationShellDescriptor, + descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, is_stripped_request(request))) descriptor.commit() return response_t() @@ -177,7 +177,7 @@ def post_submodel_descriptor_through_superpath(self, map_adapter: MapAdapter) -> Response: aas_descriptor = self._get_aas_descriptor(url_args) submodel_descriptor = HTTPApiDecoder.request_body(request, - model.SubmodelDescriptor, + server_model.SubmodelDescriptor, is_stripped_request( 
request)) if any(sd.id == submodel_descriptor.id for sd in @@ -211,7 +211,7 @@ def put_submodel_descriptor_by_id_through_superpath(self, f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") submodel_descriptor.update_from( HTTPApiDecoder.request_body(request, - model.SubmodelDescriptor, + server_model.SubmodelDescriptor, is_stripped_request(request))) aas_descriptor.commit() return response_t() @@ -246,7 +246,7 @@ def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, respon def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], map_adapter: MapAdapter) -> Response: - submodel_descriptor = HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, + submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request)) try: self.object_store.add(submodel_descriptor) @@ -262,13 +262,13 @@ def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, respon **_kwargs) -> Response: submodel_descriptor = self._get_submodel_descriptor(url_args) submodel_descriptor.update_from( - HTTPApiDecoder.request_body(request, model.SubmodelDescriptor, is_stripped_request(request))) + HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) submodel_descriptor.commit() return response_t() def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: - self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], model.SubmodelDescriptor)) + self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)) return response_t() From 115db620e3895712509befb378241fca583b5d06 Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 22 May 2025 18:33:17 +0200 Subject: [PATCH 33/52] Remove discovery/registry related code --- Discovery Server/Dockerfile | 45 ---- Discovery 
Server/README.md | 63 ------ Discovery Server/app/main.py | 25 --- Discovery Server/compose.yml | 7 - Discovery Server/entrypoint.sh | 71 ------ Discovery Server/stop-supervisor.sh | 8 - Discovery Server/supervisord.ini | 27 --- Discovery Server/uwsgi.ini | 9 - Registry Server/Dockerfile | 45 ---- Registry Server/README.md | 97 -------- Registry Server/compose.yml | 7 - Registry Server/entrypoint.sh | 71 ------ Registry Server/stop-supervisor.sh | 8 - Registry Server/supervisord.ini | 27 --- Registry Server/uwsgi.ini | 9 - server/app/adapter/__init__.py | 0 server/app/adapter/jsonization.py | 332 ---------------------------- server/app/interfaces/base.py | 21 +- server/app/interfaces/discovery.py | 215 ------------------ server/app/interfaces/registry.py | 280 ----------------------- server/app/model/__init__.py | 2 - server/app/model/descriptor.py | 109 --------- server/app/model/endpoint.py | 107 --------- 23 files changed, 5 insertions(+), 1580 deletions(-) delete mode 100644 Discovery Server/Dockerfile delete mode 100644 Discovery Server/README.md delete mode 100644 Discovery Server/app/main.py delete mode 100644 Discovery Server/compose.yml delete mode 100644 Discovery Server/entrypoint.sh delete mode 100644 Discovery Server/stop-supervisor.sh delete mode 100644 Discovery Server/supervisord.ini delete mode 100644 Discovery Server/uwsgi.ini delete mode 100644 Registry Server/Dockerfile delete mode 100644 Registry Server/README.md delete mode 100644 Registry Server/compose.yml delete mode 100644 Registry Server/entrypoint.sh delete mode 100644 Registry Server/stop-supervisor.sh delete mode 100644 Registry Server/supervisord.ini delete mode 100644 Registry Server/uwsgi.ini delete mode 100644 server/app/adapter/__init__.py delete mode 100644 server/app/adapter/jsonization.py delete mode 100644 server/app/interfaces/discovery.py delete mode 100644 server/app/interfaces/registry.py delete mode 100644 server/app/model/__init__.py delete mode 100644 
server/app/model/descriptor.py delete mode 100644 server/app/model/endpoint.py diff --git a/Discovery Server/Dockerfile b/Discovery Server/Dockerfile deleted file mode 100644 index 6dc3c4cac..000000000 --- a/Discovery Server/Dockerfile +++ /dev/null @@ -1,45 +0,0 @@ -FROM python:3.11-alpine - -LABEL org.label-schema.name="Eclipse BaSyx" \ - org.label-schema.version="1.0" \ - org.label-schema.description="Docker image for the basyx-python-sdk server application" \ - org.label-schema.maintainer="Eclipse BaSyx" - -ENV PYTHONDONTWRITEBYTECODE=1 -ENV PYTHONUNBUFFERED=1 - -# If we have more dependencies for the server it would make sense -# to refactor uswgi to the pyproject.toml -RUN apk update && \ - apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ - pip install uwsgi && \ - pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ - apk del git bash - - -COPY uwsgi.ini /etc/uwsgi/ -COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini -COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh -RUN chmod +x /etc/supervisor/stop-supervisor.sh - -# Makes it possible to use a different configuration -ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini -# object stores aren't thread-safe yet -# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 -ENV UWSGI_CHEAPER=0 -ENV UWSGI_PROCESSES=1 -ENV NGINX_MAX_UPLOAD=1M -ENV NGINX_WORKER_PROCESSES=1 -ENV LISTEN_PORT=80 -ENV CLIENT_BODY_BUFFER_SIZE=1M - -# Copy the entrypoint that will generate Nginx additional configs -COPY entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -ENTRYPOINT ["/entrypoint.sh"] - -COPY ./app /app -WORKDIR /app - -CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Discovery Server/README.md b/Discovery Server/README.md deleted file mode 100644 index 0b0938167..000000000 --- a/Discovery Server/README.md +++ /dev/null @@ -1,63 +0,0 @@ -# Eclipse BaSyx Python SDK - 
Dicovery Service - -This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. -It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). - -## Overview - -The Discovery Service is a core component in the Asset Administration Shell ecosystem. Its main responsibility is to store and retrieve relations between AAS identifiers and asset identifiers. It acts as a lookup service for resolving asset-related queries to corresponding AAS. - -This implementation supports: - -- Adding links between AAS and assets -- Querying AAS by asset links -- Querying asset links by AAS ID -- Removing AAS-related asset links -- Configurable in-memory or MongoDB-based persistent storage - -## Features - -| Feature | Description | -|---------------------------------------------|-------------------------------------------------------| -| `add_asset_links` | Register specific asset ids linked to an AAS | -| `get_all_specific_asset_ids_by_aas_id` | Retrieve specific asset ids associated with an AAS | -| `search_aas_by_asset_link` | Find AAS identifiers by providing asset link values | -| `remove_asset_links_for_aas` | Delete all asset links associated with a specific AAS | - -## Specification Compliance - -- Complies with: **Discovery Service Specification v3.1.0_SSP-001** - -## Configuration - -The service can be configured to use either: - -- **In-memory storage** (default): Temporary data storage that resets on service restart. -- **MongoDB storage**: Persistent backend storage using MongoDB. 
- -### Configuration via Environment Variables - -| Variable | Description | Default | -|----------------|--------------------------------------------|-------------------------| -| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | -| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | -| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | - -## Deployment via Docker - -A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. - -## Acknowledgments - -This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. - -[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 -[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html -[3]: https://github.com/eclipse-basyx/basyx-python-sdk -[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 -[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 -[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces -[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx -[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html -[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html -[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py deleted file mode 100644 index 19c97b416..000000000 --- a/Discovery Server/app/main.py +++ /dev/null @@ -1,25 +0,0 @@ -import os -import sys -from basyx.aas.adapter.discovery import DiscoveryAPI, MongoDiscoveryStore,InMemoryDiscoveryStore - -storage_type = os.getenv("STORAGE_TYPE", "inmemory") -base_path = os.getenv("API_BASE_PATH") - -wsgi_optparams = {} - -if base_path is not None: - wsgi_optparams["base_path"] = base_path - -if storage_type == "inmemory": - application = 
DiscoveryAPI(InMemoryDiscoveryStore(), **wsgi_optparams) - -elif storage_type in "mongodb": - uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") - dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") - - application = DiscoveryAPI(MongoDiscoveryStore(uri,dbname), **wsgi_optparams) - -else: - print(f"STORAGE_TYPE must be either inmemory or mongodb! Current value: {storage_type}", - file=sys.stderr) - diff --git a/Discovery Server/compose.yml b/Discovery Server/compose.yml deleted file mode 100644 index 6e1d65404..000000000 --- a/Discovery Server/compose.yml +++ /dev/null @@ -1,7 +0,0 @@ -services: - app: - build: . - ports: - - "8084:80" - environment: - STORAGE_TYPE: inmemory diff --git a/Discovery Server/entrypoint.sh b/Discovery Server/entrypoint.sh deleted file mode 100644 index 722394409..000000000 --- a/Discovery Server/entrypoint.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env sh -set -e - -# Get the maximum upload file size for Nginx, default to 0: unlimited -USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} - -# Get the number of workers for Nginx, default to 1 -USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} - -# Set the max number of connections per worker for Nginx, if requested -# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below -NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} - -# Get the listen port for Nginx, default to 80 -USE_LISTEN_PORT=${LISTEN_PORT:-80} - -# Get the client_body_buffer_size for Nginx, default to 1M -USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} - -# Create the conf.d directory if it doesn't exist -if [ ! 
-d /etc/nginx/conf.d ]; then - mkdir -p /etc/nginx/conf.d -fi - -if [ -f /app/nginx.conf ]; then - cp /app/nginx.conf /etc/nginx/nginx.conf -else - content='user nginx;\n' - # Set the number of worker processes in Nginx - content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" - content=$content'error_log /var/log/nginx/error.log warn;\n' - content=$content'pid /var/run/nginx.pid;\n' - content=$content'events {\n' - content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" - content=$content'}\n' - content=$content'http {\n' - content=$content' include /etc/nginx/mime.types;\n' - content=$content' default_type application/octet-stream;\n' - content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" - content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" - content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" - content=$content' access_log /var/log/nginx/access.log main;\n' - content=$content' sendfile on;\n' - content=$content' keepalive_timeout 65;\n' - content=$content' include /etc/nginx/conf.d/*.conf;\n' - content=$content'}\n' - content=$content'daemon off;\n' - # Set the max number of open file descriptors for Nginx workers, if requested - if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then - content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" - fi - # Save generated /etc/nginx/nginx.conf - printf "$content" > /etc/nginx/nginx.conf - - content_server='server {\n' - content_server=$content_server" listen ${USE_LISTEN_PORT};\n" - content_server=$content_server' location / {\n' - content_server=$content_server' include uwsgi_params;\n' - content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' - content_server=$content_server' }\n' - content_server=$content_server'}\n' - # Save generated server /etc/nginx/conf.d/nginx.conf - printf "$content_server" > /etc/nginx/conf.d/nginx.conf - - # # Generate additional configuration - printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf - printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf - printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf -fi - -exec "$@" diff --git a/Discovery Server/stop-supervisor.sh b/Discovery Server/stop-supervisor.sh deleted file mode 100644 index 9a953c94b..000000000 --- a/Discovery Server/stop-supervisor.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env sh - -printf "READY\n" - -while read line; do - echo "Processing Event: $line" >&2 - kill $PPID -done < /dev/stdin diff --git a/Discovery Server/supervisord.ini b/Discovery Server/supervisord.ini deleted file mode 100644 index d73d98014..000000000 --- a/Discovery Server/supervisord.ini +++ /dev/null @@ -1,27 +0,0 @@ -[supervisord] -nodaemon=true - -[program:uwsgi] -command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 -startsecs = 0 -autorestart=false -# may make sense to have autorestart enabled in production - -[program:nginx] -command=/usr/sbin/nginx -stdout_logfile=/var/log/nginx.out.log -stdout_logfile_maxbytes=0 -stderr_logfile=/var/log/nginx.err.log -stderr_logfile_maxbytes=0 -stopsignal=QUIT -startsecs = 0 -autorestart=false -# may make sense to have autorestart enabled in production - -[eventlistener:quit_on_failure] -events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL -command=/etc/supervisor/stop-supervisor.sh diff --git a/Discovery Server/uwsgi.ini b/Discovery Server/uwsgi.ini deleted file mode 100644 index 9c54ae1cc..000000000 --- a/Discovery Server/uwsgi.ini +++ /dev/null @@ -1,9 +0,0 @@ -[uwsgi] -wsgi-file = /app/main.py -socket = /tmp/uwsgi.sock -chown-socket = nginx:nginx -chmod-socket = 664 -hook-master-start = unix_signal:15 gracefully_kill_them_all -need-app = true -die-on-term = true -show-config = 
false diff --git a/Registry Server/Dockerfile b/Registry Server/Dockerfile deleted file mode 100644 index 3d52a15ab..000000000 --- a/Registry Server/Dockerfile +++ /dev/null @@ -1,45 +0,0 @@ -FROM python:3.11-alpine - -LABEL org.label-schema.name="Eclipse BaSyx" \ - org.label-schema.version="1.0" \ - org.label-schema.description="Docker image for the basyx-python-sdk server application" \ - org.label-schema.maintainer="Eclipse BaSyx" - -ENV PYTHONDONTWRITEBYTECODE=1 -ENV PYTHONUNBUFFERED=1 - -# If we have more dependencies for the server it would make sense -# to refactor uswgi to the pyproject.toml -RUN apk update && \ - apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ - pip install uwsgi && \ - pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ - apk del git bash - - -COPY uwsgi.ini /etc/uwsgi/ -COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini -COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh -RUN chmod +x /etc/supervisor/stop-supervisor.sh - -# Makes it possible to use a different configuration -ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini -# object stores aren't thread-safe yet -# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 -ENV UWSGI_CHEAPER=0 -ENV UWSGI_PROCESSES=1 -ENV NGINX_MAX_UPLOAD=1M -ENV NGINX_WORKER_PROCESSES=1 -ENV LISTEN_PORT=80 -ENV CLIENT_BODY_BUFFER_SIZE=1M - -# Copy the entrypoint that will generate Nginx additional configs -COPY entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -ENTRYPOINT ["/entrypoint.sh"] - -COPY ../server/app /app -WORKDIR /app - -CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Registry Server/README.md b/Registry Server/README.md deleted file mode 100644 index 339226c53..000000000 --- a/Registry Server/README.md +++ /dev/null @@ -1,97 +0,0 @@ -# Eclipse BaSyx Python SDK - HTTP Server - -This package contains a Dockerfile to spin up an exemplary 
HTTP/REST server following the [Specification of the AAS Part 2 API][6] with ease. -The server currently implements the following interfaces: - -- [Asset Administration Shell Repository Service][4] -- [Submodel Repository Service][5] - -It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory. -The files are only read, changes won't persist. - -Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements). -See [below](#options) on how to configure this. - -## Building -The container image can be built via: -``` -$ docker buildx build -t basyx-python-sdk-http-server . -``` - -## Running - -### Storage -The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. - -This directory can be mapped via the `-v` option from another image or a local directory. -To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. -The directory `storage` will be created in the current working directory, if it doesn't already exist. - -### Port -The HTTP server inside the container listens on port 80 by default. -To expose it on the host on port 8080, use the option `-p 8080:80` when running it. - -### Options -The container can be configured via environment variables: -- `API_BASE_PATH` determines the base path under which all other API paths are made available. - Default: `/api/v3.0` -- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: - - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. 
- The files are not modified, all changes done via the API are only stored in memory. - - When instead set to `LOCAL_FILE`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. - Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. -- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. - -### Running Examples - -Putting it all together, the container can be started via the following command: -``` -$ docker run -p 8080:80 -v ./storage:/storage basyx-python-sdk-http-server -``` - -Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there: -``` -> docker run -p 8080:80 -v .\storage:/storage basyx-python-sdk-http-server -``` - -Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. If you want to change this, you can do so like this: -``` -$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-sdk-http-server -``` - -## Building and running the image with docker-compose - -The container image can also be built and run via: -``` -$ docker compose up -``` - -This is the exemplary `docker-compose` file for the server: -````yaml -services: - app: - build: . - ports: - - "8080:80" - volumes: - - ./storage:/storage - -```` - -Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system. -To get a different setup this compose.yaml file can be adapted and expanded. - -## Acknowledgments - -This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. 
- -[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 -[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html -[3]: https://github.com/eclipse-basyx/basyx-python-sdk -[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 -[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 -[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces -[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx -[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html -[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html -[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Registry Server/compose.yml b/Registry Server/compose.yml deleted file mode 100644 index 932422dbc..000000000 --- a/Registry Server/compose.yml +++ /dev/null @@ -1,7 +0,0 @@ -services: - app: - build: . 
- ports: - - "8083:80" - volumes: - - ./storage:/storage diff --git a/Registry Server/entrypoint.sh b/Registry Server/entrypoint.sh deleted file mode 100644 index 722394409..000000000 --- a/Registry Server/entrypoint.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env sh -set -e - -# Get the maximum upload file size for Nginx, default to 0: unlimited -USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} - -# Get the number of workers for Nginx, default to 1 -USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} - -# Set the max number of connections per worker for Nginx, if requested -# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below -NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} - -# Get the listen port for Nginx, default to 80 -USE_LISTEN_PORT=${LISTEN_PORT:-80} - -# Get the client_body_buffer_size for Nginx, default to 1M -USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} - -# Create the conf.d directory if it doesn't exist -if [ ! -d /etc/nginx/conf.d ]; then - mkdir -p /etc/nginx/conf.d -fi - -if [ -f /app/nginx.conf ]; then - cp /app/nginx.conf /etc/nginx/nginx.conf -else - content='user nginx;\n' - # Set the number of worker processes in Nginx - content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" - content=$content'error_log /var/log/nginx/error.log warn;\n' - content=$content'pid /var/run/nginx.pid;\n' - content=$content'events {\n' - content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" - content=$content'}\n' - content=$content'http {\n' - content=$content' include /etc/nginx/mime.types;\n' - content=$content' default_type application/octet-stream;\n' - content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" - content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" - content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" - content=$content' access_log /var/log/nginx/access.log main;\n' - content=$content' 
sendfile on;\n' - content=$content' keepalive_timeout 65;\n' - content=$content' include /etc/nginx/conf.d/*.conf;\n' - content=$content'}\n' - content=$content'daemon off;\n' - # Set the max number of open file descriptors for Nginx workers, if requested - if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then - content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" - fi - # Save generated /etc/nginx/nginx.conf - printf "$content" > /etc/nginx/nginx.conf - - content_server='server {\n' - content_server=$content_server" listen ${USE_LISTEN_PORT};\n" - content_server=$content_server' location / {\n' - content_server=$content_server' include uwsgi_params;\n' - content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' - content_server=$content_server' }\n' - content_server=$content_server'}\n' - # Save generated server /etc/nginx/conf.d/nginx.conf - printf "$content_server" > /etc/nginx/conf.d/nginx.conf - - # # Generate additional configuration - printf "client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf - printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf - printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf -fi - -exec "$@" diff --git a/Registry Server/stop-supervisor.sh b/Registry Server/stop-supervisor.sh deleted file mode 100644 index 9a953c94b..000000000 --- a/Registry Server/stop-supervisor.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env sh - -printf "READY\n" - -while read line; do - echo "Processing Event: $line" >&2 - kill $PPID -done < /dev/stdin diff --git a/Registry Server/supervisord.ini b/Registry Server/supervisord.ini deleted file mode 100644 index d73d98014..000000000 --- a/Registry Server/supervisord.ini +++ /dev/null @@ -1,27 +0,0 @@ -[supervisord] -nodaemon=true - -[program:uwsgi] -command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 
-stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 -startsecs = 0 -autorestart=false -# may make sense to have autorestart enabled in production - -[program:nginx] -command=/usr/sbin/nginx -stdout_logfile=/var/log/nginx.out.log -stdout_logfile_maxbytes=0 -stderr_logfile=/var/log/nginx.err.log -stderr_logfile_maxbytes=0 -stopsignal=QUIT -startsecs = 0 -autorestart=false -# may make sense to have autorestart enabled in production - -[eventlistener:quit_on_failure] -events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL -command=/etc/supervisor/stop-supervisor.sh diff --git a/Registry Server/uwsgi.ini b/Registry Server/uwsgi.ini deleted file mode 100644 index 9c54ae1cc..000000000 --- a/Registry Server/uwsgi.ini +++ /dev/null @@ -1,9 +0,0 @@ -[uwsgi] -wsgi-file = /app/main.py -socket = /tmp/uwsgi.sock -chown-socket = nginx:nginx -chmod-socket = 664 -hook-master-start = unix_signal:15 gracefully_kill_them_all -need-app = true -die-on-term = true -show-config = false diff --git a/server/app/adapter/__init__.py b/server/app/adapter/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py deleted file mode 100644 index cd5f01a76..000000000 --- a/server/app/adapter/jsonization.py +++ /dev/null @@ -1,332 +0,0 @@ -from typing import Dict, Set, Optional, Type - -import server.app.model as server_model -from basyx.aas import model -from basyx.aas.adapter import _generic -from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO -from basyx.aas.adapter.json import AASToJsonEncoder -from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES, \ - read_aas_json_file_into - -import logging -from typing import Callable - -logger = logging.getLogger(__name__) - -JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( - ('assetAdministrationShellDescriptors', 
server_model.AssetAdministrationShellDescriptor), - ('submodelDescriptors', server_model.SubmodelDescriptor) -) - - -class ServerAASFromJsonDecoder(AASFromJsonDecoder): - @classmethod - def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: - aas_class_parsers = super()._get_aas_class_parsers() - aas_class_parsers.update({ - 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, - 'SubmodelDescriptor': cls._construct_submodel_descriptor, - 'AssetLink': cls._construct_asset_link, - 'ProtocolInformation': cls._construct_protocol_information, - 'Endpoint': cls._construct_endpoint - }) - return aas_class_parsers - - # ################################################################################################## - # Utility Methods used in constructor methods to add general attributes (from abstract base classes) - # ################################################################################################## - - @classmethod - def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None: - super()._amend_abstract_attributes(obj, dct) - - if isinstance(obj, server_model.Descriptor): - if 'description' in dct: - obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), - model.MultiLanguageTextType) - if 'displayName' in dct: - obj.display_name = cls._construct_lang_string_set(_get_ts(dct, 'displayName', list), - model.MultiLanguageNameType) - if 'extensions' in dct: - for extension in _get_ts(dct, 'extensions', list): - obj.extension.add(cls._construct_extension(extension)) - - @classmethod - def _construct_asset_administration_shell_descriptor( - cls, dct: Dict[str, object], - object_class=server_model.AssetAdministrationShellDescriptor) -> server_model.AssetAdministrationShellDescriptor: - ret = object_class(id_=_get_ts(dct, 'id', str)) - cls._amend_abstract_attributes(ret, dct) - if 'administration' in dct: - ret.administration = 
cls._construct_administrative_information(_get_ts(dct, 'administration', dict)) - if 'assetKind' in dct: - ret.asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] - if 'assetType' in dct: - ret.asset_type = _get_ts(dct, 'assetType', str) - global_asset_id = None - if 'globalAssetId' in dct: - ret.global_asset_id = _get_ts(dct, 'globalAssetId', str) - specific_asset_id = set() - if 'specificAssetIds' in dct: - for desc_data in _get_ts(dct, "specificAssetIds", list): - specific_asset_id.add(cls._construct_specific_asset_id(desc_data, model.SpecificAssetId)) - if 'endpoints' in dct: - for endpoint_dct in _get_ts(dct, 'endpoints', list): - if 'protocolInformation' in endpoint_dct: - ret.endpoints.append( - cls._construct_endpoint(endpoint_dct, - server_model.Endpoint)) - elif 'href' in endpoint_dct: - protocol_info = server_model.ProtocolInformation( - href=_get_ts(endpoint_dct['href'], 'href', str), - endpoint_protocol=_get_ts(endpoint_dct['href'], - 'endpointProtocol', - str) if 'endpointProtocol' in - endpoint_dct[ - 'href'] else None, - endpoint_protocol_version=_get_ts( - endpoint_dct['href'], - 'endpointProtocolVersion', - list) if 'endpointProtocolVersion' in - endpoint_dct['href'] else None - ) - ret.endpoints.append(server_model.Endpoint( - protocol_information=protocol_info, - interface=_get_ts(endpoint_dct, 'interface', - str))) - if 'idShort' in dct: - ret.id_short = _get_ts(dct, 'idShort', str) - if 'submodelDescriptors' in dct: - ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), - server_model.SubmodelDescriptor) - return ret - - @classmethod - def _construct_protocol_information(cls, dct: Dict[str, object], - object_class=server_model.ProtocolInformation) -> server_model.ProtocolInformation: - ret = object_class( - href=_get_ts(dct, 'href', str), - endpoint_protocol=_get_ts(dct, 'endpointProtocol', - str) if 'endpointProtocol' in dct else None, - endpoint_protocol_version=_get_ts(dct, - 
'endpointProtocolVersion', - list) if 'endpointProtocolVersion' in dct else None, - subprotocol=_get_ts(dct, 'subprotocol', - str) if 'subprotocol' in dct else None, - subprotocol_body=_get_ts(dct, 'subprotocolBody', - str) if 'subprotocolBody' in dct else None, - subprotocol_body_encoding=_get_ts(dct, - 'subprotocolBodyEncoding', - str) if 'subprotocolBodyEncoding' in dct else None - ) - return ret - - @classmethod - def _construct_endpoint(cls, dct: Dict[str, object], - object_class=server_model.Endpoint) -> server_model.Endpoint: - ret = object_class( - protocol_information=cls._construct_protocol_information( - _get_ts(dct, 'protocolInformation', dict), - server_model.ProtocolInformation - ), - interface=_get_ts(dct, 'interface', - str) - ) - cls._amend_abstract_attributes(ret, dct) - return ret - - @classmethod - def _construct_submodel_descriptor( - cls, dct: Dict[str, object], - object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: - ret = object_class(id_=_get_ts(dct, 'id', str), - endpoints=[]) - cls._amend_abstract_attributes(ret, dct) - for endpoint_dct in _get_ts(dct, 'endpoints', list): - if 'protocolInformation' in endpoint_dct: - ret.endpoints.append( - cls._construct_endpoint(endpoint_dct, - server_model.Endpoint)) - elif 'href' in endpoint_dct: - protocol_info = server_model.ProtocolInformation( - href=_get_ts(endpoint_dct['href'], 'href', str), - endpoint_protocol=_get_ts(endpoint_dct['href'], - 'endpointProtocol', - str) if 'endpointProtocol' in - endpoint_dct[ - 'href'] else None, - endpoint_protocol_version=_get_ts( - endpoint_dct['href'], - 'endpointProtocolVersion', - list) if 'endpointProtocolVersion' in - endpoint_dct['href'] else None - ) - ret.endpoints.append(server_model.Endpoint( - protocol_information=protocol_info, - interface=_get_ts(endpoint_dct, 'interface', - str))) - if 'administration' in dct: - ret.administration = cls._construct_administrative_information( - _get_ts(dct, 'administration', dict)) - 
if 'idShort' in dct: - ret.id_short = _get_ts(dct, 'idShort', str) - if 'semanticId' in dct: - ret.semantic_id = cls._construct_reference(_get_ts(dct, 'semanticId', dict)) - if 'supplementalSemanticIds' in dct: - for ref in _get_ts(dct, 'supplementalSemanticIds', list): - ret.supplemental_semantic_id.append(cls._construct_reference(ref)) - return ret - - @classmethod - def _construct_asset_link( - cls, dct: Dict[str, object], object_class=server_model.AssetLink) -> server_model.AssetLink: - ret = object_class(name=_get_ts(dct, 'name', str), - value=_get_ts(dct, 'value', str)) - return ret - - -class ServerStrictAASFromJsonDecoder(ServerAASFromJsonDecoder): - """ - A strict version of the AASFromJsonDecoder class for deserializing Asset Administration Shell data from the - official JSON format - - This version has set ``failsafe = False``, which will lead to Exceptions raised for every missing attribute or wrong - object type. - """ - failsafe = False - - -class ServerStrippedAASFromJsonDecoder(ServerAASFromJsonDecoder): - """ - Decoder for stripped JSON objects. Used in the HTTP adapter. - """ - stripped = True - - -class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, ServerStrippedAASFromJsonDecoder): - """ - Non-failsafe decoder for stripped JSON objects. 
- """ - pass - - -def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, - replace_existing: bool = False, - ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, - decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: - return read_aas_json_file_into(object_store=object_store, file=file, replace_existing=replace_existing, - ignore_existing=ignore_existing, failsafe=failsafe, stripped=stripped, - decoder=decoder, keys_to_types=JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES) - - -class ServerAASToJsonEncoder(AASToJsonEncoder): - - @classmethod - def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: - serializers = super()._get_aas_class_serializers() - serializers.update({ - server_model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, - server_model.SubmodelDescriptor: cls._submodel_descriptor_to_json, - server_model.Endpoint: cls._endpoint_to_json, - server_model.ProtocolInformation: cls._protocol_information_to_json, - server_model.AssetLink: cls._asset_link_to_json - }) - return serializers - - @classmethod - def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: - data: Dict[str, object] = super()._abstract_classes_to_json(obj) - if isinstance(obj, server_model.Descriptor): - if obj.description: - data['description'] = obj.description - if obj.display_name: - data['displayName'] = obj.display_name - if obj.extension: - data['extensions'] = list(obj.extension) - return data - - @classmethod - def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> \ - Dict[str, object]: - """ - serialization of an object from class AssetAdministrationShell to json - - :param obj: object of class AssetAdministrationShell - :return: dict with the serialized attributes of this object - """ - data = cls._abstract_classes_to_json(obj) - data.update(cls._namespace_to_json(obj)) - data['id'] = 
obj.id - if obj.administration: - data['administration'] = obj.administration - if obj.asset_kind: - data['assetKind'] = _generic.ASSET_KIND[obj.asset_kind] - if obj.asset_type: - data['assetType'] = obj.asset_type - if obj.global_asset_id: - data['globalAssetId'] = obj.global_asset_id - if obj.specific_asset_id: - data['specificAssetIds'] = list(obj.specific_asset_id) - if obj.endpoints: - data['endpoints'] = list(obj.endpoints) - if obj.id_short: - data['idShort'] = obj.id_short - if obj.submodel_descriptors: - data['submodelDescriptors'] = list(obj.submodel_descriptors) - return data - - @classmethod - def _protocol_information_to_json(cls, - obj: server_model.ProtocolInformation) -> \ - Dict[str, object]: - data = cls._abstract_classes_to_json(obj) - - data['href'] = obj.href - if obj.endpoint_protocol: - data['endpointProtocol'] = obj.endpoint_protocol - if obj.endpoint_protocol_version: - data['endpointProtocolVersion'] = obj.endpoint_protocol_version - if obj.subprotocol: - data['subprotocol'] = obj.subprotocol - if obj.subprotocol_body: - data['subprotocolBody'] = obj.subprotocol_body - if obj.subprotocol_body_encoding: - data['subprotocolBodyEncoding'] = obj.subprotocol_body_encoding - return data - - @classmethod - def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: - data = cls._abstract_classes_to_json(obj) - data['protocolInformation'] = cls._protocol_information_to_json( - obj.protocol_information) - data['interface'] = obj.interface - return data - - @classmethod - def _submodel_descriptor_to_json(cls, obj: server_model.SubmodelDescriptor) -> Dict[str, object]: - """ - serialization of an object from class Submodel to json - - :param obj: object of class Submodel - :return: dict with the serialized attributes of this object - """ - data = cls._abstract_classes_to_json(obj) - data['id'] = obj.id - data['endpoints'] = [cls._endpoint_to_json(ep) for ep in - obj.endpoints] - if obj.id_short: - data['idShort'] = obj.id_short - if 
obj.administration: - data['administration'] = obj.administration - if obj.semantic_id: - data['semanticId'] = obj.semantic_id - if obj.supplemental_semantic_id: - data['supplementalSemanticIds'] = list(obj.supplemental_semantic_id) - return data - - @classmethod - def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: - data = cls._abstract_classes_to_json(obj) - data['name'] = obj.name - data['value'] = obj.value - return data diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index cc2ac9a4a..caabb4311 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -16,11 +16,9 @@ from basyx.aas import model from basyx.aas.adapter._generic import XML_NS_MAP +from basyx.aas.adapter.json import StrictStrippedAASFromJsonDecoder, StrictAASFromJsonDecoder, AASToJsonEncoder from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element from basyx.aas.model import AbstractObjectStore -from server.app import model as server_model -from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, \ - ServerStrictStrippedAASFromJsonDecoder from server.app.util.converters import base64url_decode @@ -155,7 +153,7 @@ def __init__(self, *args, content_type="text/xml", **kwargs): super().__init__(*args, **kwargs, content_type=content_type) -class ResultToJsonEncoder(ServerAASToJsonEncoder): +class ResultToJsonEncoder(AASToJsonEncoder): @classmethod def _result_to_json(cls, result: Result) -> Dict[str, object]: return { @@ -288,12 +286,7 @@ class HTTPApiDecoder: @classmethod def check_type_support(cls, type_: type): - tolerated_types = ( - server_model.AssetAdministrationShellDescriptor, - server_model.SubmodelDescriptor, - server_model.AssetLink, - ) - if type_ not in cls.type_constructables_map and type_ not in tolerated_types: + if type_ not in cls.type_constructables_map: raise TypeError(f"Parsing {type_} is not supported!") @classmethod @@ 
-305,8 +298,8 @@ def assert_type(cls, obj: object, type_: Type[T]) -> T: @classmethod def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: cls.check_type_support(expect_type) - decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \ - else ServerStrictAASFromJsonDecoder + decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ + else StrictAASFromJsonDecoder try: parsed = json.loads(data, cls=decoder) if isinstance(parsed, list) and expect_single: @@ -325,10 +318,6 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool model.SpecificAssetId: decoder._construct_specific_asset_id, # type: ignore[assignment] model.Reference: decoder._construct_reference, # type: ignore[assignment] model.Qualifier: decoder._construct_qualifier, # type: ignore[assignment] - server_model.AssetAdministrationShellDescriptor: - decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] - server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor, # type: ignore[assignment] - server_model.AssetLink: decoder._construct_asset_link, # type: ignore[assignment] } constructor: Optional[Callable[..., T]] = mapping.get(expect_type) diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py deleted file mode 100644 index b08448b06..000000000 --- a/server/app/interfaces/discovery.py +++ /dev/null @@ -1,215 +0,0 @@ -""" -This module implements the Discovery interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'. 
-""" - -import abc -import json -from typing import Dict, List, Set, Any - -import werkzeug.exceptions -from pymongo import MongoClient -from pymongo.collection import Collection -from werkzeug.routing import Rule, Submount -from werkzeug.wrappers import Request, Response - -from basyx.aas import model -from server.app.util.converters import Base64URLConverter -from server.app.interfaces.base import BaseWSGIApp, HTTPApiDecoder -from .. import model as server_model -from ..adapter.jsonization import ServerAASToJsonEncoder - -encoder=ServerAASToJsonEncoder() - -class AbstractDiscoveryStore(metaclass=abc.ABCMeta): - aas_id_to_asset_ids: Any - asset_id_to_aas_ids: Any - - @abc.abstractmethod - def __init__(self): - pass - - @abc.abstractmethod - def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: - pass - - @abc.abstractmethod - def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: - pass - - @abc.abstractmethod - def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: - pass - - @abc.abstractmethod - def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: - pass - - @abc.abstractmethod - def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: - pass - - @abc.abstractmethod - def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - pass - - - -class InMemoryDiscoveryStore(AbstractDiscoveryStore): - def __init__(self): - self.aas_id_to_asset_ids: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} - self.asset_id_to_aas_ids: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} - - def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: - return list(self.aas_id_to_asset_ids.get(aas_id, set())) - - def 
add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, - asset_ids: List[model.SpecificAssetId]) -> None: - serialized_assets = [encoder.default(asset_id) for asset_id in asset_ids] - if aas_id in self.aas_id_to_asset_ids: - for asset in serialized_assets: - if asset not in self.aas_id_to_asset_ids[aas_id]: - self.aas_id_to_asset_ids[aas_id].append(asset) - else: - self.aas_id_to_asset_ids[aas_id] = serialized_assets[:] - - def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: - key = aas_id - if key in self.aas_id_to_asset_ids: - del self.aas_id_to_asset_ids[key] - - def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: - result = [] - for asset_key, aas_ids in self.asset_id_to_aas_ids.items(): - expected_key = f"{asset_link.name}:{asset_link.value}" - if asset_key == expected_key: - result.extend(list(aas_ids)) - return result - - def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - asset_key = f"{asset_id.name}:{asset_id.value}" - if asset_key in self.asset_id_to_aas_ids: - self.asset_id_to_aas_ids[asset_key].add(aas_id) - else: - self.asset_id_to_aas_ids[asset_key] = {aas_id} - - def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - asset_key = f"{asset_id.name}:{asset_id.value}" - if asset_key in self.asset_id_to_aas_ids: - self.asset_id_to_aas_ids[asset_key].discard(aas_id) - - -class MongoDiscoveryStore(AbstractDiscoveryStore): - def __init__(self, - uri: str = "mongodb://localhost:27017", - db_name: str = "basyx", - coll_aas_to_assets: str = "aas_to_assets", - coll_asset_to_aas: str = "asset_to_aas"): - self.client = MongoClient(uri) - self.db = self.client[db_name] - self.coll_aas_to_assets: Collection = self.db[coll_aas_to_assets] - self.coll_asset_to_aas: Collection = self.db[coll_asset_to_aas] - # Create an index for fast asset reverse lookups. 
- self.coll_asset_to_aas.create_index("_id") - - def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: - key = aas_id - doc = self.coll_aas_to_assets.find_one({"_id": key}) - return doc["asset_ids"] if doc and "asset_ids" in doc else [] - - def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: - key = aas_id - # Convert each SpecificAssetId using the serialization helper. - serializable_assets = [encoder.default(asset_id) for asset_id in asset_ids] - self.coll_aas_to_assets.update_one( - {"_id": key}, - {"$addToSet": {"asset_ids": {"$each": serializable_assets}}}, - upsert=True - ) - - def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: - key = aas_id - self.coll_aas_to_assets.delete_one({"_id": key}) - - def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: - # Query MongoDB for specificAssetIds where 'name' and 'value' match - doc = self.coll_asset_to_aas.find_one({ - "name": asset_link.name, - "value": asset_link.value - }) - return doc["aas_ids"] if doc and "aas_ids" in doc else [] - - def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - asset_key = str(encoder.default(asset_id)) - self.coll_asset_to_aas.update_one( - {"_id": asset_key}, - {"$addToSet": {"aas_ids": aas_id}}, - upsert=True - ) - - def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: - asset_key = str(encoder.default(asset_id)) - self.coll_asset_to_aas.update_one( - {"_id": asset_key}, - {"$pull": {"aas_ids": aas_id}} - ) - - -class DiscoveryAPI(BaseWSGIApp): - def __init__(self, - persistent_store: AbstractDiscoveryStore, base_path: str = "/api/v3.0"): - self.persistent_store: AbstractDiscoveryStore = persistent_store - self.url_map = werkzeug.routing.Map([ - Submount(base_path, [ - 
Rule("/lookup/shellsByAssetLink", methods=["POST"], - endpoint=self.search_all_aas_ids_by_asset_link), - Submount("/lookup/shells", [ - Rule("/", methods=["GET"], - endpoint=self.get_all_specific_asset_ids_by_aas_id), - Rule("/", methods=["POST"], - endpoint=self.post_all_asset_links_by_id), - Rule("/", methods=["DELETE"], - endpoint=self.delete_all_asset_links_by_id), - ]), - ]) - ], converters={ - "base64url": Base64URLConverter - }, strict_slashes=False) - - def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, - **_kwargs) -> Response: - asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False) - matching_aas_keys = set() - for asset_link in asset_links: - aas_keys = self.persistent_store.search_aas_ids_by_asset_link(asset_link) - matching_aas_keys.update(aas_keys) - matching_aas_keys = list(matching_aas_keys) - paginated_slice, cursor = self._get_slice(request, matching_aas_keys) - return response_t(list(paginated_slice), cursor=cursor) - - def get_all_specific_asset_ids_by_aas_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: - aas_identifier = url_args.get("aas_id") - asset_ids = self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier) - return response_t(asset_ids) - - def post_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: - aas_identifier = url_args.get("aas_id") - specific_asset_ids = HTTPApiDecoder.request_body_list(request, model.SpecificAssetId, False) - self.persistent_store.add_specific_asset_ids_to_aas(aas_identifier, specific_asset_ids) - for asset_id in specific_asset_ids: - self.persistent_store._add_aas_id_to_specific_asset_id(asset_id, aas_identifier) - updated = {aas_identifier: self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier)} - return response_t(updated) - - def delete_all_asset_links_by_id(self, request: Request, url_args: dict, 
response_t: type, **_kwargs) -> Response: - aas_identifier = url_args.get("aas_id") - self.persistent_store.delete_specific_asset_ids_by_aas_id(aas_identifier) - for key in list(self.persistent_store.asset_id_to_aas_ids.keys()): - self.persistent_store.asset_id_to_aas_ids[key].discard(aas_identifier) - return response_t() - - -if __name__ == "__main__": - from werkzeug.serving import run_simple - - run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), - use_debugger=True, use_reloader=True) diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py deleted file mode 100644 index f33ab1651..000000000 --- a/server/app/interfaces/registry.py +++ /dev/null @@ -1,280 +0,0 @@ -""" -This module implements the Registry interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'. -""" - -from typing import Dict, Iterator, List, Type, Tuple - -import werkzeug.exceptions -import werkzeug.routing -import werkzeug.urls -import werkzeug.utils -from werkzeug.exceptions import Conflict, NotFound -from werkzeug.routing import MapAdapter, Rule, Submount -from werkzeug.wrappers import Request, Response - -import server.app.model as server_model -from basyx.aas import model -from server.app.util.converters import Base64URLConverter -from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder - - -class RegistryAPI(ObjectStoreWSGIApp): - def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/api/v3.0"): - self.object_store: model.AbstractObjectStore = object_store - self.url_map = werkzeug.routing.Map([ - Submount(base_path, [ - Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_all_aas_descriptors), - Rule("/shell-descriptors", methods=["POST"], endpoint=self.post_aas_descriptor), - Submount("/shell-descriptors", [ - Rule("/", methods=["GET"], endpoint=self.get_aas_descriptor_by_id), - Rule("/", 
methods=["PUT"], endpoint=self.put_aas_descriptor_by_id), - Rule("/", methods=["DELETE"], endpoint=self.delete_aas_descriptor_by_id), - Submount("/", [ - Rule("/submodel-descriptors", methods=["GET"], - endpoint=self.get_all_submodel_descriptors_through_superpath), - Rule("/submodel-descriptors", methods=["POST"], - endpoint=self.post_submodel_descriptor_through_superpath), - Submount("/submodel-descriptors", [ - Rule("/", methods=["GET"], - endpoint=self.get_submodel_descriptor_by_id_through_superpath), - Rule("/", methods=["PUT"], - endpoint=self.put_submodel_descriptor_by_id_through_superpath), - Rule("/", methods=["DELETE"], - endpoint=self.delete_submodel_descriptor_by_id_through_superpath), - ]) - ]) - ]), - Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors), - Rule("/submodel-descriptors", methods=["POST"], endpoint=self.post_submodel_descriptor), - Submount("/submodel-descriptors", [ - Rule("/", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id), - Rule("/", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id), - Rule("/", methods=["DELETE"], - endpoint=self.delete_submodel_descriptor_by_id), - ]) - ]) - ], converters={ - "base64url": Base64URLConverter - }, strict_slashes=False) - - def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ - Iterator[server_model.AssetAdministrationShellDescriptor], int]: - - descriptors: Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( - server_model.AssetAdministrationShellDescriptor - ) - - id_short = request.args.get("idShort") - if id_short is not None: - descriptors = filter(lambda desc: desc.id_short == id_short, descriptors) - - asset_ids = request.args.getlist("assetIds") - if asset_ids is not None: - # Decode and instantiate SpecificAssetIds - specific_asset_ids: List[model.SpecificAssetId] = list( - map(lambda asset_id: HTTPApiDecoder.base64url_json(asset_id, model.SpecificAssetId, False), asset_ids) - ) - # 
Filter AAS based on these SpecificAssetIds - descriptors = filter( - lambda desc: all(specific_asset_id in desc.specific_asset_id - for specific_asset_id in specific_asset_ids), - descriptors - ) - - paginated_descriptors, end_index = self._get_slice(request, descriptors) - return paginated_descriptors, end_index - - def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: - return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor) - - def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: - submodel_descriptors: Iterator[server_model.SubmodelDescriptor] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) - id_short = request.args.get("idShort") - if id_short is not None: - submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) - semantic_id = request.args.get("semanticId") - if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64url_json( - semantic_id, model.Reference, False) # type: ignore[type-abstract] - submodel_descriptors = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodel_descriptors) - paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) - return paginated_submodel_descriptors, end_index - - def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor: - return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) - - # ------ AAS REGISTRY ROUTES ------- - def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - aas_descriptors, cursor = self._get_all_aas_descriptors(request) - return response_t(list(aas_descriptors), cursor=cursor) - - def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - map_adapter: MapAdapter) -> Response: - descriptor = 
HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False) - try: - self.object_store.add(descriptor) - except KeyError as e: - raise Conflict(f"AssetAdministrationShellDescriptor with Identifier {descriptor.id} already exists!") from e - descriptor.commit() - created_resource_url = map_adapter.build(self.get_aas_descriptor_by_id, { - "aas_id": descriptor.id - }, force_external=True) - return response_t(descriptor, status=201, headers={"Location": created_resource_url}) - - def get_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - descriptor = self._get_aas_descriptor(url_args) - return response_t(descriptor) - - def put_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - descriptor = self._get_aas_descriptor(url_args) - descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, - is_stripped_request(request))) - descriptor.commit() - return response_t() - - def delete_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - descriptor = self._get_aas_descriptor(url_args) - self.object_store.remove(descriptor) - return response_t() - - def get_all_submodel_descriptors_through_superpath(self, - request: Request, - url_args: Dict, - response_t: Type[ - APIResponse], - **_kwargs) -> Response: - aas_descriptor = self._get_aas_descriptor(url_args) - submodel_descriptors, cursor = self._get_slice(request, - aas_descriptor.submodel_descriptors) - return response_t(list(submodel_descriptors), cursor=cursor) - - def get_submodel_descriptor_by_id_through_superpath(self, - request: Request, - url_args: Dict, - response_t: - Type[ - APIResponse], - **_kwargs) -> Response: - aas_descriptor = self._get_aas_descriptor(url_args) - submodel_id = url_args["submodel_id"] - submodel_descriptor = next( - 
(sd for sd in aas_descriptor.submodel_descriptors if - sd.id == submodel_id), None) - if submodel_descriptor is None: - raise NotFound( - f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") - return response_t(submodel_descriptor) - - def post_submodel_descriptor_through_superpath(self, - request: Request, - url_args: Dict, - response_t: Type[ - APIResponse], - map_adapter: MapAdapter) -> Response: - aas_descriptor = self._get_aas_descriptor(url_args) - submodel_descriptor = HTTPApiDecoder.request_body(request, - server_model.SubmodelDescriptor, - is_stripped_request( - request)) - if any(sd.id == submodel_descriptor.id for sd in - aas_descriptor.submodel_descriptors): - raise Conflict( - f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") - aas_descriptor.submodel_descriptors.append(submodel_descriptor) - aas_descriptor.commit() - created_resource_url = map_adapter.build( - self.get_submodel_descriptor_by_id_through_superpath, { - "aas_id": aas_descriptor.id, - "submodel_id": submodel_descriptor.id - }, force_external=True) - return response_t(submodel_descriptor, status=201, - headers={"Location": created_resource_url}) - - def put_submodel_descriptor_by_id_through_superpath(self, - request: Request, - url_args: Dict, - response_t: - Type[ - APIResponse], - **_kwargs) -> Response: - aas_descriptor = self._get_aas_descriptor(url_args) - submodel_id = url_args["submodel_id"] - submodel_descriptor = next( - (sd for sd in aas_descriptor.submodel_descriptors if - sd.id == submodel_id), None) - if submodel_descriptor is None: - raise NotFound( - f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") - submodel_descriptor.update_from( - HTTPApiDecoder.request_body(request, - server_model.SubmodelDescriptor, - is_stripped_request(request))) - aas_descriptor.commit() - return response_t() - - def delete_submodel_descriptor_by_id_through_superpath(self, - request: 
Request, - url_args: Dict, - response_t: - Type[ - APIResponse], - **_kwargs) -> Response: - aas_descriptor = self._get_aas_descriptor(url_args) - submodel_id = url_args["submodel_id"] - submodel_descriptor = next( - (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None) - if submodel_descriptor is None: - raise NotFound(f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") - aas_descriptor.submodel_descriptors.remove(submodel_descriptor) - aas_descriptor.commit() - return response_t() - - # ------ Submodel REGISTRY ROUTES ------- - def get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - submodel_descriptors, cursor = self._get_all_submodel_descriptors(request) - return response_t(list(submodel_descriptors), cursor=cursor, stripped=is_stripped_request(request)) - - def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - submodel_descriptor = self._get_submodel_descriptor(url_args) - return response_t(submodel_descriptor, stripped=is_stripped_request(request)) - - def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - map_adapter: MapAdapter) -> Response: - submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, - is_stripped_request(request)) - try: - self.object_store.add(submodel_descriptor) - except KeyError as e: - raise Conflict(f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") from e - submodel_descriptor.commit() - created_resource_url = map_adapter.build(self.get_submodel_descriptor_by_id, { - "submodel_id": submodel_descriptor.id - }, force_external=True) - return response_t(submodel_descriptor, status=201, headers={"Location": created_resource_url}) - - def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, 
response_t: Type[APIResponse], - **_kwargs) -> Response: - submodel_descriptor = self._get_submodel_descriptor(url_args) - submodel_descriptor.update_from( - HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) - submodel_descriptor.commit() - return response_t() - - def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], - **_kwargs) -> Response: - self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)) - return response_t() - - -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - run_simple("localhost", 8083, RegistryAPI(create_full_example()), - use_debugger=True, use_reloader=True) diff --git a/server/app/model/__init__.py b/server/app/model/__init__.py deleted file mode 100644 index 5736b5492..000000000 --- a/server/app/model/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .descriptor import * -from .endpoint import * diff --git a/server/app/model/descriptor.py b/server/app/model/descriptor.py deleted file mode 100644 index 38276cd2d..000000000 --- a/server/app/model/descriptor.py +++ /dev/null @@ -1,109 +0,0 @@ -from __future__ import absolute_import - -import abc -from typing import Optional, Iterable, List - -from basyx.aas import model -from server.app.model.endpoint import Endpoint - - -class Descriptor(model.HasExtension, metaclass=abc.ABCMeta): - @abc.abstractmethod - def __init__(self, description: Optional[model.MultiLanguageTextType] = None, - display_name: Optional[model.MultiLanguageNameType] = None, extension: Iterable[model.Extension] = ()): - super().__init__() - self.description: Optional[model.MultiLanguageTextType] = description - self.display_name: Optional[model.MultiLanguageNameType] = display_name - self.extension = model.NamespaceSet(self, [("name", True)], extension) - - def commit(self): - pass - - def 
update(self): - pass - - def update_from(self, other: "Descriptor", update_source: bool = False): - """ - Updates the descriptor's attributes from another descriptor. - - :param other: The descriptor to update from. - :param update_source: Placeholder for compatibility; not used in this context. - """ - for attr in vars(other): - if attr == "id": - continue # Skip updating the unique identifier of the AAS - setattr(self, attr, getattr(other, attr)) - - -class SubmodelDescriptor(Descriptor): - - def __init__(self, id_: model.Identifier, endpoints: List[Endpoint], - administration: Optional[model.AdministrativeInformation] = None, - id_short: Optional[model.NameType] = None, semantic_id: Optional[model.Reference] = None, - supplemental_semantic_id: Iterable[model.Reference] = ()): - super().__init__() - self.id: model.Identifier = id_ - self.endpoints: List[Endpoint] = endpoints - self.administration: Optional[model.AdministrativeInformation] = administration - self.id_short: Optional[model.NameType] = id_short - self.semantic_id: Optional[model.Reference] = semantic_id - self.supplemental_semantic_id: model.ConstrainedList[model.Reference] = \ - model.ConstrainedList(supplemental_semantic_id) - - -class AssetAdministrationShellDescriptor(Descriptor): - - def __init__(self, - id_: model.Identifier, - administration: Optional[model.AdministrativeInformation] = None, - asset_kind: Optional[model.AssetKind] = None, - asset_type: Optional[model.Identifier] = None, - endpoints: Optional[List[Endpoint]] = None, - global_asset_id: Optional[model.Identifier] = None, - id_short: Optional[model.NameType] = None, - specific_asset_id: Iterable[model.SpecificAssetId] = (), - submodel_descriptors: Optional[List[SubmodelDescriptor]] = None, - description: Optional[model.MultiLanguageTextType] = None, - display_name: Optional[model.MultiLanguageNameType] = None, - extension: Iterable[model.Extension] = ()): - """AssetAdministrationShellDescriptor - - - Nur das 'id'-Feld (id_) ist 
zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. - """ - super().__init__() - self.administration: Optional[model.AdministrativeInformation] = administration - self.asset_kind: Optional[model.AssetKind] = asset_kind - self.asset_type: Optional[model.Identifier] = asset_type - self.endpoints: Optional[ - List[Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt - self.global_asset_id: Optional[model.Identifier] = global_asset_id - self.id_short: Optional[model.NameType] = id_short - self.id: model.Identifier = id_ - self._specific_asset_id: model.ConstrainedList[model.SpecificAssetId] = model.ConstrainedList( - specific_asset_id, - item_set_hook=self._check_constraint_set_spec_asset_id, - item_del_hook=self._check_constraint_del_spec_asset_id - ) - self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] - self.description: Optional[model.MultiLanguageTextType] = description - self.display_name: Optional[model.MultiLanguageNameType] = display_name - self.extension = model.NamespaceSet(self, [("name", True)], extension) - - @property - def specific_asset_id(self) -> model.ConstrainedList[model.SpecificAssetId]: - return self._specific_asset_id - - @specific_asset_id.setter - def specific_asset_id(self, specific_asset_id: Iterable[model.SpecificAssetId]) -> None: - # constraints are checked via _check_constraint_set_spec_asset_id() in this case - self._specific_asset_id[:] = specific_asset_id - - def _check_constraint_set_spec_asset_id(self, items_to_replace: List[model.SpecificAssetId], - new_items: List[model.SpecificAssetId], - old_list: List[model.SpecificAssetId]) -> None: - model.AssetInformation._validate_aasd_131(self.global_asset_id, - len(old_list) - len(items_to_replace) + len(new_items) > 0) - - def _check_constraint_del_spec_asset_id(self, _item_to_del: model.SpecificAssetId, - old_list: List[model.SpecificAssetId]) -> None: - 
model.AssetInformation._validate_aasd_131(self.global_asset_id, len(old_list) > 1) diff --git a/server/app/model/endpoint.py b/server/app/model/endpoint.py deleted file mode 100644 index 3be6dc061..000000000 --- a/server/app/model/endpoint.py +++ /dev/null @@ -1,107 +0,0 @@ -from __future__ import absolute_import - -import re -from enum import Enum -from typing import Optional, List - -from basyx.aas.model import base - - -class AssetLink: - def __init__(self, name: base.LabelType, value: base.Identifier): - if not name: - raise ValueError("AssetLink 'name' must be a non-empty string.") - if not value: - raise ValueError("AssetLink 'value' must be a non-empty string.") - self.name = name - self.value = value - - -class SecurityTypeEnum(Enum): - NONE = "NONE" - RFC_TLSA = "RFC_TLSA" - W3C_DID = "W3C_DID" - - -class SecurityAttributeObject: - def __init__(self, type_: SecurityTypeEnum, key: str, value: str): - - if not isinstance(type_, SecurityTypeEnum): - raise ValueError(f"Invalid security type: {type_}. 
Must be one of {list(SecurityTypeEnum)}") - if not key or not isinstance(key, str): - raise ValueError("Key must be a non-empty string.") - if not value or not isinstance(value, str): - raise ValueError("Value must be a non-empty string.") - self.type = type_ - self.key = key - self.value = value - - -class ProtocolInformation: - - def __init__( - self, - href: str, - endpoint_protocol: Optional[str] = None, - endpoint_protocol_version: Optional[List[str]] = None, - subprotocol: Optional[str] = None, - subprotocol_body: Optional[str] = None, - subprotocol_body_encoding: Optional[str] = None, - security_attributes: Optional[List[SecurityAttributeObject]] = None - ): - if not href or not isinstance(href, str): - raise ValueError("href must be a non-empty string representing a valid URL.") - - self.href = href - self.endpoint_protocol = endpoint_protocol - self.endpoint_protocol_version = endpoint_protocol_version or [] - self.subprotocol = subprotocol - self.subprotocol_body = subprotocol_body - self.subprotocol_body_encoding = subprotocol_body_encoding - self.security_attributes = security_attributes or [] - - -class Endpoint: - INTERFACE_SHORTNAMES = { - "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", - "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", - "CD-REPOSITORY", "AAS-DISCOVERY" - } - VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") - - def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 - - self.interface = interface - self.protocol_information = protocol_information - - @property - def interface(self) -> str: - return self._interface - - @interface.setter - def interface(self, interface: base.NameType): - if interface is None: - raise ValueError("Invalid value for `interface`, must not be `None`") - if not self.is_valid_interface(interface): - raise ValueError(f"Invalid interface format: {interface}. 
Expected format: '-', ") - - self._interface = interface - - @classmethod - def is_valid_interface(cls, interface: base.NameType) -> bool: - parts = interface.split("-", 1) - if len(parts) != 2: - return False - short_name, version = parts - return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) - - @property - def protocol_information(self) -> ProtocolInformation: - return self._protocol_information - - @protocol_information.setter - def protocol_information(self, protocol_information: ProtocolInformation): - if protocol_information is None: - raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 - - self._protocol_information = protocol_information From 0c36396a73c2dcbeb17cec0280aa9e79667cda8d Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 22 May 2025 23:33:11 +0200 Subject: [PATCH 34/52] Add missing code from PR #362 --- server/app/interfaces/base.py | 29 +++++++++++++++++------------ server/app/interfaces/repository.py | 4 +--- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index caabb4311..be7690fd1 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -11,7 +11,7 @@ import werkzeug.utils from lxml import etree from werkzeug import Response, Request -from werkzeug.exceptions import NotFound, BadRequest, UnprocessableEntity +from werkzeug.exceptions import NotFound, BadRequest from werkzeug.routing import MapAdapter from basyx.aas import model @@ -195,13 +195,13 @@ def __call__(self, environ, start_response) -> Iterable[bytes]: @classmethod def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="0") + cursor_str = request.args.get('cursor', default="1") try: - limit, cursor = int(limit_str), int(cursor_str) + limit, cursor = int(limit_str), 
int(cursor_str) - 1 # cursor is 1-indexed if limit < 0 or cursor < 0: raise ValueError except ValueError: - raise BadRequest("Cursor and limit must be positive integers!") + raise BadRequest("Limit can not be negative, cursor must be positive!") start_index = cursor end_index = cursor + limit paginated_slice = itertools.islice(iterator, start_index, end_index) @@ -292,7 +292,7 @@ def check_type_support(cls, type_: type): @classmethod def assert_type(cls, obj: object, type_: Type[T]) -> T: if not isinstance(obj, type_): - raise UnprocessableEntity(f"Object {obj!r} is not of type {type_.__name__}!") + raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!") return obj @classmethod @@ -303,9 +303,9 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool try: parsed = json.loads(data, cls=decoder) if isinstance(parsed, list) and expect_single: - raise UnprocessableEntity(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") + raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") if not isinstance(parsed, list) and not expect_single: - raise UnprocessableEntity(f"Expected List[{expect_type.__name__}], got {parsed!r}!") + raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!") parsed = [parsed] if not isinstance(parsed, list) else parsed # TODO: the following is ugly, but necessary because references aren't self-identified objects @@ -330,7 +330,7 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool return [constructor(obj, *args) for obj in parsed] except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: - raise UnprocessableEntity(str(e)) from e + raise BadRequest(str(e)) from e return [cls.assert_type(obj, expect_type) for obj in parsed] @@ -360,9 +360,9 @@ def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: f: BaseException = e while f.__cause__ is not 
None: f = f.__cause__ - raise UnprocessableEntity(str(f)) from e + raise BadRequest(str(f)) from e except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: - raise UnprocessableEntity(str(e)) from e + raise BadRequest(str(e)) from e return cls.assert_type(rv, expect_type) @classmethod @@ -426,5 +426,10 @@ def _convert_single_json_item(cls, data: any, expect_type: Type[T], stripped: bo def is_stripped_request(request: Request) -> bool: - return request.args.get("level") == "core" - + level = request.args.get("level") + if level not in {"deep", "core", None}: + raise BadRequest(f"Level {level} is not a valid level!") + extent = request.args.get("extent") + if extent is not None: + raise werkzeug.exceptions.NotImplemented(f"The parameter extent is not yet implemented for this server!") + return level == "core" diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py index e35d4e8ed..2e78ded78 100644 --- a/server/app/interfaces/repository.py +++ b/server/app/interfaces/repository.py @@ -36,8 +36,6 @@ import io import json -import itertools -import urllib from typing import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional import werkzeug.exceptions @@ -50,7 +48,7 @@ from basyx.aas import model from basyx.aas.adapter import aasx -from server.app.util.converters import Base64URLConverter, IdShortPathConverter +from server.app.util.converters import Base64URLConverter, IdShortPathConverter, base64url_decode from .base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder, T From 6b3c6466e63c9582cdeaea3487f85b24be060f09 Mon Sep 17 00:00:00 2001 From: zrgt Date: Thu, 22 May 2025 23:50:02 +0200 Subject: [PATCH 35/52] Revert changes in .gitignore --- .gitignore | 3 --- 1 file changed, 3 deletions(-) diff --git a/.gitignore b/.gitignore index ad036926e..18b522c3a 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,3 @@ compliance_tool/aas_compliance_tool/version.py # ignore the content of the server 
storage server/storage/ - -# local testing file, do not commit -test.py From 0a8546eb973477f5c3261fd0f3f2665655058070 Mon Sep 17 00:00:00 2001 From: zrgt Date: Fri, 23 May 2025 00:20:55 +0200 Subject: [PATCH 36/52] Fix copyright --- server/app/interfaces/base.py | 6 ++++++ server/app/util/converters.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index be7690fd1..ae5bb6506 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -1,3 +1,9 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT import abc import datetime import enum diff --git a/server/app/util/converters.py b/server/app/util/converters.py index 47e1ed645..c79ded3c2 100644 --- a/server/app/util/converters.py +++ b/server/app/util/converters.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors +# Copyright (c) 2025 the Eclipse BaSyx Authors # # This program and the accompanying materials are made available under the terms of the MIT License, available in # the LICENSE file of this project. 
From bfd1411a92808c7da8a9511576219c0746f399ab Mon Sep 17 00:00:00 2001 From: zrgt Date: Fri, 23 May 2025 00:32:08 +0200 Subject: [PATCH 37/52] Refactor `test_http.py` to `test_repository.py` --- server/test/__init__.py | 0 server/test/interfaces/__init__.py | 0 .../test_http.py => server/test/interfaces/test_repository.py | 2 +- 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 server/test/__init__.py create mode 100644 server/test/interfaces/__init__.py rename sdk/test/adapter/test_http.py => server/test/interfaces/test_repository.py (99%) diff --git a/server/test/__init__.py b/server/test/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/test/interfaces/__init__.py b/server/test/interfaces/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/sdk/test/adapter/test_http.py b/server/test/interfaces/test_repository.py similarity index 99% rename from sdk/test/adapter/test_http.py rename to server/test/interfaces/test_repository.py index 09dadf865..32c6ea444 100644 --- a/sdk/test/adapter/test_http.py +++ b/server/test/interfaces/test_repository.py @@ -34,7 +34,7 @@ from basyx.aas import model from basyx.aas.adapter.aasx import DictSupplementaryFileContainer -from basyx.aas.adapter.http import WSGIApp +from server.app.interfaces.repository import WSGIApp from basyx.aas.examples.data.example_aas import create_full_example from typing import Set From 1fd76de1472b2870de258debafcadd7375b0a67c Mon Sep 17 00:00:00 2001 From: Sercan Sahin Date: Thu, 29 May 2025 13:15:16 +0200 Subject: [PATCH 38/52] fix copyright --- server/test/interfaces/test_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/test/interfaces/test_repository.py b/server/test/interfaces/test_repository.py index 32c6ea444..5177dfacb 100644 --- a/server/test/interfaces/test_repository.py +++ b/server/test/interfaces/test_repository.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors +# Copyright 
(c) 2025 the Eclipse BaSyx Authors # # This program and the accompanying materials are made available under the terms of the MIT License, available in # the LICENSE file of this project. From a78306630cf63fa3d36995c06b38305079095579 Mon Sep 17 00:00:00 2001 From: Sercan Sahin Date: Thu, 29 May 2025 16:11:17 +0200 Subject: [PATCH 39/52] fix MyPy errors, some tests --- sdk/basyx/aas/adapter/json/json_deserialization.py | 5 +++-- sdk/basyx/aas/adapter/json/json_serialization.py | 5 +++-- sdk/test/adapter/json/test_json_deserialization.py | 10 ++++++---- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index d1c770efc..4d07ec86b 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,7 +34,8 @@ import json import logging import pprint -from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args, Tuple, Iterable +from typing import (Dict, Callable, ContextManager, TypeVar, Type, + List, IO, Optional, Set, get_args, Tuple, Iterable, Any) from basyx.aas import model from .._generic import MODELLING_KIND_INVERSE, ASSET_KIND_INVERSE, KEY_TYPES_INVERSE, ENTITY_TYPES_INVERSE, \ @@ -811,7 +812,7 @@ def _select_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[AASFr def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, decoder: Optional[Type[AASFromJsonDecoder]] = None, - keys_to_types: Iterable[Tuple[str, any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) -> Set[model.Identifier]: + keys_to_types: Iterable[Tuple[str, Any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a 
given object store. diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index 14a1f2e77..024226d97 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -700,7 +700,8 @@ def _select_encoder(stripped: bool, encoder: Optional[Type[AASToJsonEncoder]] = def _create_dict(data: model.AbstractObjectStore, - keys_to_types: Iterable[Tuple[str, Type]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) -> Dict[str, List[object]]: + keys_to_types: Iterable[Tuple[str, Type]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Dict[str, List[model.Identifiable]]: """ Categorizes objects from an AbstractObjectStore into a dictionary based on their types. @@ -714,7 +715,7 @@ def _create_dict(data: model.AbstractObjectStore, - A type to match objects against. :return: A dictionary where keys are category names and values are lists of objects of the corresponding types. """ - objects = {} + objects: Dict[str, List[model.Identifiable]] = {} for obj in data: # Iterate through the mapping of category names to expected types diff --git a/sdk/test/adapter/json/test_json_deserialization.py b/sdk/test/adapter/json/test_json_deserialization.py index 9272bdf98..645f2a8ab 100644 --- a/sdk/test/adapter/json/test_json_deserialization.py +++ b/sdk/test/adapter/json/test_json_deserialization.py @@ -37,7 +37,8 @@ def test_file_format_wrong_list(self) -> None: } ] }""" - with self.assertRaisesRegex(TypeError, r"submodels.*AssetAdministrationShell"): + with self.assertRaisesRegex(TypeError, r"AssetAdministrationShell.* was " + r"in the wrong list 'submodels'"): read_aas_json_file(io.StringIO(data), failsafe=False) with self.assertLogs(logging.getLogger(), level=logging.WARNING) as cm: read_aas_json_file(io.StringIO(data), failsafe=True) @@ -54,7 +55,8 @@ def test_file_format_unknown_object(self) -> None: { "x": "foo" } ] }""" - with self.assertRaisesRegex(TypeError, r"submodels.*'foo'"): + with 
self.assertRaisesRegex(TypeError, r"\{\s?'x':\s?'foo'\s?\} was in" + r" the wrong list 'submodels'"): read_aas_json_file(io.StringIO(data), failsafe=False) with self.assertLogs(logging.getLogger(), level=logging.WARNING) as cm: read_aas_json_file(io.StringIO(data), failsafe=True) @@ -196,7 +198,7 @@ def get_clean_store() -> model.DictObjectStore: with self.assertLogs(logging.getLogger(), level=logging.INFO) as log_ctx: identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=True) self.assertEqual(len(identifiers), 0) - self.assertIn("already exists in the object store", log_ctx.output[0]) # type: ignore + self.assertIn("already exists in store", log_ctx.output[0]) # type: ignore submodel = object_store.pop() self.assertIsInstance(submodel, model.Submodel) self.assertEqual(submodel.id_short, "test123") @@ -204,7 +206,7 @@ def get_clean_store() -> model.DictObjectStore: string_io.seek(0) object_store = get_clean_store() - with self.assertRaisesRegex(KeyError, r"already exists in the object store"): + with self.assertRaisesRegex(KeyError, r"already exists in store"): identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=False) self.assertEqual(len(identifiers), 0) From 66f3320f877faea0277d376a779a7309789c313d Mon Sep 17 00:00:00 2001 From: Sercan Sahin Date: Tue, 3 Jun 2025 15:03:16 +0200 Subject: [PATCH 40/52] fix bugs, reintroduce Identifiable check --- .../aas/adapter/json/json_deserialization.py | 54 ++++++++++--------- sdk/pyproject.toml | 2 +- .../adapter/json/test_json_deserialization.py | 3 +- 3 files changed, 32 insertions(+), 27 deletions(-) diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index 4d07ec86b..cd7ce9fb0 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -812,7 +812,8 @@ def _select_decoder(failsafe: bool, 
stripped: bool, decoder: Optional[Type[AASFr def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, decoder: Optional[Type[AASFromJsonDecoder]] = None, - keys_to_types: Iterable[Tuple[str, Any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) -> Set[model.Identifier]: + keys_to_types: Iterable[Tuple[str, Any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a given object store. @@ -864,31 +865,36 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO continue for item in lst: - if not isinstance(item, expected_type): - if not decoder_.failsafe: - raise TypeError(f"{item} was in the wrong list '{name}'") - logger.warning(f"{item} was in the wrong list '{name}'; nevertheless, we'll use it") - - if item.id in ret: - error_msg = f"{item} has a duplicate identifier already parsed in the document!" - if not decoder_.failsafe: - raise KeyError(error_msg) - logger.error(f"{error_msg} skipping it...") - continue - - existing_element = object_store.get(item.id) - if existing_element is not None: - if not replace_existing: - error_msg = f"Object with id '{item.id}' already exists in store: {existing_element}!" - if not ignore_existing: - raise KeyError(f"{error_msg} Failed to insert {item}!") - logger.info(f"{error_msg}; Skipping {item}...") + error_msg = f"Expected a {expected_type.__name__} in list '{name}', but found {repr(item)}." 
+ if isinstance(item, model.Identifiable): + if not isinstance(item, expected_type): + if not decoder_.failsafe: + raise TypeError(f"{item} was in the wrong list '{name}'") + logger.warning(f"{item} was in the wrong list '{name}'; nevertheless, we'll use it") + + if item.id in ret: + error_msg = f"{item} has a duplicate identifier already parsed in the document!" + if not decoder_.failsafe: + raise KeyError(error_msg) + logger.error(f"{error_msg} Skipping it...") continue - object_store.discard(existing_element) - - object_store.add(item) - ret.add(item.id) + existing_element = object_store.get(item.id) + if existing_element is not None: + if not replace_existing: + error_msg = f"Object with id '{item.id}' already exists in store: {existing_element}!" + if not ignore_existing: + raise KeyError(f"{error_msg} Failed to insert {item}!") + logger.info(f"{error_msg} Skipping {item}...") + continue + object_store.discard(existing_element) + + object_store.add(item) + ret.add(item.id) + elif decoder_.failsafe: + logger.error(f"{error_msg} Skipping it...") + else: + raise TypeError(error_msg) return ret diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index 70308891b..47dee3488 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -20,7 +20,7 @@ root = ".." 
# Defines the path to the root of the repository version_file = "basyx/version.py" [project] - name = "basyx-python-sdk" +name = "basyx-python-sdk" dynamic = ["version"] description = "The Eclipse BaSyx Python SDK, an implementation of the Asset Administration Shell for Industry 4.0 systems" authors = [ diff --git a/sdk/test/adapter/json/test_json_deserialization.py b/sdk/test/adapter/json/test_json_deserialization.py index 645f2a8ab..0dba6dbdb 100644 --- a/sdk/test/adapter/json/test_json_deserialization.py +++ b/sdk/test/adapter/json/test_json_deserialization.py @@ -55,8 +55,7 @@ def test_file_format_unknown_object(self) -> None: { "x": "foo" } ] }""" - with self.assertRaisesRegex(TypeError, r"\{\s?'x':\s?'foo'\s?\} was in" - r" the wrong list 'submodels'"): + with self.assertRaisesRegex(TypeError, r"submodels.*'foo'"): read_aas_json_file(io.StringIO(data), failsafe=False) with self.assertLogs(logging.getLogger(), level=logging.WARNING) as cm: read_aas_json_file(io.StringIO(data), failsafe=True) From 322671819c14786d7a4acb5d7a5fbe66aebbe07c Mon Sep 17 00:00:00 2001 From: zrgt Date: Tue, 17 Jun 2025 14:34:19 +0200 Subject: [PATCH 41/52] Revert "Remove discovery/registry related code" This reverts commit 115db620e3895712509befb378241fca583b5d06. 
--- Discovery Server/Dockerfile | 45 ++++ Discovery Server/README.md | 63 ++++++ Discovery Server/app/main.py | 25 +++ Discovery Server/compose.yml | 7 + Discovery Server/entrypoint.sh | 71 ++++++ Discovery Server/stop-supervisor.sh | 8 + Discovery Server/supervisord.ini | 27 +++ Discovery Server/uwsgi.ini | 9 + Registry Server/Dockerfile | 45 ++++ Registry Server/README.md | 97 ++++++++ Registry Server/compose.yml | 7 + Registry Server/entrypoint.sh | 71 ++++++ Registry Server/stop-supervisor.sh | 8 + Registry Server/supervisord.ini | 27 +++ Registry Server/uwsgi.ini | 9 + server/app/adapter/__init__.py | 0 server/app/adapter/jsonization.py | 332 ++++++++++++++++++++++++++++ server/app/interfaces/base.py | 21 +- server/app/interfaces/discovery.py | 215 ++++++++++++++++++ server/app/interfaces/registry.py | 280 +++++++++++++++++++++++ server/app/model/__init__.py | 2 + server/app/model/descriptor.py | 109 +++++++++ server/app/model/endpoint.py | 107 +++++++++ 23 files changed, 1580 insertions(+), 5 deletions(-) create mode 100644 Discovery Server/Dockerfile create mode 100644 Discovery Server/README.md create mode 100644 Discovery Server/app/main.py create mode 100644 Discovery Server/compose.yml create mode 100644 Discovery Server/entrypoint.sh create mode 100644 Discovery Server/stop-supervisor.sh create mode 100644 Discovery Server/supervisord.ini create mode 100644 Discovery Server/uwsgi.ini create mode 100644 Registry Server/Dockerfile create mode 100644 Registry Server/README.md create mode 100644 Registry Server/compose.yml create mode 100644 Registry Server/entrypoint.sh create mode 100644 Registry Server/stop-supervisor.sh create mode 100644 Registry Server/supervisord.ini create mode 100644 Registry Server/uwsgi.ini create mode 100644 server/app/adapter/__init__.py create mode 100644 server/app/adapter/jsonization.py create mode 100644 server/app/interfaces/discovery.py create mode 100644 server/app/interfaces/registry.py create mode 100644 
server/app/model/__init__.py create mode 100644 server/app/model/descriptor.py create mode 100644 server/app/model/endpoint.py diff --git a/Discovery Server/Dockerfile b/Discovery Server/Dockerfile new file mode 100644 index 000000000..6dc3c4cac --- /dev/null +++ b/Discovery Server/Dockerfile @@ -0,0 +1,45 @@ +FROM python:3.11-alpine + +LABEL org.label-schema.name="Eclipse BaSyx" \ + org.label-schema.version="1.0" \ + org.label-schema.description="Docker image for the basyx-python-sdk server application" \ + org.label-schema.maintainer="Eclipse BaSyx" + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# If we have more dependencies for the server it would make sense +# to refactor uwsgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ + apk del git bash + + +COPY uwsgi.ini /etc/uwsgi/ +COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +COPY ./app /app +WORKDIR /app + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Discovery Server/README.md b/Discovery Server/README.md new file mode 100644 index 000000000..0b0938167 --- /dev/null +++ b/Discovery Server/README.md @@ -0,0 +1,63 
@@ +# Eclipse BaSyx Python SDK - Discovery Service + +This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. +It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). + +## Overview + +The Discovery Service is a core component in the Asset Administration Shell ecosystem. Its main responsibility is to store and retrieve relations between AAS identifiers and asset identifiers. It acts as a lookup service for resolving asset-related queries to corresponding AAS. + +This implementation supports: + +- Adding links between AAS and assets +- Querying AAS by asset links +- Querying asset links by AAS ID +- Removing AAS-related asset links +- Configurable in-memory or MongoDB-based persistent storage + +## Features + +| Feature | Description | +|---------------------------------------------|-------------------------------------------------------| +| `add_asset_links` | Register specific asset ids linked to an AAS | +| `get_all_specific_asset_ids_by_aas_id` | Retrieve specific asset ids associated with an AAS | +| `search_aas_by_asset_link` | Find AAS identifiers by providing asset link values | +| `remove_asset_links_for_aas` | Delete all asset links associated with a specific AAS | + +## Specification Compliance + +- Complies with: **Discovery Service Specification v3.1.0_SSP-001** + +## Configuration + +The service can be configured to use either: + +- **In-memory storage** (default): Temporary data storage that resets on service restart. +- **MongoDB storage**: Persistent backend storage using MongoDB. 
+ +### Configuration via Environment Variables + +| Variable | Description | Default | +|----------------|--------------------------------------------|-------------------------| +| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | +| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | +| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | + +## Deployment via Docker + +A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. + +[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 +[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html +[3]: https://github.com/eclipse-basyx/basyx-python-sdk +[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 +[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 +[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces +[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx +[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html +[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html +[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py new file mode 100644 index 000000000..19c97b416 --- /dev/null +++ b/Discovery Server/app/main.py @@ -0,0 +1,25 @@ +import os +import sys +from basyx.aas.adapter.discovery import DiscoveryAPI, MongoDiscoveryStore,InMemoryDiscoveryStore + +storage_type = os.getenv("STORAGE_TYPE", "inmemory") +base_path = os.getenv("API_BASE_PATH") + +wsgi_optparams = {} + +if base_path is not None: + wsgi_optparams["base_path"] = base_path + +if storage_type == "inmemory": + application = 
DiscoveryAPI(InMemoryDiscoveryStore(), **wsgi_optparams) + +elif storage_type in "mongodb": + uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") + dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") + + application = DiscoveryAPI(MongoDiscoveryStore(uri,dbname), **wsgi_optparams) + +else: + print(f"STORAGE_TYPE must be either inmemory or mongodb! Current value: {storage_type}", + file=sys.stderr) + diff --git a/Discovery Server/compose.yml b/Discovery Server/compose.yml new file mode 100644 index 000000000..6e1d65404 --- /dev/null +++ b/Discovery Server/compose.yml @@ -0,0 +1,7 @@ +services: + app: + build: . + ports: + - "8084:80" + environment: + STORAGE_TYPE: inmemory diff --git a/Discovery Server/entrypoint.sh b/Discovery Server/entrypoint.sh new file mode 100644 index 000000000..722394409 --- /dev/null +++ b/Discovery Server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! 
-d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' sendfile on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" diff --git a/Discovery Server/stop-supervisor.sh b/Discovery Server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/Discovery Server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/Discovery Server/supervisord.ini b/Discovery Server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/Discovery Server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/Discovery Server/uwsgi.ini b/Discovery Server/uwsgi.ini new file mode 100644 index 000000000..9c54ae1cc --- /dev/null +++ b/Discovery Server/uwsgi.ini @@ -0,0 +1,9 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false diff 
--git a/Registry Server/Dockerfile b/Registry Server/Dockerfile new file mode 100644 index 000000000..3d52a15ab --- /dev/null +++ b/Registry Server/Dockerfile @@ -0,0 +1,45 @@ +FROM python:3.11-alpine + +LABEL org.label-schema.name="Eclipse BaSyx" \ + org.label-schema.version="1.0" \ + org.label-schema.description="Docker image for the basyx-python-sdk server application" \ + org.label-schema.maintainer="Eclipse BaSyx" + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# If we have more dependencies for the server it would make sense +# to refactor uwsgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ + apk del git bash + + +COPY uwsgi.ini /etc/uwsgi/ +COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +COPY ../server/app /app +WORKDIR /app + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Registry Server/README.md b/Registry Server/README.md new file mode 100644 index 000000000..339226c53 --- /dev/null +++ b/Registry Server/README.md @@ -0,0 +1,97 @@ +# Eclipse BaSyx Python SDK - HTTP Server + +This package contains a Dockerfile to spin up an exemplary HTTP/REST server 
following the [Specification of the AAS Part 2 API][6] with ease. +The server currently implements the following interfaces: + +- [Asset Administration Shell Repository Service][4] +- [Submodel Repository Service][5] + +It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory. +The files are only read, changes won't persist. + +Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements). +See [below](#options) on how to configure this. + +## Building +The container image can be built via: +``` +$ docker buildx build -t basyx-python-sdk-http-server . +``` + +## Running + +### Storage +The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. + +This directory can be mapped via the `-v` option from another image or a local directory. +To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. +The directory `storage` will be created in the current working directory, if it doesn't already exist. + +### Port +The HTTP server inside the container listens on port 80 by default. +To expose it on the host on port 8080, use the option `-p 8080:80` when running it. + +### Options +The container can be configured via environment variables: +- `API_BASE_PATH` determines the base path under which all other API paths are made available. + Default: `/api/v3.0` +- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: + - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. + The files are not modified, all changes done via the API are only stored in memory. 
+ - When instead set to `LOCAL_FILE_BACKEND`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. + Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. +- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. + +### Running Examples + +Putting it all together, the container can be started via the following command: +``` +$ docker run -p 8080:80 -v ./storage:/storage basyx-python-sdk-http-server +``` + +Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there: +``` +> docker run -p 8080:80 -v .\storage:/storage basyx-python-sdk-http-server +``` + +Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. If you want to change this, you can do so like this: +``` +$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-sdk-http-server +``` + +## Building and running the image with docker-compose + +The container image can also be built and run via: +``` +$ docker compose up +``` + +This is the exemplary `docker-compose` file for the server: +````yaml +services: + app: + build: . + ports: + - "8080:80" + volumes: + - ./storage:/storage + +```` + +Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system. +To get a different setup this compose.yaml file can be adapted and expanded. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. 
+ +[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 +[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html +[3]: https://github.com/eclipse-basyx/basyx-python-sdk +[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 +[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 +[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces +[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx +[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html +[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html +[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Registry Server/compose.yml b/Registry Server/compose.yml new file mode 100644 index 000000000..932422dbc --- /dev/null +++ b/Registry Server/compose.yml @@ -0,0 +1,7 @@ +services: + app: + build: . 
+ ports: + - "8083:80" + volumes: + - ./storage:/storage diff --git a/Registry Server/entrypoint.sh b/Registry Server/entrypoint.sh new file mode 100644 index 000000000..722394409 --- /dev/null +++ b/Registry Server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! -d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' sendfile 
on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf "client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" diff --git a/Registry Server/stop-supervisor.sh b/Registry Server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/Registry Server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/Registry Server/supervisord.ini b/Registry Server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/Registry Server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr 
+stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/Registry Server/uwsgi.ini b/Registry Server/uwsgi.ini new file mode 100644 index 000000000..9c54ae1cc --- /dev/null +++ b/Registry Server/uwsgi.ini @@ -0,0 +1,9 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false diff --git a/server/app/adapter/__init__.py b/server/app/adapter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py new file mode 100644 index 000000000..cd5f01a76 --- /dev/null +++ b/server/app/adapter/jsonization.py @@ -0,0 +1,332 @@ +from typing import Dict, Set, Optional, Type + +import server.app.model as server_model +from basyx.aas import model +from basyx.aas.adapter import _generic +from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO +from basyx.aas.adapter.json import AASToJsonEncoder +from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES, \ + read_aas_json_file_into + +import logging +from typing import Callable + +logger = logging.getLogger(__name__) + +JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES + ( + ('assetAdministrationShellDescriptors', server_model.AssetAdministrationShellDescriptor), + 
('submodelDescriptors', server_model.SubmodelDescriptor) +) + + +class ServerAASFromJsonDecoder(AASFromJsonDecoder): + @classmethod + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + aas_class_parsers = super()._get_aas_class_parsers() + aas_class_parsers.update({ + 'AssetAdministrationShellDescriptor': cls._construct_asset_administration_shell_descriptor, + 'SubmodelDescriptor': cls._construct_submodel_descriptor, + 'AssetLink': cls._construct_asset_link, + 'ProtocolInformation': cls._construct_protocol_information, + 'Endpoint': cls._construct_endpoint + }) + return aas_class_parsers + + # ################################################################################################## + # Utility Methods used in constructor methods to add general attributes (from abstract base classes) + # ################################################################################################## + + @classmethod + def _amend_abstract_attributes(cls, obj: object, dct: Dict[str, object]) -> None: + super()._amend_abstract_attributes(obj, dct) + + if isinstance(obj, server_model.Descriptor): + if 'description' in dct: + obj.description = cls._construct_lang_string_set(_get_ts(dct, 'description', list), + model.MultiLanguageTextType) + if 'displayName' in dct: + obj.display_name = cls._construct_lang_string_set(_get_ts(dct, 'displayName', list), + model.MultiLanguageNameType) + if 'extensions' in dct: + for extension in _get_ts(dct, 'extensions', list): + obj.extension.add(cls._construct_extension(extension)) + + @classmethod + def _construct_asset_administration_shell_descriptor( + cls, dct: Dict[str, object], + object_class=server_model.AssetAdministrationShellDescriptor) -> server_model.AssetAdministrationShellDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str)) + cls._amend_abstract_attributes(ret, dct) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information(_get_ts(dct, 
'administration', dict)) + if 'assetKind' in dct: + ret.asset_kind = ASSET_KIND_INVERSE[_get_ts(dct, 'assetKind', str)] + if 'assetType' in dct: + ret.asset_type = _get_ts(dct, 'assetType', str) + global_asset_id = None + if 'globalAssetId' in dct: + ret.global_asset_id = _get_ts(dct, 'globalAssetId', str) + specific_asset_id = set() + if 'specificAssetIds' in dct: + for desc_data in _get_ts(dct, "specificAssetIds", list): + specific_asset_id.add(cls._construct_specific_asset_id(desc_data, model.SpecificAssetId)) + if 'endpoints' in dct: + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + server_model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = server_model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + endpoint_dct['href'] else None + ) + ret.endpoints.append(server_model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'idShort' in dct: + ret.id_short = _get_ts(dct, 'idShort', str) + if 'submodelDescriptors' in dct: + ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), + server_model.SubmodelDescriptor) + return ret + + @classmethod + def _construct_protocol_information(cls, dct: Dict[str, object], + object_class=server_model.ProtocolInformation) -> server_model.ProtocolInformation: + ret = object_class( + href=_get_ts(dct, 'href', str), + endpoint_protocol=_get_ts(dct, 'endpointProtocol', + str) if 'endpointProtocol' in dct else None, + endpoint_protocol_version=_get_ts(dct, + 'endpointProtocolVersion', + list) if 
'endpointProtocolVersion' in dct else None, + subprotocol=_get_ts(dct, 'subprotocol', + str) if 'subprotocol' in dct else None, + subprotocol_body=_get_ts(dct, 'subprotocolBody', + str) if 'subprotocolBody' in dct else None, + subprotocol_body_encoding=_get_ts(dct, + 'subprotocolBodyEncoding', + str) if 'subprotocolBodyEncoding' in dct else None + ) + return ret + + @classmethod + def _construct_endpoint(cls, dct: Dict[str, object], + object_class=server_model.Endpoint) -> server_model.Endpoint: + ret = object_class( + protocol_information=cls._construct_protocol_information( + _get_ts(dct, 'protocolInformation', dict), + server_model.ProtocolInformation + ), + interface=_get_ts(dct, 'interface', + str) + ) + cls._amend_abstract_attributes(ret, dct) + return ret + + @classmethod + def _construct_submodel_descriptor( + cls, dct: Dict[str, object], + object_class=server_model.SubmodelDescriptor) -> server_model.SubmodelDescriptor: + ret = object_class(id_=_get_ts(dct, 'id', str), + endpoints=[]) + cls._amend_abstract_attributes(ret, dct) + for endpoint_dct in _get_ts(dct, 'endpoints', list): + if 'protocolInformation' in endpoint_dct: + ret.endpoints.append( + cls._construct_endpoint(endpoint_dct, + server_model.Endpoint)) + elif 'href' in endpoint_dct: + protocol_info = server_model.ProtocolInformation( + href=_get_ts(endpoint_dct['href'], 'href', str), + endpoint_protocol=_get_ts(endpoint_dct['href'], + 'endpointProtocol', + str) if 'endpointProtocol' in + endpoint_dct[ + 'href'] else None, + endpoint_protocol_version=_get_ts( + endpoint_dct['href'], + 'endpointProtocolVersion', + list) if 'endpointProtocolVersion' in + endpoint_dct['href'] else None + ) + ret.endpoints.append(server_model.Endpoint( + protocol_information=protocol_info, + interface=_get_ts(endpoint_dct, 'interface', + str))) + if 'administration' in dct: + ret.administration = cls._construct_administrative_information( + _get_ts(dct, 'administration', dict)) + if 'idShort' in dct: + ret.id_short = 
_get_ts(dct, 'idShort', str) + if 'semanticId' in dct: + ret.semantic_id = cls._construct_reference(_get_ts(dct, 'semanticId', dict)) + if 'supplementalSemanticIds' in dct: + for ref in _get_ts(dct, 'supplementalSemanticIds', list): + ret.supplemental_semantic_id.append(cls._construct_reference(ref)) + return ret + + @classmethod + def _construct_asset_link( + cls, dct: Dict[str, object], object_class=server_model.AssetLink) -> server_model.AssetLink: + ret = object_class(name=_get_ts(dct, 'name', str), + value=_get_ts(dct, 'value', str)) + return ret + + +class ServerStrictAASFromJsonDecoder(ServerAASFromJsonDecoder): + """ + A strict version of the AASFromJsonDecoder class for deserializing Asset Administration Shell data from the + official JSON format + + This version has set ``failsafe = False``, which will lead to Exceptions raised for every missing attribute or wrong + object type. + """ + failsafe = False + + +class ServerStrippedAASFromJsonDecoder(ServerAASFromJsonDecoder): + """ + Decoder for stripped JSON objects. Used in the HTTP adapter. + """ + stripped = True + + +class ServerStrictStrippedAASFromJsonDecoder(ServerStrictAASFromJsonDecoder, ServerStrippedAASFromJsonDecoder): + """ + Non-failsafe decoder for stripped JSON objects. 
+ """ + pass + + +def read_server_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, + replace_existing: bool = False, + ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, + decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + return read_aas_json_file_into(object_store=object_store, file=file, replace_existing=replace_existing, + ignore_existing=ignore_existing, failsafe=failsafe, stripped=stripped, + decoder=decoder, keys_to_types=JSON_SERVER_AAS_TOP_LEVEL_KEYS_TO_TYPES) + + +class ServerAASToJsonEncoder(AASToJsonEncoder): + + @classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + serializers = super()._get_aas_class_serializers() + serializers.update({ + server_model.AssetAdministrationShellDescriptor: cls._asset_administration_shell_descriptor_to_json, + server_model.SubmodelDescriptor: cls._submodel_descriptor_to_json, + server_model.Endpoint: cls._endpoint_to_json, + server_model.ProtocolInformation: cls._protocol_information_to_json, + server_model.AssetLink: cls._asset_link_to_json + }) + return serializers + + @classmethod + def _abstract_classes_to_json(cls, obj: object) -> Dict[str, object]: + data: Dict[str, object] = super()._abstract_classes_to_json(obj) + if isinstance(obj, server_model.Descriptor): + if obj.description: + data['description'] = obj.description + if obj.display_name: + data['displayName'] = obj.display_name + if obj.extension: + data['extensions'] = list(obj.extension) + return data + + @classmethod + def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetAdministrationShellDescriptor) -> \ + Dict[str, object]: + """ + serialization of an object from class AssetAdministrationShell to json + + :param obj: object of class AssetAdministrationShell + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data.update(cls._namespace_to_json(obj)) + data['id'] = 
obj.id + if obj.administration: + data['administration'] = obj.administration + if obj.asset_kind: + data['assetKind'] = _generic.ASSET_KIND[obj.asset_kind] + if obj.asset_type: + data['assetType'] = obj.asset_type + if obj.global_asset_id: + data['globalAssetId'] = obj.global_asset_id + if obj.specific_asset_id: + data['specificAssetIds'] = list(obj.specific_asset_id) + if obj.endpoints: + data['endpoints'] = list(obj.endpoints) + if obj.id_short: + data['idShort'] = obj.id_short + if obj.submodel_descriptors: + data['submodelDescriptors'] = list(obj.submodel_descriptors) + return data + + @classmethod + def _protocol_information_to_json(cls, + obj: server_model.ProtocolInformation) -> \ + Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + + data['href'] = obj.href + if obj.endpoint_protocol: + data['endpointProtocol'] = obj.endpoint_protocol + if obj.endpoint_protocol_version: + data['endpointProtocolVersion'] = obj.endpoint_protocol_version + if obj.subprotocol: + data['subprotocol'] = obj.subprotocol + if obj.subprotocol_body: + data['subprotocolBody'] = obj.subprotocol_body + if obj.subprotocol_body_encoding: + data['subprotocolBodyEncoding'] = obj.subprotocol_body_encoding + return data + + @classmethod + def _endpoint_to_json(cls, obj: server_model.Endpoint) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['protocolInformation'] = cls._protocol_information_to_json( + obj.protocol_information) + data['interface'] = obj.interface + return data + + @classmethod + def _submodel_descriptor_to_json(cls, obj: server_model.SubmodelDescriptor) -> Dict[str, object]: + """ + serialization of an object from class Submodel to json + + :param obj: object of class Submodel + :return: dict with the serialized attributes of this object + """ + data = cls._abstract_classes_to_json(obj) + data['id'] = obj.id + data['endpoints'] = [cls._endpoint_to_json(ep) for ep in + obj.endpoints] + if obj.id_short: + data['idShort'] = obj.id_short + if 
obj.administration: + data['administration'] = obj.administration + if obj.semantic_id: + data['semanticId'] = obj.semantic_id + if obj.supplemental_semantic_id: + data['supplementalSemanticIds'] = list(obj.supplemental_semantic_id) + return data + + @classmethod + def _asset_link_to_json(cls, obj: server_model.AssetLink) -> Dict[str, object]: + data = cls._abstract_classes_to_json(obj) + data['name'] = obj.name + data['value'] = obj.value + return data diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py index ae5bb6506..04100051e 100644 --- a/server/app/interfaces/base.py +++ b/server/app/interfaces/base.py @@ -22,9 +22,11 @@ from basyx.aas import model from basyx.aas.adapter._generic import XML_NS_MAP -from basyx.aas.adapter.json import StrictStrippedAASFromJsonDecoder, StrictAASFromJsonDecoder, AASToJsonEncoder from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element from basyx.aas.model import AbstractObjectStore +from server.app import model as server_model +from server.app.adapter.jsonization import ServerAASToJsonEncoder, ServerStrictAASFromJsonDecoder, \ + ServerStrictStrippedAASFromJsonDecoder from server.app.util.converters import base64url_decode @@ -159,7 +161,7 @@ def __init__(self, *args, content_type="text/xml", **kwargs): super().__init__(*args, **kwargs, content_type=content_type) -class ResultToJsonEncoder(AASToJsonEncoder): +class ResultToJsonEncoder(ServerAASToJsonEncoder): @classmethod def _result_to_json(cls, result: Result) -> Dict[str, object]: return { @@ -292,7 +294,12 @@ class HTTPApiDecoder: @classmethod def check_type_support(cls, type_: type): - if type_ not in cls.type_constructables_map: + tolerated_types = ( + server_model.AssetAdministrationShellDescriptor, + server_model.SubmodelDescriptor, + server_model.AssetLink, + ) + if type_ not in cls.type_constructables_map and type_ not in tolerated_types: raise TypeError(f"Parsing {type_} is not supported!") @classmethod @@ 
-304,8 +311,8 @@ def assert_type(cls, obj: object, type_: Type[T]) -> T: @classmethod def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: cls.check_type_support(expect_type) - decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ - else StrictAASFromJsonDecoder + decoder: Type[ServerStrictAASFromJsonDecoder] = ServerStrictStrippedAASFromJsonDecoder if stripped \ + else ServerStrictAASFromJsonDecoder try: parsed = json.loads(data, cls=decoder) if isinstance(parsed, list) and expect_single: @@ -324,6 +331,10 @@ def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool model.SpecificAssetId: decoder._construct_specific_asset_id, # type: ignore[assignment] model.Reference: decoder._construct_reference, # type: ignore[assignment] model.Qualifier: decoder._construct_qualifier, # type: ignore[assignment] + server_model.AssetAdministrationShellDescriptor: + decoder._construct_asset_administration_shell_descriptor, # type: ignore[assignment] + server_model.SubmodelDescriptor: decoder._construct_submodel_descriptor, # type: ignore[assignment] + server_model.AssetLink: decoder._construct_asset_link, # type: ignore[assignment] } constructor: Optional[Callable[..., T]] = mapping.get(expect_type) diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py new file mode 100644 index 000000000..b08448b06 --- /dev/null +++ b/server/app/interfaces/discovery.py @@ -0,0 +1,215 @@ +""" +This module implements the Discovery interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'. 
+""" + +import abc +import json +from typing import Dict, List, Set, Any + +import werkzeug.exceptions +from pymongo import MongoClient +from pymongo.collection import Collection +from werkzeug.routing import Rule, Submount +from werkzeug.wrappers import Request, Response + +from basyx.aas import model +from server.app.util.converters import Base64URLConverter +from server.app.interfaces.base import BaseWSGIApp, HTTPApiDecoder +from .. import model as server_model +from ..adapter.jsonization import ServerAASToJsonEncoder + +encoder=ServerAASToJsonEncoder() + +class AbstractDiscoveryStore(metaclass=abc.ABCMeta): + aas_id_to_asset_ids: Any + asset_id_to_aas_ids: Any + + @abc.abstractmethod + def __init__(self): + pass + + @abc.abstractmethod + def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: + pass + + @abc.abstractmethod + def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: + pass + + @abc.abstractmethod + def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: + pass + + @abc.abstractmethod + def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: + pass + + @abc.abstractmethod + def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_identifier: model.Identifier) -> None: + pass + + @abc.abstractmethod + def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + pass + + + +class InMemoryDiscoveryStore(AbstractDiscoveryStore): + def __init__(self): + self.aas_id_to_asset_ids: Dict[model.Identifier, Set[model.SpecificAssetId]] = {} + self.asset_id_to_aas_ids: Dict[model.SpecificAssetId, Set[model.Identifier]] = {} + + def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: + return list(self.aas_id_to_asset_ids.get(aas_id, set())) + + def 
add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, + asset_ids: List[model.SpecificAssetId]) -> None: + serialized_assets = [encoder.default(asset_id) for asset_id in asset_ids] + if aas_id in self.aas_id_to_asset_ids: + for asset in serialized_assets: + if asset not in self.aas_id_to_asset_ids[aas_id]: + self.aas_id_to_asset_ids[aas_id].append(asset) + else: + self.aas_id_to_asset_ids[aas_id] = serialized_assets[:] + + def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: + key = aas_id + if key in self.aas_id_to_asset_ids: + del self.aas_id_to_asset_ids[key] + + def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: + result = [] + for asset_key, aas_ids in self.asset_id_to_aas_ids.items(): + expected_key = f"{asset_link.name}:{asset_link.value}" + if asset_key == expected_key: + result.extend(list(aas_ids)) + return result + + def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = f"{asset_id.name}:{asset_id.value}" + if asset_key in self.asset_id_to_aas_ids: + self.asset_id_to_aas_ids[asset_key].add(aas_id) + else: + self.asset_id_to_aas_ids[asset_key] = {aas_id} + + def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = f"{asset_id.name}:{asset_id.value}" + if asset_key in self.asset_id_to_aas_ids: + self.asset_id_to_aas_ids[asset_key].discard(aas_id) + + +class MongoDiscoveryStore(AbstractDiscoveryStore): + def __init__(self, + uri: str = "mongodb://localhost:27017", + db_name: str = "basyx", + coll_aas_to_assets: str = "aas_to_assets", + coll_asset_to_aas: str = "asset_to_aas"): + self.client = MongoClient(uri) + self.db = self.client[db_name] + self.coll_aas_to_assets: Collection = self.db[coll_aas_to_assets] + self.coll_asset_to_aas: Collection = self.db[coll_asset_to_aas] + # Create an index for fast asset reverse lookups. 
+ self.coll_asset_to_aas.create_index("_id") + + def get_all_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> List[model.SpecificAssetId]: + key = aas_id + doc = self.coll_aas_to_assets.find_one({"_id": key}) + return doc["asset_ids"] if doc and "asset_ids" in doc else [] + + def add_specific_asset_ids_to_aas(self, aas_id: model.Identifier, asset_ids: List[model.SpecificAssetId]) -> None: + key = aas_id + # Convert each SpecificAssetId using the serialization helper. + serializable_assets = [encoder.default(asset_id) for asset_id in asset_ids] + self.coll_aas_to_assets.update_one( + {"_id": key}, + {"$addToSet": {"asset_ids": {"$each": serializable_assets}}}, + upsert=True + ) + + def delete_specific_asset_ids_by_aas_id(self, aas_id: model.Identifier) -> None: + key = aas_id + self.coll_aas_to_assets.delete_one({"_id": key}) + + def search_aas_ids_by_asset_link(self, asset_link: server_model.AssetLink) -> List[model.Identifier]: + # Query MongoDB for specificAssetIds where 'name' and 'value' match + doc = self.coll_asset_to_aas.find_one({ + "name": asset_link.name, + "value": asset_link.value + }) + return doc["aas_ids"] if doc and "aas_ids" in doc else [] + + def _add_aas_id_to_specific_asset_id(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = str(encoder.default(asset_id)) + self.coll_asset_to_aas.update_one( + {"_id": asset_key}, + {"$addToSet": {"aas_ids": aas_id}}, + upsert=True + ) + + def _delete_aas_id_from_specific_asset_ids(self, asset_id: model.SpecificAssetId, aas_id: model.Identifier) -> None: + asset_key = str(encoder.default(asset_id)) + self.coll_asset_to_aas.update_one( + {"_id": asset_key}, + {"$pull": {"aas_ids": aas_id}} + ) + + +class DiscoveryAPI(BaseWSGIApp): + def __init__(self, + persistent_store: AbstractDiscoveryStore, base_path: str = "/api/v3.0"): + self.persistent_store: AbstractDiscoveryStore = persistent_store + self.url_map = werkzeug.routing.Map([ + Submount(base_path, [ + 
Rule("/lookup/shellsByAssetLink", methods=["POST"], + endpoint=self.search_all_aas_ids_by_asset_link), + Submount("/lookup/shells", [ + Rule("/", methods=["GET"], + endpoint=self.get_all_specific_asset_ids_by_aas_id), + Rule("/", methods=["POST"], + endpoint=self.post_all_asset_links_by_id), + Rule("/", methods=["DELETE"], + endpoint=self.delete_all_asset_links_by_id), + ]), + ]) + ], converters={ + "base64url": Base64URLConverter + }, strict_slashes=False) + + def search_all_aas_ids_by_asset_link(self, request: Request, url_args: dict, response_t: type, + **_kwargs) -> Response: + asset_links = HTTPApiDecoder.request_body_list(request, server_model.AssetLink, False) + matching_aas_keys = set() + for asset_link in asset_links: + aas_keys = self.persistent_store.search_aas_ids_by_asset_link(asset_link) + matching_aas_keys.update(aas_keys) + matching_aas_keys = list(matching_aas_keys) + paginated_slice, cursor = self._get_slice(request, matching_aas_keys) + return response_t(list(paginated_slice), cursor=cursor) + + def get_all_specific_asset_ids_by_aas_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + aas_identifier = url_args.get("aas_id") + asset_ids = self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier) + return response_t(asset_ids) + + def post_all_asset_links_by_id(self, request: Request, url_args: dict, response_t: type, **_kwargs) -> Response: + aas_identifier = url_args.get("aas_id") + specific_asset_ids = HTTPApiDecoder.request_body_list(request, model.SpecificAssetId, False) + self.persistent_store.add_specific_asset_ids_to_aas(aas_identifier, specific_asset_ids) + for asset_id in specific_asset_ids: + self.persistent_store._add_aas_id_to_specific_asset_id(asset_id, aas_identifier) + updated = {aas_identifier: self.persistent_store.get_all_specific_asset_ids_by_aas_id(aas_identifier)} + return response_t(updated) + + def delete_all_asset_links_by_id(self, request: Request, url_args: dict, 
response_t: type, **_kwargs) -> Response: + aas_identifier = url_args.get("aas_id") + self.persistent_store.delete_specific_asset_ids_by_aas_id(aas_identifier) + for key in list(self.persistent_store.asset_id_to_aas_ids.keys()): + self.persistent_store.asset_id_to_aas_ids[key].discard(aas_identifier) + return response_t() + + +if __name__ == "__main__": + from werkzeug.serving import run_simple + + run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), + use_debugger=True, use_reloader=True) diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py new file mode 100644 index 000000000..f33ab1651 --- /dev/null +++ b/server/app/interfaces/registry.py @@ -0,0 +1,280 @@ +""" +This module implements the Registry interface defined in the 'Specification of the Asset Administration Shell Part 2 – Application Programming Interface'. +""" + +from typing import Dict, Iterator, List, Type, Tuple + +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.urls +import werkzeug.utils +from werkzeug.exceptions import Conflict, NotFound +from werkzeug.routing import MapAdapter, Rule, Submount +from werkzeug.wrappers import Request, Response + +import server.app.model as server_model +from basyx.aas import model +from server.app.util.converters import Base64URLConverter +from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder + + +class RegistryAPI(ObjectStoreWSGIApp): + def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/api/v3.0"): + self.object_store: model.AbstractObjectStore = object_store + self.url_map = werkzeug.routing.Map([ + Submount(base_path, [ + Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_all_aas_descriptors), + Rule("/shell-descriptors", methods=["POST"], endpoint=self.post_aas_descriptor), + Submount("/shell-descriptors", [ + Rule("/", methods=["GET"], endpoint=self.get_aas_descriptor_by_id), + Rule("/", 
methods=["PUT"], endpoint=self.put_aas_descriptor_by_id), + Rule("/", methods=["DELETE"], endpoint=self.delete_aas_descriptor_by_id), + Submount("/", [ + Rule("/submodel-descriptors", methods=["GET"], + endpoint=self.get_all_submodel_descriptors_through_superpath), + Rule("/submodel-descriptors", methods=["POST"], + endpoint=self.post_submodel_descriptor_through_superpath), + Submount("/submodel-descriptors", [ + Rule("/", methods=["GET"], + endpoint=self.get_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["PUT"], + endpoint=self.put_submodel_descriptor_by_id_through_superpath), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_descriptor_by_id_through_superpath), + ]) + ]) + ]), + Rule("/submodel-descriptors", methods=["GET"], endpoint=self.get_all_submodel_descriptors), + Rule("/submodel-descriptors", methods=["POST"], endpoint=self.post_submodel_descriptor), + Submount("/submodel-descriptors", [ + Rule("/", methods=["GET"], endpoint=self.get_submodel_descriptor_by_id), + Rule("/", methods=["PUT"], endpoint=self.put_submodel_descriptor_by_id), + Rule("/", methods=["DELETE"], + endpoint=self.delete_submodel_descriptor_by_id), + ]) + ]) + ], converters={ + "base64url": Base64URLConverter + }, strict_slashes=False) + + def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ + Iterator[server_model.AssetAdministrationShellDescriptor], int]: + + descriptors: Iterator[server_model.AssetAdministrationShellDescriptor] = self._get_all_obj_of_type( + server_model.AssetAdministrationShellDescriptor + ) + + id_short = request.args.get("idShort") + if id_short is not None: + descriptors = filter(lambda desc: desc.id_short == id_short, descriptors) + + asset_ids = request.args.getlist("assetIds") + if asset_ids is not None: + # Decode and instantiate SpecificAssetIds + specific_asset_ids: List[model.SpecificAssetId] = list( + map(lambda asset_id: HTTPApiDecoder.base64url_json(asset_id, model.SpecificAssetId, False), asset_ids) + ) + # 
Filter AAS based on these SpecificAssetIds + descriptors = filter( + lambda desc: all(specific_asset_id in desc.specific_asset_id + for specific_asset_id in specific_asset_ids), + descriptors + ) + + paginated_descriptors, end_index = self._get_slice(request, descriptors) + return paginated_descriptors, end_index + + def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministrationShellDescriptor: + return self._get_obj_ts(url_args["aas_id"], server_model.AssetAdministrationShellDescriptor) + + def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: + submodel_descriptors: Iterator[server_model.SubmodelDescriptor] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) + id_short = request.args.get("idShort") + if id_short is not None: + submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) + semantic_id = request.args.get("semanticId") + if semantic_id is not None: + spec_semantic_id = HTTPApiDecoder.base64url_json( + semantic_id, model.Reference, False) # type: ignore[type-abstract] + submodel_descriptors = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodel_descriptors) + paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) + return paginated_submodel_descriptors, end_index + + def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor: + return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) + + # ------ AAS REGISTRY ROUTES ------- + def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + aas_descriptors, cursor = self._get_all_aas_descriptors(request) + return response_t(list(aas_descriptors), cursor=cursor) + + def post_aas_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + map_adapter: MapAdapter) -> Response: + descriptor = 
HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, False) + try: + self.object_store.add(descriptor) + except KeyError as e: + raise Conflict(f"AssetAdministrationShellDescriptor with Identifier {descriptor.id} already exists!") from e + descriptor.commit() + created_resource_url = map_adapter.build(self.get_aas_descriptor_by_id, { + "aas_id": descriptor.id + }, force_external=True) + return response_t(descriptor, status=201, headers={"Location": created_resource_url}) + + def get_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + descriptor = self._get_aas_descriptor(url_args) + return response_t(descriptor) + + def put_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + descriptor = self._get_aas_descriptor(url_args) + descriptor.update_from(HTTPApiDecoder.request_body(request, server_model.AssetAdministrationShellDescriptor, + is_stripped_request(request))) + descriptor.commit() + return response_t() + + def delete_aas_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + descriptor = self._get_aas_descriptor(url_args) + self.object_store.remove(descriptor) + return response_t() + + def get_all_submodel_descriptors_through_superpath(self, + request: Request, + url_args: Dict, + response_t: Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_aas_descriptor(url_args) + submodel_descriptors, cursor = self._get_slice(request, + aas_descriptor.submodel_descriptors) + return response_t(list(submodel_descriptors), cursor=cursor) + + def get_submodel_descriptor_by_id_through_superpath(self, + request: Request, + url_args: Dict, + response_t: + Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_aas_descriptor(url_args) + submodel_id = url_args["submodel_id"] + submodel_descriptor = next( + 
(sd for sd in aas_descriptor.submodel_descriptors if + sd.id == submodel_id), None) + if submodel_descriptor is None: + raise NotFound( + f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") + return response_t(submodel_descriptor) + + def post_submodel_descriptor_through_superpath(self, + request: Request, + url_args: Dict, + response_t: Type[ + APIResponse], + map_adapter: MapAdapter) -> Response: + aas_descriptor = self._get_aas_descriptor(url_args) + submodel_descriptor = HTTPApiDecoder.request_body(request, + server_model.SubmodelDescriptor, + is_stripped_request( + request)) + if any(sd.id == submodel_descriptor.id for sd in + aas_descriptor.submodel_descriptors): + raise Conflict( + f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") + aas_descriptor.submodel_descriptors.append(submodel_descriptor) + aas_descriptor.commit() + created_resource_url = map_adapter.build( + self.get_submodel_descriptor_by_id_through_superpath, { + "aas_id": aas_descriptor.id, + "submodel_id": submodel_descriptor.id + }, force_external=True) + return response_t(submodel_descriptor, status=201, + headers={"Location": created_resource_url}) + + def put_submodel_descriptor_by_id_through_superpath(self, + request: Request, + url_args: Dict, + response_t: + Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_aas_descriptor(url_args) + submodel_id = url_args["submodel_id"] + submodel_descriptor = next( + (sd for sd in aas_descriptor.submodel_descriptors if + sd.id == submodel_id), None) + if submodel_descriptor is None: + raise NotFound( + f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") + submodel_descriptor.update_from( + HTTPApiDecoder.request_body(request, + server_model.SubmodelDescriptor, + is_stripped_request(request))) + aas_descriptor.commit() + return response_t() + + def delete_submodel_descriptor_by_id_through_superpath(self, + request: 
Request, + url_args: Dict, + response_t: + Type[ + APIResponse], + **_kwargs) -> Response: + aas_descriptor = self._get_aas_descriptor(url_args) + submodel_id = url_args["submodel_id"] + submodel_descriptor = next( + (sd for sd in aas_descriptor.submodel_descriptors if sd.id == submodel_id), None) + if submodel_descriptor is None: + raise NotFound(f"Submodel Descriptor with Identifier {submodel_id} not found in AssetAdministrationShell!") + aas_descriptor.submodel_descriptors.remove(submodel_descriptor) + aas_descriptor.commit() + return response_t() + + # ------ Submodel REGISTRY ROUTES ------- + def get_all_submodel_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + submodel_descriptors, cursor = self._get_all_submodel_descriptors(request) + return response_t(list(submodel_descriptors), cursor=cursor, stripped=is_stripped_request(request)) + + def get_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + submodel_descriptor = self._get_submodel_descriptor(url_args) + return response_t(submodel_descriptor, stripped=is_stripped_request(request)) + + def post_submodel_descriptor(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + map_adapter: MapAdapter) -> Response: + submodel_descriptor = HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, + is_stripped_request(request)) + try: + self.object_store.add(submodel_descriptor) + except KeyError as e: + raise Conflict(f"Submodel Descriptor with Identifier {submodel_descriptor.id} already exists!") from e + submodel_descriptor.commit() + created_resource_url = map_adapter.build(self.get_submodel_descriptor_by_id, { + "submodel_id": submodel_descriptor.id + }, force_external=True) + return response_t(submodel_descriptor, status=201, headers={"Location": created_resource_url}) + + def put_submodel_descriptor_by_id(self, request: Request, url_args: Dict, 
response_t: Type[APIResponse], + **_kwargs) -> Response: + submodel_descriptor = self._get_submodel_descriptor(url_args) + submodel_descriptor.update_from( + HTTPApiDecoder.request_body(request, server_model.SubmodelDescriptor, is_stripped_request(request))) + submodel_descriptor.commit() + return response_t() + + def delete_submodel_descriptor_by_id(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + self.object_store.remove(self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor)) + return response_t() + + +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8083, RegistryAPI(create_full_example()), + use_debugger=True, use_reloader=True) diff --git a/server/app/model/__init__.py b/server/app/model/__init__.py new file mode 100644 index 000000000..5736b5492 --- /dev/null +++ b/server/app/model/__init__.py @@ -0,0 +1,2 @@ +from .descriptor import * +from .endpoint import * diff --git a/server/app/model/descriptor.py b/server/app/model/descriptor.py new file mode 100644 index 000000000..38276cd2d --- /dev/null +++ b/server/app/model/descriptor.py @@ -0,0 +1,109 @@ +from __future__ import absolute_import + +import abc +from typing import Optional, Iterable, List + +from basyx.aas import model +from server.app.model.endpoint import Endpoint + + +class Descriptor(model.HasExtension, metaclass=abc.ABCMeta): + @abc.abstractmethod + def __init__(self, description: Optional[model.MultiLanguageTextType] = None, + display_name: Optional[model.MultiLanguageNameType] = None, extension: Iterable[model.Extension] = ()): + super().__init__() + self.description: Optional[model.MultiLanguageTextType] = description + self.display_name: Optional[model.MultiLanguageNameType] = display_name + self.extension = model.NamespaceSet(self, [("name", True)], extension) + + def commit(self): + pass + + def 
update(self): + pass + + def update_from(self, other: "Descriptor", update_source: bool = False): + """ + Updates the descriptor's attributes from another descriptor. + + :param other: The descriptor to update from. + :param update_source: Placeholder for compatibility; not used in this context. + """ + for attr in vars(other): + if attr == "id": + continue # Skip updating the unique identifier of the AAS + setattr(self, attr, getattr(other, attr)) + + +class SubmodelDescriptor(Descriptor): + + def __init__(self, id_: model.Identifier, endpoints: List[Endpoint], + administration: Optional[model.AdministrativeInformation] = None, + id_short: Optional[model.NameType] = None, semantic_id: Optional[model.Reference] = None, + supplemental_semantic_id: Iterable[model.Reference] = ()): + super().__init__() + self.id: model.Identifier = id_ + self.endpoints: List[Endpoint] = endpoints + self.administration: Optional[model.AdministrativeInformation] = administration + self.id_short: Optional[model.NameType] = id_short + self.semantic_id: Optional[model.Reference] = semantic_id + self.supplemental_semantic_id: model.ConstrainedList[model.Reference] = \ + model.ConstrainedList(supplemental_semantic_id) + + +class AssetAdministrationShellDescriptor(Descriptor): + + def __init__(self, + id_: model.Identifier, + administration: Optional[model.AdministrativeInformation] = None, + asset_kind: Optional[model.AssetKind] = None, + asset_type: Optional[model.Identifier] = None, + endpoints: Optional[List[Endpoint]] = None, + global_asset_id: Optional[model.Identifier] = None, + id_short: Optional[model.NameType] = None, + specific_asset_id: Iterable[model.SpecificAssetId] = (), + submodel_descriptors: Optional[List[SubmodelDescriptor]] = None, + description: Optional[model.MultiLanguageTextType] = None, + display_name: Optional[model.MultiLanguageNameType] = None, + extension: Iterable[model.Extension] = ()): + """AssetAdministrationShellDescriptor - + + Nur das 'id'-Feld (id_) ist 
zwingend erforderlich. Alle anderen Felder erhalten Defaultwerte. + """ + super().__init__() + self.administration: Optional[model.AdministrativeInformation] = administration + self.asset_kind: Optional[model.AssetKind] = asset_kind + self.asset_type: Optional[model.Identifier] = asset_type + self.endpoints: Optional[ + List[Endpoint]] = endpoints if endpoints is not None else [] # leere Liste, falls nicht gesetzt + self.global_asset_id: Optional[model.Identifier] = global_asset_id + self.id_short: Optional[model.NameType] = id_short + self.id: model.Identifier = id_ + self._specific_asset_id: model.ConstrainedList[model.SpecificAssetId] = model.ConstrainedList( + specific_asset_id, + item_set_hook=self._check_constraint_set_spec_asset_id, + item_del_hook=self._check_constraint_del_spec_asset_id + ) + self.submodel_descriptors = submodel_descriptors if submodel_descriptors is not None else [] + self.description: Optional[model.MultiLanguageTextType] = description + self.display_name: Optional[model.MultiLanguageNameType] = display_name + self.extension = model.NamespaceSet(self, [("name", True)], extension) + + @property + def specific_asset_id(self) -> model.ConstrainedList[model.SpecificAssetId]: + return self._specific_asset_id + + @specific_asset_id.setter + def specific_asset_id(self, specific_asset_id: Iterable[model.SpecificAssetId]) -> None: + # constraints are checked via _check_constraint_set_spec_asset_id() in this case + self._specific_asset_id[:] = specific_asset_id + + def _check_constraint_set_spec_asset_id(self, items_to_replace: List[model.SpecificAssetId], + new_items: List[model.SpecificAssetId], + old_list: List[model.SpecificAssetId]) -> None: + model.AssetInformation._validate_aasd_131(self.global_asset_id, + len(old_list) - len(items_to_replace) + len(new_items) > 0) + + def _check_constraint_del_spec_asset_id(self, _item_to_del: model.SpecificAssetId, + old_list: List[model.SpecificAssetId]) -> None: + 
model.AssetInformation._validate_aasd_131(self.global_asset_id, len(old_list) > 1) diff --git a/server/app/model/endpoint.py b/server/app/model/endpoint.py new file mode 100644 index 000000000..3be6dc061 --- /dev/null +++ b/server/app/model/endpoint.py @@ -0,0 +1,107 @@ +from __future__ import absolute_import + +import re +from enum import Enum +from typing import Optional, List + +from basyx.aas.model import base + + +class AssetLink: + def __init__(self, name: base.LabelType, value: base.Identifier): + if not name: + raise ValueError("AssetLink 'name' must be a non-empty string.") + if not value: + raise ValueError("AssetLink 'value' must be a non-empty string.") + self.name = name + self.value = value + + +class SecurityTypeEnum(Enum): + NONE = "NONE" + RFC_TLSA = "RFC_TLSA" + W3C_DID = "W3C_DID" + + +class SecurityAttributeObject: + def __init__(self, type_: SecurityTypeEnum, key: str, value: str): + + if not isinstance(type_, SecurityTypeEnum): + raise ValueError(f"Invalid security type: {type_}. 
Must be one of {list(SecurityTypeEnum)}") + if not key or not isinstance(key, str): + raise ValueError("Key must be a non-empty string.") + if not value or not isinstance(value, str): + raise ValueError("Value must be a non-empty string.") + self.type = type_ + self.key = key + self.value = value + + +class ProtocolInformation: + + def __init__( + self, + href: str, + endpoint_protocol: Optional[str] = None, + endpoint_protocol_version: Optional[List[str]] = None, + subprotocol: Optional[str] = None, + subprotocol_body: Optional[str] = None, + subprotocol_body_encoding: Optional[str] = None, + security_attributes: Optional[List[SecurityAttributeObject]] = None + ): + if not href or not isinstance(href, str): + raise ValueError("href must be a non-empty string representing a valid URL.") + + self.href = href + self.endpoint_protocol = endpoint_protocol + self.endpoint_protocol_version = endpoint_protocol_version or [] + self.subprotocol = subprotocol + self.subprotocol_body = subprotocol_body + self.subprotocol_body_encoding = subprotocol_body_encoding + self.security_attributes = security_attributes or [] + + +class Endpoint: + INTERFACE_SHORTNAMES = { + "AAS", "SUBMODEL", "SERIALIZE", "AASX-FILE", "AAS-REGISTRY", + "SUBMODEL-REGISTRY", "AAS-REPOSITORY", "SUBMODEL-REPOSITORY", + "CD-REPOSITORY", "AAS-DISCOVERY" + } + VERSION_PATTERN = re.compile(r"^\d+(\.\d+)*$") + + def __init__(self, interface: base.NameType, protocol_information: ProtocolInformation): # noqa: E501 + + self.interface = interface + self.protocol_information = protocol_information + + @property + def interface(self) -> str: + return self._interface + + @interface.setter + def interface(self, interface: base.NameType): + if interface is None: + raise ValueError("Invalid value for `interface`, must not be `None`") + if not self.is_valid_interface(interface): + raise ValueError(f"Invalid interface format: {interface}. 
Expected format: '-', ") + + self._interface = interface + + @classmethod + def is_valid_interface(cls, interface: base.NameType) -> bool: + parts = interface.split("-", 1) + if len(parts) != 2: + return False + short_name, version = parts + return short_name in cls.INTERFACE_SHORTNAMES and cls.VERSION_PATTERN.match(version) + + @property + def protocol_information(self) -> ProtocolInformation: + return self._protocol_information + + @protocol_information.setter + def protocol_information(self, protocol_information: ProtocolInformation): + if protocol_information is None: + raise ValueError("Invalid value for `protocol_information`, must not be `None`") # noqa: E501 + + self._protocol_information = protocol_information From 1268f6a086d60b6a23097426df8b0eb9d654d24a Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Thu, 24 Apr 2025 15:56:07 +0200 Subject: [PATCH 42/52] correct json serialisation for AASDescriptor --- server/app/adapter/jsonization.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index cd5f01a76..92a87fc5a 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -97,8 +97,11 @@ def _construct_asset_administration_shell_descriptor( if 'idShort' in dct: ret.id_short = _get_ts(dct, 'idShort', str) if 'submodelDescriptors' in dct: - ret.submodel_descriptors = cls._construct_submodel_descriptor(_get_ts(dct, 'submodelDescriptors', list), - server_model.SubmodelDescriptor) + for sm_dct in _get_ts(dct, 'submodelDescriptors', list): + ret.submodel_descriptors.append(cls._construct_submodel_descriptor( + sm_dct, + server_model.SubmodelDescriptor + )) return ret @classmethod From 74b64d2bd57425c0d9ad91b1cb391834dd241143 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Fri, 25 Apr 2025 12:58:33 +0200 Subject: [PATCH 43/52] adapt filter options for get_all_aas_descritors and remove filter for get_all_submodels_descriptors --- 
server/app/interfaces/registry.py | 42 +++++++++++++++---------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index f33ab1651..fbef785a3 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -8,13 +8,13 @@ import werkzeug.routing import werkzeug.urls import werkzeug.utils -from werkzeug.exceptions import Conflict, NotFound +from werkzeug.exceptions import Conflict, NotFound, BadRequest from werkzeug.routing import MapAdapter, Rule, Submount from werkzeug.wrappers import Request, Response import server.app.model as server_model from basyx.aas import model -from server.app.util.converters import Base64URLConverter +from server.app.util.converters import Base64URLConverter, base64url_decode from server.app.interfaces.base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder @@ -64,20 +64,26 @@ def _get_all_aas_descriptors(self, request: "Request") -> Tuple[ server_model.AssetAdministrationShellDescriptor ) - id_short = request.args.get("idShort") - if id_short is not None: - descriptors = filter(lambda desc: desc.id_short == id_short, descriptors) - - asset_ids = request.args.getlist("assetIds") - if asset_ids is not None: - # Decode and instantiate SpecificAssetIds - specific_asset_ids: List[model.SpecificAssetId] = list( - map(lambda asset_id: HTTPApiDecoder.base64url_json(asset_id, model.SpecificAssetId, False), asset_ids) + asset_kind = request.args.get("assetKind") + if asset_kind is not None: + try: + asset_kind = model.AssetKind[asset_kind] + except KeyError: + raise BadRequest(f"Invalid assetKind '{asset_kind}', must be one of {list(model.AssetKind.__members__)}") + descriptors = filter( + lambda desc: desc.asset_kind == asset_kind, + descriptors ) - # Filter AAS based on these SpecificAssetIds + + asset_type = request.args.get("assetType") + if asset_type is not None: + asset_type = 
base64url_decode(asset_type) + try: + asset_type = model.Identifier(asset_type) + except Exception: + raise BadRequest(f"Invalid assetType: '{asset_type}'") descriptors = filter( - lambda desc: all(specific_asset_id in desc.specific_asset_id - for specific_asset_id in specific_asset_ids), + lambda desc: desc.asset_type == asset_type, descriptors ) @@ -89,14 +95,6 @@ def _get_aas_descriptor(self, url_args: Dict) -> server_model.AssetAdministratio def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[server_model.SubmodelDescriptor], int]: submodel_descriptors: Iterator[server_model.SubmodelDescriptor] = self._get_all_obj_of_type(server_model.SubmodelDescriptor) - id_short = request.args.get("idShort") - if id_short is not None: - submodel_descriptors = filter(lambda sm: sm.id_short == id_short, submodel_descriptors) - semantic_id = request.args.get("semanticId") - if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64url_json( - semantic_id, model.Reference, False) # type: ignore[type-abstract] - submodel_descriptors = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodel_descriptors) paginated_submodel_descriptors, end_index = self._get_slice(request, submodel_descriptors) return paginated_submodel_descriptors, end_index From b64d589ed6bd92b29cc55c8549c19dbd09fb5a40 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Fri, 25 Apr 2025 13:53:59 +0200 Subject: [PATCH 44/52] add service description --- server/app/interfaces/registry.py | 12 ++++++++++++ server/app/model/__init__.py | 1 + server/app/model/service.py | 21 +++++++++++++++++++++ 3 files changed, 34 insertions(+) create mode 100644 server/app/model/service.py diff --git a/server/app/interfaces/registry.py b/server/app/interfaces/registry.py index fbef785a3..2d34bcbf6 100644 --- a/server/app/interfaces/registry.py +++ b/server/app/interfaces/registry.py @@ -23,6 +23,7 @@ def __init__(self, object_store: model.AbstractObjectStore, base_path: str = "/a 
self.object_store: model.AbstractObjectStore = object_store self.url_map = werkzeug.routing.Map([ Submount(base_path, [ + Rule("/description", methods=["GET"], endpoint=self.get_self_description), Rule("/shell-descriptors", methods=["GET"], endpoint=self.get_all_aas_descriptors), Rule("/shell-descriptors", methods=["POST"], endpoint=self.post_aas_descriptor), Submount("/shell-descriptors", [ @@ -101,6 +102,17 @@ def _get_all_submodel_descriptors(self, request: Request) -> Tuple[Iterator[serv def _get_submodel_descriptor(self, url_args: Dict) -> server_model.SubmodelDescriptor: return self._get_obj_ts(url_args["submodel_id"], server_model.SubmodelDescriptor) + # ------ COMMON ROUTES ------- + def get_self_description(self, request: Request, url_args: Dict, response_t: Type[APIResponse], + **_kwargs) -> Response: + service_description = server_model.ServiceDescription(profiles=[ + server_model.ServiceSpecificationProfileEnum.AAS_REGISTRY_FULL, + server_model.ServiceSpecificationProfileEnum.AAS_REGISTRY_READ, + server_model.ServiceSpecificationProfileEnum.SUBMODEL_REGISTRY_FULL, + server_model.ServiceSpecificationProfileEnum.SUBMODEL_REGISTRY_READ + ]) + return response_t(service_description.to_dict()) + # ------ AAS REGISTRY ROUTES ------- def get_all_aas_descriptors(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response: diff --git a/server/app/model/__init__.py b/server/app/model/__init__.py index 5736b5492..0f5e5d953 100644 --- a/server/app/model/__init__.py +++ b/server/app/model/__init__.py @@ -1,2 +1,3 @@ from .descriptor import * from .endpoint import * +from .service import * diff --git a/server/app/model/service.py b/server/app/model/service.py new file mode 100644 index 000000000..39bc3dc03 --- /dev/null +++ b/server/app/model/service.py @@ -0,0 +1,21 @@ +from typing import List +from enum import Enum + +class ServiceSpecificationProfileEnum(str, Enum): + AAS_REGISTRY_FULL = 
"https://adminshell.io/aas/API/3/0/AssetAdministrationShellRegistryServiceSpecification/SSP-001" + AAS_REGISTRY_READ = "https://adminshell.io/aas/API/3/0/AssetAdministrationShellRegistryServiceSpecification/SSP-002" + SUBMODEL_REGISTRY_FULL = "https://adminshell.io/aas/API/3/0/SubmodelRegistryServiceSpecification/SSP-001" + SUBMODEL_REGISTRY_READ = "https://adminshell.io/aas/API/3/0/SubmodelRegistryServiceSpecification/SSP-002" + #TODO add other profiles + + +class ServiceDescription: + def __init__(self, profiles: List[ServiceSpecificationProfileEnum]): + if not profiles: + raise ValueError("At least one profile must be specified") + self.profiles = profiles + + def to_dict(self): + return { + "profiles": [p.value for p in self.profiles] + } \ No newline at end of file From 1de92b3ea84137131758f8a70af31f586974235e Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Mon, 23 Jun 2025 08:35:31 +0200 Subject: [PATCH 45/52] clean code --- server/app/adapter/jsonization.py | 7 +++---- server/app/interfaces/discovery.py | 1 - 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/server/app/adapter/jsonization.py b/server/app/adapter/jsonization.py index 92a87fc5a..e3e21f463 100644 --- a/server/app/adapter/jsonization.py +++ b/server/app/adapter/jsonization.py @@ -2,10 +2,9 @@ import server.app.model as server_model from basyx.aas import model -from basyx.aas.adapter import _generic -from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO +from basyx.aas.adapter._generic import ASSET_KIND_INVERSE, PathOrIO, ASSET_KIND, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES from basyx.aas.adapter.json import AASToJsonEncoder -from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES, \ +from basyx.aas.adapter.json.json_deserialization import _get_ts, AASFromJsonDecoder, \ read_aas_json_file_into import logging @@ -263,7 +262,7 @@ def _asset_administration_shell_descriptor_to_json(cls, obj: server_model.AssetA if 
obj.administration: data['administration'] = obj.administration if obj.asset_kind: - data['assetKind'] = _generic.ASSET_KIND[obj.asset_kind] + data['assetKind'] = ASSET_KIND[obj.asset_kind] if obj.asset_type: data['assetType'] = obj.asset_type if obj.global_asset_id: diff --git a/server/app/interfaces/discovery.py b/server/app/interfaces/discovery.py index b08448b06..4f456f789 100644 --- a/server/app/interfaces/discovery.py +++ b/server/app/interfaces/discovery.py @@ -3,7 +3,6 @@ """ import abc -import json from typing import Dict, List, Set, Any import werkzeug.exceptions From 59748cf87b2bd46ddcc81596912b5de5854527b7 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 8 Jul 2025 16:46:24 +0200 Subject: [PATCH 46/52] add README and docker deployment for registry --- Discovery Server/README.md | 2 +- Json2RegoRulesConverter.py | 98 +++++++++++++++++ Registry Server/README.md | 97 ----------------- policies/access_policy.rego | 12 +++ policies/rules.json | 101 ++++++++++++++++++ .../Dockerfile | 28 ++--- registry_server/README.md | 74 +++++++++++++ registry_server/app/__init__.py | 0 registry_server/app/main.py | 27 +++++ .../compose.yml | 4 +- .../entrypoint.sh | 2 +- registry_server/requirements.txt | 1 + .../stop-supervisor.sh | 0 .../storage/descriptorCompleteExample.json | 71 ++++++++++++ .../supervisord.ini | 0 .../uwsgi.ini | 1 + server/app/version.py | 21 ++++ test.py | 26 +++++ 18 files changed, 453 insertions(+), 112 deletions(-) create mode 100644 Json2RegoRulesConverter.py delete mode 100644 Registry Server/README.md create mode 100644 policies/access_policy.rego create mode 100644 policies/rules.json rename {Registry Server => registry_server}/Dockerfile (62%) create mode 100644 registry_server/README.md create mode 100644 registry_server/app/__init__.py create mode 100644 registry_server/app/main.py rename {Registry Server => registry_server}/compose.yml (53%) rename {Registry Server => registry_server}/entrypoint.sh (99%) create mode 100644 
registry_server/requirements.txt rename {Registry Server => registry_server}/stop-supervisor.sh (100%) create mode 100644 registry_server/storage/descriptorCompleteExample.json rename {Registry Server => registry_server}/supervisord.ini (100%) rename {Registry Server => registry_server}/uwsgi.ini (90%) create mode 100644 server/app/version.py create mode 100644 test.py diff --git a/Discovery Server/README.md b/Discovery Server/README.md index 0b0938167..a562dbae0 100644 --- a/Discovery Server/README.md +++ b/Discovery Server/README.md @@ -1,4 +1,4 @@ -# Eclipse BaSyx Python SDK - Dicovery Service +# Eclipse BaSyx Python SDK - Discovery Service This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). 
diff --git a/Json2RegoRulesConverter.py b/Json2RegoRulesConverter.py new file mode 100644 index 000000000..34c69898a --- /dev/null +++ b/Json2RegoRulesConverter.py @@ -0,0 +1,98 @@ +import json +from typing import Dict, Any + +def convert_idta_rule_to_rego(rule: Dict[str, Any], rule_index: int = 0) -> str: + formula = rule.get("FORMULA", {}) + acl = rule.get("ACL", {}) + rights = acl.get("RIGHTS", []) + access = acl.get("ACCESS", "DENY").upper() + + if access != "ALLOW": + return "" + + rego_conditions = [] + + def parse_expression(expr: Dict[str, Any]) -> str: + if "$eq" in expr: + left = parse_operand(expr["$eq"][0]) + right = parse_operand(expr["$eq"][1]) + return f"{left} == {right}" + elif "$regex" in expr: + left = parse_operand(expr["$regex"][0]) + pattern = expr["$regex"][1].get("$strVal", "") + return f're_match("{pattern}", {left})' + elif "$ge" in expr: + left = parse_operand(expr["$ge"][0]) + right = parse_operand(expr["$ge"][1]) + return f"{left} >= {right}" + elif "$le" in expr: + left = parse_operand(expr["$le"][0]) + right = parse_operand(expr["$le"][1]) + return f"{left} <= {right}" + elif "$and" in expr: + return " and ".join([f"({parse_expression(sub)})" for sub in expr["$and"]]) + elif "$or" in expr: + return " or ".join([f"({parse_expression(sub)})" for sub in expr["$or"]]) + else: + return "# unsupported expression" + + def parse_operand(operand: Dict[str, Any]) -> str: + if "$attribute" in operand: + attr = operand["$attribute"] + if "CLAIM" in attr: + return f'input.subject.{attr["CLAIM"]}' + if "REFERENCE" in attr: + return f'input.resource.{attr["REFERENCE"].replace("(Submodel)*#", "")}' + if "GLOBAL" in attr: + return f'input.context.{attr["GLOBAL"]}' + elif "$field" in operand: + return f'input.resource.{operand["$field"].replace("$sm#", "")}' + elif "$strVal" in operand: + return f'"{operand["$strVal"]}"' + elif "$timeVal" in operand: + return f'"{operand["$timeVal"]}"' + else: + return "# unsupported operand" + + if formula: + 
rego_conditions.append(parse_expression(formula)) + + if rights: + right_exprs = [f'input.action == "{right}"' for right in rights] + rego_conditions.append(f"({' or '.join(right_exprs)})") + + if not rego_conditions: + return "" + + conditions_block = "\n ".join(rego_conditions) + rule_block = f""" +allow_rule_{rule_index} {{ + {conditions_block} +}}""" + return rule_block + + +def generate_rego_policy_from_idta(json_data: Dict[str, Any]) -> str: + """ + Generate a complete Rego policy from an IDTA-01004 rule set. + """ + rules = json_data.get("AllAccessPermissionRules", {}).get("rules", []) + rule_blocks = [convert_idta_rule_to_rego(rule, idx) for idx, rule in enumerate(rules)] + rule_blocks = [rb for rb in rule_blocks if rb.strip()] + + allow_conditions = " or ".join([f"allow_rule_{i}" for i in range(len(rule_blocks))]) + rego_policy = "package accesscontrol\n\ndefault allow = false\n\n" + rego_policy += f"allow {{\n {allow_conditions}\n}}\n" + rego_policy += "\n".join(rule_blocks) + return rego_policy + +def save_rego_policy_to_file(rego_policy: str, file_path: str) -> None: + with open(file_path, "w") as f: + f.write(rego_policy) + + +if __name__ == "__main__": + with open("policies/rules.json") as f: + json_data = json.load(f) + rego_policy = generate_rego_policy_from_idta(json_data) + save_rego_policy_to_file(rego_policy, "policies/access_policy.rego") diff --git a/Registry Server/README.md b/Registry Server/README.md deleted file mode 100644 index 339226c53..000000000 --- a/Registry Server/README.md +++ /dev/null @@ -1,97 +0,0 @@ -# Eclipse BaSyx Python SDK - HTTP Server - -This package contains a Dockerfile to spin up an exemplary HTTP/REST server following the [Specification of the AAS Part 2 API][6] with ease. 
-The server currently implements the following interfaces: - -- [Asset Administration Shell Repository Service][4] -- [Submodel Repository Service][5] - -It uses the [HTTP API][1] and the [AASX][7], [JSON][8], and [XML][9] Adapters of the [BaSyx Python SDK][3], to serve regarding files from a given directory. -The files are only read, changes won't persist. - -Alternatively, the container can also be told to use the [Local-File Backend][2] instead, which stores AAS and Submodels as individual JSON files and allows for persistent changes (except supplementary files, i.e. files referenced by `File` submodel elements). -See [below](#options) on how to configure this. - -## Building -The container image can be built via: -``` -$ docker buildx build -t basyx-python-sdk-http-server . -``` - -## Running - -### Storage -The container needs to be provided with the directory `/storage` to store AAS and Submodel files: AASX, JSON, XML or JSON files of Local-File Backend. - -This directory can be mapped via the `-v` option from another image or a local directory. -To map the directory `storage` inside the container, `-v ./storage:/storage` can be used. -The directory `storage` will be created in the current working directory, if it doesn't already exist. - -### Port -The HTTP server inside the container listens on port 80 by default. -To expose it on the host on port 8080, use the option `-p 8080:80` when running it. - -### Options -The container can be configured via environment variables: -- `API_BASE_PATH` determines the base path under which all other API paths are made available. - Default: `/api/v3.0` -- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: - - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve AASX, JSON, XML files from the storage directory. - The files are not modified, all changes done via the API are only stored in memory. 
- - When instead set to `LOCAL_FILE`, the server makes use of the [LocalFileBackend][2], where AAS and Submodels are persistently stored as JSON files. - Supplementary files, i.e. files referenced by `File` submodel elements, are not stored in this case. -- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. - -### Running Examples - -Putting it all together, the container can be started via the following command: -``` -$ docker run -p 8080:80 -v ./storage:/storage basyx-python-sdk-http-server -``` - -Since Windows uses backslashes instead of forward slashes in paths, you'll have to adjust the path to the storage directory there: -``` -> docker run -p 8080:80 -v .\storage:/storage basyx-python-sdk-http-server -``` - -Per default, the server will use the `LOCAL_FILE_READ_ONLY` storage type and serve the API under `/api/v3.0` and read files from `/storage`. If you want to change this, you can do so like this: -``` -$ docker run -p 8080:80 -v ./storage2:/storage2 -e API_BASE_PATH=/api/v3.1 -e STORAGE_TYPE=LOCAL_FILE_BACKEND -e STORAGE_PATH=/storage2 basyx-python-sdk-http-server -``` - -## Building and running the image with docker-compose - -The container image can also be built and run via: -``` -$ docker compose up -``` - -This is the exemplary `docker-compose` file for the server: -````yaml -services: - app: - build: . - ports: - - "8080:80" - volumes: - - ./storage:/storage - -```` - -Here files are read from `/storage` and the server can be accessed at http://localhost:8080/api/v3.0/ from your host system. -To get a different setup this compose.yaml file can be adapted and expanded. - -## Acknowledgments - -This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. 
- -[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 -[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html -[3]: https://github.com/eclipse-basyx/basyx-python-sdk -[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 -[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 -[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces -[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx -[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html -[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html -[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/policies/access_policy.rego b/policies/access_policy.rego new file mode 100644 index 000000000..48d95fa41 --- /dev/null +++ b/policies/access_policy.rego @@ -0,0 +1,12 @@ +package accesscontrol + +default allow = false + +allow { + allow_rule_0 +} + +allow_rule_0 { + ((input.resource.semanticId == "SemanticID-Nameplate") or (input.resource.semanticId == "SemanticID-TechnicalData")) and (input.subject.Role == "admin") and (re_match("^https://company1.com/.*$", input.resource.Id)) and (input.context.UTCNOW >= "09:00") and (input.context.UTCNOW <= "17:00") + (input.action == "READ" or input.action == "WRITE") +} \ No newline at end of file diff --git a/policies/rules.json b/policies/rules.json new file mode 100644 index 000000000..bfe82ff72 --- /dev/null +++ b/policies/rules.json @@ -0,0 +1,101 @@ +{ + "AllAccessPermissionRules": { + "rules": [ + { + "ACL": { + "ATTRIBUTES": [ + { + "CLAIM": "Roles" + } + ], + "RIGHTS": [ + "READ", + "WRITE" + ], + "ACCESS": "ALLOW" + }, + "OBJECTS": [ + { + "IDENTIFIABLE": "(Submodel)*" + } + ], + "FORMULA": { + "$and": [ + { + "$or": [ + { + "$eq": [ + { + "$field": "$sm#semanticId" + }, + { + 
"$strVal": "SemanticID-Nameplate" + } + ] + }, + { + "$eq": [ + { + "$field": "$sm#semanticId" + }, + { + "$strVal": "SemanticID-TechnicalData" + } + ] + } + ] + }, + { + "$eq": [ + { + "$attribute": { + "CLAIM": "Role" + } + }, + { + "$strVal": "admin" + } + ] + }, + { + "$regex": [ + { + "$attribute": { + "REFERENCE": "(Submodel)*#Id" + } + }, + { + "$strVal": "^https://company1.com/.*$" + } + ] + }, + { + "$ge": [ + { + "$attribute": { + "GLOBAL": "UTCNOW" + } + }, + { + "$timeVal": "09:00" + } + ] + }, + { + "$le": [ + { + "$attribute": { + "GLOBAL": "UTCNOW" + } + }, + { + "$timeVal": "17:00" + } + ] + } + ] + } + } + ] + } +} \ No newline at end of file diff --git a/Registry Server/Dockerfile b/registry_server/Dockerfile similarity index 62% rename from Registry Server/Dockerfile rename to registry_server/Dockerfile index 3d52a15ab..344ddd0ee 100644 --- a/Registry Server/Dockerfile +++ b/registry_server/Dockerfile @@ -1,10 +1,5 @@ FROM python:3.11-alpine -LABEL org.label-schema.name="Eclipse BaSyx" \ - org.label-schema.version="1.0" \ - org.label-schema.description="Docker image for the basyx-python-sdk server application" \ - org.label-schema.maintainer="Eclipse BaSyx" - ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 @@ -13,13 +8,12 @@ ENV PYTHONUNBUFFERED=1 RUN apk update && \ apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ pip install uwsgi && \ - pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ apk del git bash -COPY uwsgi.ini /etc/uwsgi/ -COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini -COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +COPY registry_server/uwsgi.ini /etc/uwsgi/ +COPY registry_server/supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY registry_server/stop-supervisor.sh /etc/supervisor/stop-supervisor.sh RUN chmod +x /etc/supervisor/stop-supervisor.sh # Makes it possible to use a different configuration 
@@ -34,12 +28,22 @@ ENV LISTEN_PORT=80 ENV CLIENT_BODY_BUFFER_SIZE=1M # Copy the entrypoint that will generate Nginx additional configs -COPY entrypoint.sh /entrypoint.sh +COPY registry_server/entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] -COPY ../server/app /app +ENV SETUPTOOLS_SCM_PRETEND_VERSION=1.0.0 + + +COPY ./registry_server/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY ./sdk /sdk +COPY ./server /server +COPY ./registry_server/app /app + WORKDIR /app +RUN pip install ../sdk -CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] \ No newline at end of file diff --git a/registry_server/README.md b/registry_server/README.md new file mode 100644 index 000000000..887fb8bcb --- /dev/null +++ b/registry_server/README.md @@ -0,0 +1,74 @@ +# Eclipse BaSyx Python SDK - Registry Service + +This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Registry Service**. +It provides basic registry functionality for AAS and submodels descriptors, as specified in the official [Asset Administration Shell Registry Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRegistryServiceSpecification/V3.1.0_SSP-001) and [Submodel Registry Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRegistryServiceSpecification/V3.1.0_SSP-001). + +## Overview + +The Registry Service provides the endpoint for a given AAS-ID or Submodel-ID. Such an endpoint for an AAS and the related Submodel-IDs make the AAS and the submodels with their submodelElements accessible. 
+ + + +## Features +# AAS Registry: +| Function | Description | Example URL | +|--------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------| +| **GetAllAssetAdministrationShellDescriptors** | Return all AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors` | +| **GetAssetAdministrationShellDescriptorById** | Return a specific AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | +| **PostAssetAdministrationShellDescriptor** | Register/create a new AAS descriptor | `POST http://localhost:8083/api/v3.0/shell-descriptors` | +| **PutAssetAdministrationShellDescriptorById** | Update an existing AAS descriptor | `PUT http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | +| **DeleteAssetAdministrationShellDescriptorById** | Delete an AAS descriptor by ID | `DELETE http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | +| **GetSubmodelDescriptorsThroughSuperPath** | Return all submodel descriptors under AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | +| **PostSubmodelDescriptorThroughSuperPath** | Register/create a new submodel descriptor under AAS descriptor | `Post http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | +| **GetSubmodelDescriptorThroughSuperPath** | Return a specific submodel descriptor under AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | +| **PutSubmodelDescriptorThroughSuperPath** | Update a specific submodel descriptor under AAS descriptor | `PUT http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | +| **DeleteSubmodelDescriptorThroughSuperPath** | Delete a specific 
submodel descriptor under AAS descriptor | `DELETE http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | +| **GetDescription** | Return the self‑description of the AAS registry service | `GET http://localhost:8083/api/v3.0/description` | + +# Submodel Registry: +| Function | Description | Example URL | +|----------------------------------|--------------------------------------------------------------|-----------------------------------------------------------------------------------| +| **GetAllSubmodelDescriptors** | Return all submodel descriptors | `GET http://localhost:8083/api/v3.0/submodel-descriptors` | +| **PostSubmodelDescriptor** | Register/create a new submodel descriptor | `Post http://localhost:8083/api/v3.0/submodel-descriptors` | +| **GetSubmodelDescriptorById** | Return a specific submodel descriptor | `GET http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | +| **PutSubmodelDescriptorById** | Update a specific submodel descriptor | `PUT http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | +| **DeleteSubmodelDescriptorById** | Delete a specific submodel descriptor | `DELETE http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | +| **GetDescription** | Return the self‑description of the submodel registry service | `GET http://localhost:8083/api/v3.0/description` | + + + +## Specification Compliance + +- Complies with: **Asset Administration Shell Registry Service Specification v3.1.0_SSP-001** and **Submodel Registry Service Specification v3.1.0_SSP-001** + +## Configuration + +The service can be configured to use either: + +- **In-memory storage** (default): Temporary data storage that resets on service restart. +- **MongoDB storage**: Persistent backend storage using MongoDB. 
+ +### Configuration via Environment Variables + +| Variable | Description | Default | +|----------------|--------------------------------------------|-------------------------| +| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | +| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | +| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | + +## Deployment via Docker + +A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. +The container image can be built and run via: +```bash +docker compose up --build +``` + +## Test + +An example descriptor for testing purposes is provided as a JSON file in the [storage](./storage) folder. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker](https://github.com/tiangolo/uwsgi-nginx-docker) repository. + diff --git a/registry_server/app/__init__.py b/registry_server/app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/registry_server/app/main.py b/registry_server/app/main.py new file mode 100644 index 000000000..b182118ee --- /dev/null +++ b/registry_server/app/main.py @@ -0,0 +1,27 @@ +import sys +import os +sys.path.insert(0, "/") +from basyx.aas.backend.local_file import LocalFileObjectStore +from basyx.aas import model +from server.app.interfaces.registry import RegistryAPI + +storage_path = os.getenv("STORAGE_PATH", "/storage") +storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") +base_path = os.getenv("API_BASE_PATH") + +wsgi_optparams = {} + +if base_path is not None: + wsgi_optparams["base_path"] = base_path + +if storage_type == "LOCAL_FILE_BACKEND": + application = RegistryAPI(LocalFileObjectStore(storage_path), **wsgi_optparams) + +elif storage_type in "LOCAL_FILE_READ_ONLY": + object_store: model.DictObjectStore = model.DictObjectStore() + + application = RegistryAPI(object_store, **wsgi_optparams) + +else: + print(f"STORAGE_TYPE must be either LOCAL_FILE or LOCAL_FILE_READ_ONLY! 
Current value: {storage_type}", + file=sys.stderr) \ No newline at end of file diff --git a/Registry Server/compose.yml b/registry_server/compose.yml similarity index 53% rename from Registry Server/compose.yml rename to registry_server/compose.yml index 932422dbc..99f3e3cc9 100644 --- a/Registry Server/compose.yml +++ b/registry_server/compose.yml @@ -1,6 +1,8 @@ services: app: - build: . + build: + context: .. + dockerfile: registry_server/Dockerfile ports: - "8083:80" volumes: diff --git a/Registry Server/entrypoint.sh b/registry_server/entrypoint.sh similarity index 99% rename from Registry Server/entrypoint.sh rename to registry_server/entrypoint.sh index 722394409..522d4fca2 100644 --- a/Registry Server/entrypoint.sh +++ b/registry_server/entrypoint.sh @@ -68,4 +68,4 @@ else printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf fi -exec "$@" +exec "$@" \ No newline at end of file diff --git a/registry_server/requirements.txt b/registry_server/requirements.txt new file mode 100644 index 000000000..9c12eecda --- /dev/null +++ b/registry_server/requirements.txt @@ -0,0 +1 @@ +Werkzeug diff --git a/Registry Server/stop-supervisor.sh b/registry_server/stop-supervisor.sh similarity index 100% rename from Registry Server/stop-supervisor.sh rename to registry_server/stop-supervisor.sh diff --git a/registry_server/storage/descriptorCompleteExample.json b/registry_server/storage/descriptorCompleteExample.json new file mode 100644 index 000000000..f762aa836 --- /dev/null +++ b/registry_server/storage/descriptorCompleteExample.json @@ -0,0 +1,71 @@ +{ + "id": "https://example.org/aas/motor", + "endpoints": [ + { + "protocolInformation": { + "href": "https://localhost:1234/api/v3.0/aas", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ] + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "opc.tcp://localhost:4840" + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": 
"https://localhost:5678", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ], + "subprotocol": "OPC UA Basic SOAP", + "subprotocolBody": "ns=2;s=MyAAS", + "subprotocolBodyEncoding": "application/soap+xml" + }, + "interface": "AAS-3.0" + } + ], + "submodelDescriptors":[ + { + "id": "https://admin-shell.io/zvei/nameplate/1/0/Nameplate", + "endpoints": [ + { + "href": { + "href": "https://localhost:1234/api/v3.0/submodel", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ] + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "opc.tcp://localhost:4840" + }, + "interface": "AAS-3.0" + }, + { + "protocolInformation": { + "href": "https://localhost:5678", + "endpointProtocol": "HTTP", + "endpointProtocolVersion": [ + "1.1" + ], + "subprotocol": "OPC UA Basic SOAP", + "subprotocolBody": "ns=2;s=MyAAS", + "subprotocolBodyEncoding": "application/soap+xml" + }, + "interface": "AAS-3.0" + } + ] + } + + ] +} \ No newline at end of file diff --git a/Registry Server/supervisord.ini b/registry_server/supervisord.ini similarity index 100% rename from Registry Server/supervisord.ini rename to registry_server/supervisord.ini diff --git a/Registry Server/uwsgi.ini b/registry_server/uwsgi.ini similarity index 90% rename from Registry Server/uwsgi.ini rename to registry_server/uwsgi.ini index 9c54ae1cc..f333b2299 100644 --- a/Registry Server/uwsgi.ini +++ b/registry_server/uwsgi.ini @@ -7,3 +7,4 @@ hook-master-start = unix_signal:15 gracefully_kill_them_all need-app = true die-on-term = true show-config = false +logto = /tmp/uwsgi.log diff --git a/server/app/version.py b/server/app/version.py new file mode 100644 index 000000000..844ef354c --- /dev/null +++ b/server/app/version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import 
Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '0.1.dev2109+g5f9e7d2' +__version_tuple__ = version_tuple = (0, 1, 'dev2109', 'g5f9e7d2') diff --git a/test.py b/test.py new file mode 100644 index 000000000..f66264eff --- /dev/null +++ b/test.py @@ -0,0 +1,26 @@ + +from server.app.interfaces.registry import * +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8083, RegistryAPI(create_full_example()), + use_debugger=True, use_reloader=True) + +#from server.app.interfaces.discovery import * +#if __name__ == "__main__": +# from werkzeug.serving import run_simple + + # run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), + # use_debugger=True, use_reloader=True) +# from server.app.interfaces.repository import * +# if __name__ == "__main__": +# from werkzeug.serving import run_simple +# from basyx.aas.examples.data.example_aas import create_full_example +# +# run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), +# use_debugger=True, use_reloader=True) + + + + From a7efefca4c4e15ef1d8f6da42c9a4c3647199ac6 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 8 Jul 2025 17:10:13 +0200 Subject: [PATCH 47/52] remove files from another branch --- .gitignore | 2 + policies/access_policy.rego | 12 ----- policies/rules.json | 101 ------------------------------------ 3 files changed, 2 insertions(+), 113 deletions(-) delete mode 100644 policies/access_policy.rego delete mode 100644 policies/rules.json diff --git a/.gitignore b/.gitignore index 18b522c3a..c78c82036 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,5 @@ compliance_tool/aas_compliance_tool/version.py # ignore the content of the server storage 
server/storage/ +test.py/ +Json2RegoRulesConverter.py diff --git a/policies/access_policy.rego b/policies/access_policy.rego deleted file mode 100644 index 48d95fa41..000000000 --- a/policies/access_policy.rego +++ /dev/null @@ -1,12 +0,0 @@ -package accesscontrol - -default allow = false - -allow { - allow_rule_0 -} - -allow_rule_0 { - ((input.resource.semanticId == "SemanticID-Nameplate") or (input.resource.semanticId == "SemanticID-TechnicalData")) and (input.subject.Role == "admin") and (re_match("^https://company1.com/.*$", input.resource.Id)) and (input.context.UTCNOW >= "09:00") and (input.context.UTCNOW <= "17:00") - (input.action == "READ" or input.action == "WRITE") -} \ No newline at end of file diff --git a/policies/rules.json b/policies/rules.json deleted file mode 100644 index bfe82ff72..000000000 --- a/policies/rules.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "AllAccessPermissionRules": { - "rules": [ - { - "ACL": { - "ATTRIBUTES": [ - { - "CLAIM": "Roles" - } - ], - "RIGHTS": [ - "READ", - "WRITE" - ], - "ACCESS": "ALLOW" - }, - "OBJECTS": [ - { - "IDENTIFIABLE": "(Submodel)*" - } - ], - "FORMULA": { - "$and": [ - { - "$or": [ - { - "$eq": [ - { - "$field": "$sm#semanticId" - }, - { - "$strVal": "SemanticID-Nameplate" - } - ] - }, - { - "$eq": [ - { - "$field": "$sm#semanticId" - }, - { - "$strVal": "SemanticID-TechnicalData" - } - ] - } - ] - }, - { - "$eq": [ - { - "$attribute": { - "CLAIM": "Role" - } - }, - { - "$strVal": "admin" - } - ] - }, - { - "$regex": [ - { - "$attribute": { - "REFERENCE": "(Submodel)*#Id" - } - }, - { - "$strVal": "^https://company1.com/.*$" - } - ] - }, - { - "$ge": [ - { - "$attribute": { - "GLOBAL": "UTCNOW" - } - }, - { - "$timeVal": "09:00" - } - ] - }, - { - "$le": [ - { - "$attribute": { - "GLOBAL": "UTCNOW" - } - }, - { - "$timeVal": "17:00" - } - ] - } - ] - } - } - ] - } -} \ No newline at end of file From d3d4dbb65e542d56f6e142e2ede11b713bc95488 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 8 Jul 2025 
17:13:45 +0200 Subject: [PATCH 48/52] Stop tracking unnecessary files --- Json2RegoRulesConverter.py | 98 -------------------------------------- test.py | 26 ---------- 2 files changed, 124 deletions(-) delete mode 100644 Json2RegoRulesConverter.py delete mode 100644 test.py diff --git a/Json2RegoRulesConverter.py b/Json2RegoRulesConverter.py deleted file mode 100644 index 34c69898a..000000000 --- a/Json2RegoRulesConverter.py +++ /dev/null @@ -1,98 +0,0 @@ -import json -from typing import Dict, Any - -def convert_idta_rule_to_rego(rule: Dict[str, Any], rule_index: int = 0) -> str: - formula = rule.get("FORMULA", {}) - acl = rule.get("ACL", {}) - rights = acl.get("RIGHTS", []) - access = acl.get("ACCESS", "DENY").upper() - - if access != "ALLOW": - return "" - - rego_conditions = [] - - def parse_expression(expr: Dict[str, Any]) -> str: - if "$eq" in expr: - left = parse_operand(expr["$eq"][0]) - right = parse_operand(expr["$eq"][1]) - return f"{left} == {right}" - elif "$regex" in expr: - left = parse_operand(expr["$regex"][0]) - pattern = expr["$regex"][1].get("$strVal", "") - return f're_match("{pattern}", {left})' - elif "$ge" in expr: - left = parse_operand(expr["$ge"][0]) - right = parse_operand(expr["$ge"][1]) - return f"{left} >= {right}" - elif "$le" in expr: - left = parse_operand(expr["$le"][0]) - right = parse_operand(expr["$le"][1]) - return f"{left} <= {right}" - elif "$and" in expr: - return " and ".join([f"({parse_expression(sub)})" for sub in expr["$and"]]) - elif "$or" in expr: - return " or ".join([f"({parse_expression(sub)})" for sub in expr["$or"]]) - else: - return "# unsupported expression" - - def parse_operand(operand: Dict[str, Any]) -> str: - if "$attribute" in operand: - attr = operand["$attribute"] - if "CLAIM" in attr: - return f'input.subject.{attr["CLAIM"]}' - if "REFERENCE" in attr: - return f'input.resource.{attr["REFERENCE"].replace("(Submodel)*#", "")}' - if "GLOBAL" in attr: - return f'input.context.{attr["GLOBAL"]}' - elif 
"$field" in operand: - return f'input.resource.{operand["$field"].replace("$sm#", "")}' - elif "$strVal" in operand: - return f'"{operand["$strVal"]}"' - elif "$timeVal" in operand: - return f'"{operand["$timeVal"]}"' - else: - return "# unsupported operand" - - if formula: - rego_conditions.append(parse_expression(formula)) - - if rights: - right_exprs = [f'input.action == "{right}"' for right in rights] - rego_conditions.append(f"({' or '.join(right_exprs)})") - - if not rego_conditions: - return "" - - conditions_block = "\n ".join(rego_conditions) - rule_block = f""" -allow_rule_{rule_index} {{ - {conditions_block} -}}""" - return rule_block - - -def generate_rego_policy_from_idta(json_data: Dict[str, Any]) -> str: - """ - Generate a complete Rego policy from an IDTA-01004 rule set. - """ - rules = json_data.get("AllAccessPermissionRules", {}).get("rules", []) - rule_blocks = [convert_idta_rule_to_rego(rule, idx) for idx, rule in enumerate(rules)] - rule_blocks = [rb for rb in rule_blocks if rb.strip()] - - allow_conditions = " or ".join([f"allow_rule_{i}" for i in range(len(rule_blocks))]) - rego_policy = "package accesscontrol\n\ndefault allow = false\n\n" - rego_policy += f"allow {{\n {allow_conditions}\n}}\n" - rego_policy += "\n".join(rule_blocks) - return rego_policy - -def save_rego_policy_to_file(rego_policy: str, file_path: str) -> None: - with open(file_path, "w") as f: - f.write(rego_policy) - - -if __name__ == "__main__": - with open("policies/rules.json") as f: - json_data = json.load(f) - rego_policy = generate_rego_policy_from_idta(json_data) - save_rego_policy_to_file(rego_policy, "policies/access_policy.rego") diff --git a/test.py b/test.py deleted file mode 100644 index f66264eff..000000000 --- a/test.py +++ /dev/null @@ -1,26 +0,0 @@ - -from server.app.interfaces.registry import * -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - 
run_simple("localhost", 8083, RegistryAPI(create_full_example()), - use_debugger=True, use_reloader=True) - -#from server.app.interfaces.discovery import * -#if __name__ == "__main__": -# from werkzeug.serving import run_simple - - # run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), - # use_debugger=True, use_reloader=True) -# from server.app.interfaces.repository import * -# if __name__ == "__main__": -# from werkzeug.serving import run_simple -# from basyx.aas.examples.data.example_aas import create_full_example -# -# run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), -# use_debugger=True, use_reloader=True) - - - - From 673f18d0a825174de60f323a8de3d77fe6269d67 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 15 Jul 2025 11:30:30 +0200 Subject: [PATCH 49/52] Update README --- registry_server/README.md | 24 ++++++++---------------- 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/registry_server/README.md b/registry_server/README.md index 887fb8bcb..6c62e5068 100644 --- a/registry_server/README.md +++ b/registry_server/README.md @@ -19,7 +19,7 @@ The Registry Service provides the endpoint for a given AAS-ID or Submodel-ID. 
Su | **PutAssetAdministrationShellDescriptorById** | Update an existing AAS descriptor | `PUT http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | | **DeleteAssetAdministrationShellDescriptorById** | Delete an AAS descriptor by ID | `DELETE http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}` | | **GetSubmodelDescriptorsThroughSuperPath** | Return all submodel descriptors under AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | -| **PostSubmodelDescriptorThroughSuperPath** | Register/create a new submodel descriptor under AAS descriptor | `Post http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | +| **PostSubmodelDescriptorThroughSuperPath** | Register/create a new submodel descriptor under AAS descriptor | `POST http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors` | | **GetSubmodelDescriptorThroughSuperPath** | Return a specific submodel descriptor under AAS descriptor | `GET http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | | **PutSubmodelDescriptorThroughSuperPath** | Update a specific submodel descriptor under AAS descriptor | `PUT http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | | **DeleteSubmodelDescriptorThroughSuperPath** | Delete a specific submodel descriptor under AAS descriptor | `DELETE http://localhost:8083/api/v3.0/shell-descriptors/{aasIdentifier}/submodel-descriptors/{submodelIdentifier}` | @@ -29,7 +29,7 @@ The Registry Service provides the endpoint for a given AAS-ID or Submodel-ID. 
Su | Function | Description | Example URL | |----------------------------------|--------------------------------------------------------------|-----------------------------------------------------------------------------------| | **GetAllSubmodelDescriptors** | Return all submodel descriptors | `GET http://localhost:8083/api/v3.0/submodel-descriptors` | -| **PostSubmodelDescriptor** | Register/create a new submodel descriptor | `Post http://localhost:8083/api/v3.0/submodel-descriptors` | +| **PostSubmodelDescriptor** | Register/create a new submodel descriptor | `POST http://localhost:8083/api/v3.0/submodel-descriptors` | | **GetSubmodelDescriptorById** | Return a specific submodel descriptor | `GET http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | | **PutSubmodelDescriptorById** | Update a specific submodel descriptor | `PUT http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | | **DeleteSubmodelDescriptorById** | Delete a specific submodel descriptor | `DELETE http://localhost:8083/api/v3.0/submodel-descriptors/{submodelIdentifier}` | @@ -37,24 +37,16 @@ The Registry Service provides the endpoint for a given AAS-ID or Submodel-ID. Su -## Specification Compliance - -- Complies with: **Asset Administration Shell Registry Service Specification v3.1.0_SSP-001** and **Submodel Registry Service Specification v3.1.0_SSP-001** - ## Configuration -The service can be configured to use either: - -- **In-memory storage** (default): Temporary data storage that resets on service restart. -- **MongoDB storage**: Persistent backend storage using MongoDB. +The container can be configured via environment variables: -### Configuration via Environment Variables +- `API_BASE_PATH` determines the base path under which all other API paths are made available. 
Default: `/api/v3.0` +- `STORAGE_TYPE` can be one of `LOCAL_FILE_READ_ONLY` or `LOCAL_FILE_BACKEND`: + - When set to `LOCAL_FILE_READ_ONLY` (the default), the server will read and serve JSON files from the storage directory. The files are not modified; all changes done via the API are only stored in memory. + - When instead set to `LOCAL_FILE_BACKEND`, the server makes use of the [LocalFileBackend](https://github.com/eclipse-basyx/basyx-python-sdk/tree/main/backend/basyx_backend/local_file), where AAS and Submodel descriptors are persistently stored as JSON files. +- `STORAGE_PATH` sets the directory to read the files from *within the container*. If you bind your files to a directory different from the default `/storage`, you can use this variable to adjust the server accordingly. -| Variable | Description | Default | -|----------------|--------------------------------------------|-------------------------| -| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | -| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | -| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | ## Deployment via Docker From a581603fe43218c5b2be9ace50ab88ddd40148ce Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 15 Jul 2025 11:40:34 +0200 Subject: [PATCH 50/52] add docker deployment for discovery service --- discovery_server/Dockerfile | 50 +++++++++++++ discovery_server/README.md | 48 +++++++++++++ discovery_server/app/main.py | 25 +++++++ discovery_server/compose.yml | 9 +++ discovery_server/entrypoint.sh | 71 +++++++++++++++++++ discovery_server/requirements.txt | 2 + discovery_server/stop-supervisor.sh | 8 +++ .../storage/AssetIdsFullExample.json | 62 ++++++++++++++++ discovery_server/storage/ListOfAssetIds.json | 6 ++ discovery_server/supervisord.ini | 27 +++++++ discovery_server/uwsgi.ini | 9 +++ 11 files changed, 317 insertions(+) create mode 100644 discovery_server/Dockerfile create mode 100644 discovery_server/README.md create mode 100644
discovery_server/app/main.py create mode 100644 discovery_server/compose.yml create mode 100644 discovery_server/entrypoint.sh create mode 100644 discovery_server/requirements.txt create mode 100644 discovery_server/stop-supervisor.sh create mode 100644 discovery_server/storage/AssetIdsFullExample.json create mode 100644 discovery_server/storage/ListOfAssetIds.json create mode 100644 discovery_server/supervisord.ini create mode 100644 discovery_server/uwsgi.ini diff --git a/discovery_server/Dockerfile b/discovery_server/Dockerfile new file mode 100644 index 000000000..e0c1f1f14 --- /dev/null +++ b/discovery_server/Dockerfile @@ -0,0 +1,50 @@ +FROM python:3.11-alpine + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +ENV PYTHONPATH="${PYTHONPATH}:/app" + +# If we have more dependencies for the server it would make sense +# to refactor uwsgi to the pyproject.toml +RUN apk update && \ + apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ + pip install uwsgi && \ + apk del git bash + + +COPY discovery_server/uwsgi.ini /etc/uwsgi/ +COPY discovery_server/supervisord.ini /etc/supervisor/conf.d/supervisord.ini +COPY discovery_server/stop-supervisor.sh /etc/supervisor/stop-supervisor.sh +RUN chmod +x /etc/supervisor/stop-supervisor.sh + +# Makes it possible to use a different configuration +ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini +# object stores aren't thread-safe yet +# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 +ENV UWSGI_CHEAPER=0 +ENV UWSGI_PROCESSES=1 +ENV NGINX_MAX_UPLOAD=1M +ENV NGINX_WORKER_PROCESSES=1 +ENV LISTEN_PORT=80 +ENV CLIENT_BODY_BUFFER_SIZE=1M + +# Copy the entrypoint that will generate Nginx additional configs +COPY discovery_server/entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +ENV SETUPTOOLS_SCM_PRETEND_VERSION=1.0.0 + + +COPY ./discovery_server/requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt + +COPY ./sdk /sdk +COPY ./server /app/server +COPY ./discovery_server/app /app + +WORKDIR /app +RUN pip install ../sdk + +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] \ No newline at end of file diff --git a/discovery_server/README.md b/discovery_server/README.md new file mode 100644 index 000000000..5083f48a7 --- /dev/null +++ b/discovery_server/README.md @@ -0,0 +1,48 @@ +# Eclipse BaSyx Python SDK - Discovery Service + +This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. +It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). + +## Overview + +The Discovery Service stores and retrieves relations between AAS identifiers and asset identifiers. It acts as a lookup service for resolving asset-related queries to corresponding AAS. 
+ +## Features + +| Function | Description | Example URL | +|------------------------------------------|----------------------------------------------------------|-----------------------------------------------------------------------| +| **search_all_aas_ids_by_asset_link** | Find AAS identifiers by providing asset link values | `POST http://localhost:8084/api/v3.0/lookup/shellsByAssetLink` | +| **get_all_specific_asset_ids_by_aas_id** | Return specific asset ids associated with an AAS ID | `GET http://localhost:8084/api/v3.0/lookup/shells/{aasIdentifier}` | +| **post_all_asset_links_by_id** | Register specific asset ids linked to an AAS | `POST http://localhost:8084/api/v3.0/lookup/shells/{aasIdentifier}` | +| **delete_all_asset_links_by_id** | Delete all asset links associated with a specific AAS ID | `DELETE http://localhost:8084/api/v3.0/lookup/shells/{aasIdentifier}` | +| + +## Configuration + +The service can be configured to use either: + +- **In-memory storage** (default): Temporary data storage that resets on service restart. +- **MongoDB storage**: Persistent backend storage using MongoDB. + +### Configuration via Environment Variables + +| Variable | Description | Default | +|------------------|--------------------------------------------|-----------------------------| +| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | +| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | +| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | + +## Deployment via Docker + +A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. +The container image can be built and run via: +```bash +docker compose up --build +``` +## Test + +Examples of asset links and specific asset IDs for testing purposes are provided as JSON files in the [storage](./storage) folder. + +## Acknowledgments + +This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker](https://github.com/tiangolo/uwsgi-nginx-docker) repository. 
diff --git a/discovery_server/app/main.py b/discovery_server/app/main.py new file mode 100644 index 000000000..0092e6918 --- /dev/null +++ b/discovery_server/app/main.py @@ -0,0 +1,25 @@ +import os +import sys +from server.app.interfaces.discovery import DiscoveryAPI, MongoDiscoveryStore,InMemoryDiscoveryStore + +storage_type = os.getenv("STORAGE_TYPE", "inmemory") +base_path = os.getenv("API_BASE_PATH") + +wsgi_optparams = {} + +if base_path is not None: + wsgi_optparams["base_path"] = base_path + +if storage_type == "inmemory": + application = DiscoveryAPI(InMemoryDiscoveryStore(), **wsgi_optparams) + +elif storage_type == "mongodb": + uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") + dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") + + application = DiscoveryAPI(MongoDiscoveryStore(uri,dbname), **wsgi_optparams) + +else: + print(f"STORAGE_TYPE must be either inmemory or mongodb! Current value: {storage_type}", + file=sys.stderr) + diff --git a/discovery_server/compose.yml b/discovery_server/compose.yml new file mode 100644 index 000000000..56be002e1 --- /dev/null +++ b/discovery_server/compose.yml @@ -0,0 +1,9 @@ +services: + app: + build: + context: .. 
+ dockerfile: discovery_server/Dockerfile + ports: + - "8084:80" + environment: + - STORAGE_TYPE=inmemory diff --git a/discovery_server/entrypoint.sh b/discovery_server/entrypoint.sh new file mode 100644 index 000000000..722394409 --- /dev/null +++ b/discovery_server/entrypoint.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env sh +set -e + +# Get the maximum upload file size for Nginx, default to 0: unlimited +USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} + +# Get the number of workers for Nginx, default to 1 +USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} + +# Set the max number of connections per worker for Nginx, if requested +# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below +NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} + +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +# Get the client_body_buffer_size for Nginx, default to 1M +USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} + +# Create the conf.d directory if it doesn't exist +if [ ! 
-d /etc/nginx/conf.d ]; then + mkdir -p /etc/nginx/conf.d +fi + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content='user nginx;\n' + # Set the number of worker processes in Nginx + content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" + content=$content'error_log /var/log/nginx/error.log warn;\n' + content=$content'pid /var/run/nginx.pid;\n' + content=$content'events {\n' + content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" + content=$content'}\n' + content=$content'http {\n' + content=$content' include /etc/nginx/mime.types;\n' + content=$content' default_type application/octet-stream;\n' + content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" + content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" + content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" + content=$content' access_log /var/log/nginx/access.log main;\n' + content=$content' sendfile on;\n' + content=$content' keepalive_timeout 65;\n' + content=$content' include /etc/nginx/conf.d/*.conf;\n' + content=$content'}\n' + content=$content'daemon off;\n' + # Set the max number of open file descriptors for Nginx workers, if requested + if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then + content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" + fi + # Save generated /etc/nginx/nginx.conf + printf "$content" > /etc/nginx/nginx.conf + + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf + + # # Generate additional configuration + printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf + printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf + printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf +fi + +exec "$@" diff --git a/discovery_server/requirements.txt b/discovery_server/requirements.txt new file mode 100644 index 000000000..376baed5f --- /dev/null +++ b/discovery_server/requirements.txt @@ -0,0 +1,2 @@ +Werkzeug +pymongo diff --git a/discovery_server/stop-supervisor.sh b/discovery_server/stop-supervisor.sh new file mode 100644 index 000000000..9a953c94b --- /dev/null +++ b/discovery_server/stop-supervisor.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +printf "READY\n" + +while read line; do + echo "Processing Event: $line" >&2 + kill $PPID +done < /dev/stdin diff --git a/discovery_server/storage/AssetIdsFullExample.json b/discovery_server/storage/AssetIdsFullExample.json new file mode 100644 index 000000000..720d106fc --- /dev/null +++ b/discovery_server/storage/AssetIdsFullExample.json @@ -0,0 +1,62 @@ +[ + { + "semanticId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "ud800;udbff3udbffUud800Bud800qudbffhudbffTd6^dnTudbff5?Aoudbff36Xud800>udbffUudbff\"Hjeud800Fudbff;udbffC?5q]udbff8aIudbffkp[?sud800kXljub;Gudbffqud8003ud8005udbff[>Z6d_udbffO=hxs R9<_pudbffo" + } + ], + "referredSemanticId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "ooOud800pqudbfffud800b:4udbffiudbffudbffd_ud800sJudbffOudbffiB:udbff@pEudbffM;8ud800mS;udbff3ud800q8udbff^udbffmDhFttgudbffrudbffhudbffrEud800e" + } + ] + } + } + ], + "name": "ud800Vud800?ud800tudbff1Ah_ud8003udbffZud800d5WAud800ScMIud800e>", + "value": "udbffBudbffSud800udbffn%ud800kudbffa:Tcfudbff?udbff?ud8005udbffZudbff_ud800iud800qq.@Zud800jmludbffFB<:Wfud800=audbffludbffailudbff?ud800uLudbff7ud800GJqG'ud800kudbffrudbff>>RudbffQudbff=udbffQS]UudbffOZS", + 
"externalSubjectId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "^7<\\agVu_%ud800:pD<-ud800j9udbffkiKCudbffVudbffjudbffDudbffiudbffZsud800WhLG:tQfLP" + } + ], + "referredSemanticId": { + "type": "ExternalReference", + "keys": [ + { + "type": "GlobalReference", + "value": "]Pud800DudbffY[0Y", + "value": "udbffBudbffSud800udbffn%ud800kudbffa:Tcfudbff?udbff?ud8005udbffZudbff_ud800iud800qq.@Zud800jmludbffFB<:Wfud800=audbffludbffailudbff?ud800uLudbff7ud800GJqG'ud800kudbffrudbff>>RudbffQudbff=udbffQS]UudbffOZS" + } +] \ No newline at end of file diff --git a/discovery_server/supervisord.ini b/discovery_server/supervisord.ini new file mode 100644 index 000000000..d73d98014 --- /dev/null +++ b/discovery_server/supervisord.ini @@ -0,0 +1,27 @@ +[supervisord] +nodaemon=true + +[program:uwsgi] +command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[program:nginx] +command=/usr/sbin/nginx +stdout_logfile=/var/log/nginx.out.log +stdout_logfile_maxbytes=0 +stderr_logfile=/var/log/nginx.err.log +stderr_logfile_maxbytes=0 +stopsignal=QUIT +startsecs = 0 +autorestart=false +# may make sense to have autorestart enabled in production + +[eventlistener:quit_on_failure] +events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL +command=/etc/supervisor/stop-supervisor.sh diff --git a/discovery_server/uwsgi.ini b/discovery_server/uwsgi.ini new file mode 100644 index 000000000..9c54ae1cc --- /dev/null +++ b/discovery_server/uwsgi.ini @@ -0,0 +1,9 @@ +[uwsgi] +wsgi-file = /app/main.py +socket = /tmp/uwsgi.sock +chown-socket = nginx:nginx +chmod-socket = 664 +hook-master-start = unix_signal:15 gracefully_kill_them_all +need-app = true +die-on-term = true +show-config = false From 42dd1894c83c74c4b92b4e50b43aac6f9f0d0959 Mon Sep 
17 00:00:00 2001 From: Ornella33 Date: Tue, 15 Jul 2025 13:19:02 +0200 Subject: [PATCH 51/52] Update repository --- .gitignore | 4 +- Discovery Server/Dockerfile | 45 ------------------ Discovery Server/README.md | 63 ------------------------- Discovery Server/app/main.py | 25 ---------- Discovery Server/compose.yml | 7 --- Discovery Server/entrypoint.sh | 71 ----------------------------- Discovery Server/stop-supervisor.sh | 8 ---- Discovery Server/supervisord.ini | 27 ----------- Discovery Server/uwsgi.ini | 9 ---- registry_server/app/__init__.py | 0 test.py | 26 +++++++++++ 11 files changed, 28 insertions(+), 257 deletions(-) delete mode 100644 Discovery Server/Dockerfile delete mode 100644 Discovery Server/README.md delete mode 100644 Discovery Server/app/main.py delete mode 100644 Discovery Server/compose.yml delete mode 100644 Discovery Server/entrypoint.sh delete mode 100644 Discovery Server/stop-supervisor.sh delete mode 100644 Discovery Server/supervisord.ini delete mode 100644 Discovery Server/uwsgi.ini delete mode 100644 registry_server/app/__init__.py create mode 100644 test.py diff --git a/.gitignore b/.gitignore index c78c82036..47f31defc 100644 --- a/.gitignore +++ b/.gitignore @@ -32,5 +32,5 @@ compliance_tool/aas_compliance_tool/version.py # ignore the content of the server storage server/storage/ -test.py/ -Json2RegoRulesConverter.py +test.py +/storage/ diff --git a/Discovery Server/Dockerfile b/Discovery Server/Dockerfile deleted file mode 100644 index 6dc3c4cac..000000000 --- a/Discovery Server/Dockerfile +++ /dev/null @@ -1,45 +0,0 @@ -FROM python:3.11-alpine - -LABEL org.label-schema.name="Eclipse BaSyx" \ - org.label-schema.version="1.0" \ - org.label-schema.description="Docker image for the basyx-python-sdk server application" \ - org.label-schema.maintainer="Eclipse BaSyx" - -ENV PYTHONDONTWRITEBYTECODE=1 -ENV PYTHONUNBUFFERED=1 - -# If we have more dependencies for the server it would make sense -# to refactor uswgi to the pyproject.toml 
-RUN apk update && \ - apk add --no-cache nginx supervisor gcc musl-dev linux-headers python3-dev git bash && \ - pip install uwsgi && \ - pip install --no-cache-dir git+https://github.com/eclipse-basyx/basyx-python-sdk@main#subdirectory=sdk && \ - apk del git bash - - -COPY uwsgi.ini /etc/uwsgi/ -COPY supervisord.ini /etc/supervisor/conf.d/supervisord.ini -COPY stop-supervisor.sh /etc/supervisor/stop-supervisor.sh -RUN chmod +x /etc/supervisor/stop-supervisor.sh - -# Makes it possible to use a different configuration -ENV UWSGI_INI=/etc/uwsgi/uwsgi.ini -# object stores aren't thread-safe yet -# https://github.com/eclipse-basyx/basyx-python-sdk/issues/205 -ENV UWSGI_CHEAPER=0 -ENV UWSGI_PROCESSES=1 -ENV NGINX_MAX_UPLOAD=1M -ENV NGINX_WORKER_PROCESSES=1 -ENV LISTEN_PORT=80 -ENV CLIENT_BODY_BUFFER_SIZE=1M - -# Copy the entrypoint that will generate Nginx additional configs -COPY entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -ENTRYPOINT ["/entrypoint.sh"] - -COPY ./app /app -WORKDIR /app - -CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/Discovery Server/README.md b/Discovery Server/README.md deleted file mode 100644 index a562dbae0..000000000 --- a/Discovery Server/README.md +++ /dev/null @@ -1,63 +0,0 @@ -# Eclipse BaSyx Python SDK - Discovery Service - -This is a Python-based implementation of the **BaSyx Asset Administration Shell (AAS) Discovery Service**. -It provides basic discovery functionality for AAS IDs and their corresponding assets, as specified in the official [Discovery Service Specification v3.1.0_SSP-001](https://app.swaggerhub.com/apis/Plattform_i40/DiscoveryServiceSpecification/V3.1.0_SSP-001). - -## Overview - -The Discovery Service is a core component in the Asset Administration Shell ecosystem. Its main responsibility is to store and retrieve relations between AAS identifiers and asset identifiers. It acts as a lookup service for resolving asset-related queries to corresponding AAS. 
- -This implementation supports: - -- Adding links between AAS and assets -- Querying AAS by asset links -- Querying asset links by AAS ID -- Removing AAS-related asset links -- Configurable in-memory or MongoDB-based persistent storage - -## Features - -| Feature | Description | -|---------------------------------------------|-------------------------------------------------------| -| `add_asset_links` | Register specific asset ids linked to an AAS | -| `get_all_specific_asset_ids_by_aas_id` | Retrieve specific asset ids associated with an AAS | -| `search_aas_by_asset_link` | Find AAS identifiers by providing asset link values | -| `remove_asset_links_for_aas` | Delete all asset links associated with a specific AAS | - -## Specification Compliance - -- Complies with: **Discovery Service Specification v3.1.0_SSP-001** - -## Configuration - -The service can be configured to use either: - -- **In-memory storage** (default): Temporary data storage that resets on service restart. -- **MongoDB storage**: Persistent backend storage using MongoDB. - -### Configuration via Environment Variables - -| Variable | Description | Default | -|----------------|--------------------------------------------|-------------------------| -| `STORAGE_TYPE` | `inmemory` or `mongodb` | `inmemory` | -| `MONGODB_URI` | MongoDB connection URI | `mongodb://localhost:27017` | -| `MONGODB_DBNAME` | Name of the MongoDB database | `basyx_registry` | - -## Deployment via Docker - -A `Dockerfile` and `docker-compose.yml` are provided for simple deployment. - -## Acknowledgments - -This Dockerfile is inspired by the [tiangolo/uwsgi-nginx-docker][10] repository. 
- -[1]: https://github.com/eclipse-basyx/basyx-python-sdk/pull/238 -[2]: https://basyx-python-sdk.readthedocs.io/en/latest/backend/local_file.html -[3]: https://github.com/eclipse-basyx/basyx-python-sdk -[4]: https://app.swaggerhub.com/apis/Plattform_i40/AssetAdministrationShellRepositoryServiceSpecification/V3.0.1_SSP-001 -[5]: https://app.swaggerhub.com/apis/Plattform_i40/SubmodelRepositoryServiceSpecification/V3.0.1_SSP-001 -[6]: https://industrialdigitaltwin.org/content-hub/aasspecifications/idta_01002-3-0_application_programming_interfaces -[7]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/aasx.html#adapter-aasx -[8]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/json.html -[9]: https://basyx-python-sdk.readthedocs.io/en/latest/adapter/xml.html -[10]: https://github.com/tiangolo/uwsgi-nginx-docker diff --git a/Discovery Server/app/main.py b/Discovery Server/app/main.py deleted file mode 100644 index 19c97b416..000000000 --- a/Discovery Server/app/main.py +++ /dev/null @@ -1,25 +0,0 @@ -import os -import sys -from basyx.aas.adapter.discovery import DiscoveryAPI, MongoDiscoveryStore,InMemoryDiscoveryStore - -storage_type = os.getenv("STORAGE_TYPE", "inmemory") -base_path = os.getenv("API_BASE_PATH") - -wsgi_optparams = {} - -if base_path is not None: - wsgi_optparams["base_path"] = base_path - -if storage_type == "inmemory": - application = DiscoveryAPI(InMemoryDiscoveryStore(), **wsgi_optparams) - -elif storage_type in "mongodb": - uri = os.getenv("MONGODB_URI", "mongodb://localhost:27017") - dbname = os.getenv("MONGODB_DBNAME", "basyx_registry") - - application = DiscoveryAPI(MongoDiscoveryStore(uri,dbname), **wsgi_optparams) - -else: - print(f"STORAGE_TYPE must be either inmemory or mongodb! 
Current value: {storage_type}", - file=sys.stderr) - diff --git a/Discovery Server/compose.yml b/Discovery Server/compose.yml deleted file mode 100644 index 6e1d65404..000000000 --- a/Discovery Server/compose.yml +++ /dev/null @@ -1,7 +0,0 @@ -services: - app: - build: . - ports: - - "8084:80" - environment: - STORAGE_TYPE: inmemory diff --git a/Discovery Server/entrypoint.sh b/Discovery Server/entrypoint.sh deleted file mode 100644 index 722394409..000000000 --- a/Discovery Server/entrypoint.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env sh -set -e - -# Get the maximum upload file size for Nginx, default to 0: unlimited -USE_NGINX_MAX_UPLOAD=${NGINX_MAX_UPLOAD:-0} - -# Get the number of workers for Nginx, default to 1 -USE_NGINX_WORKER_PROCESSES=${NGINX_WORKER_PROCESSES:-1} - -# Set the max number of connections per worker for Nginx, if requested -# Cannot exceed worker_rlimit_nofile, see NGINX_WORKER_OPEN_FILES below -NGINX_WORKER_CONNECTIONS=${NGINX_WORKER_CONNECTIONS:-1024} - -# Get the listen port for Nginx, default to 80 -USE_LISTEN_PORT=${LISTEN_PORT:-80} - -# Get the client_body_buffer_size for Nginx, default to 1M -USE_CLIENT_BODY_BUFFER_SIZE=${CLIENT_BODY_BUFFER_SIZE:-1M} - -# Create the conf.d directory if it doesn't exist -if [ ! 
-d /etc/nginx/conf.d ]; then - mkdir -p /etc/nginx/conf.d -fi - -if [ -f /app/nginx.conf ]; then - cp /app/nginx.conf /etc/nginx/nginx.conf -else - content='user nginx;\n' - # Set the number of worker processes in Nginx - content=$content"worker_processes ${USE_NGINX_WORKER_PROCESSES};\n" - content=$content'error_log /var/log/nginx/error.log warn;\n' - content=$content'pid /var/run/nginx.pid;\n' - content=$content'events {\n' - content=$content" worker_connections ${NGINX_WORKER_CONNECTIONS};\n" - content=$content'}\n' - content=$content'http {\n' - content=$content' include /etc/nginx/mime.types;\n' - content=$content' default_type application/octet-stream;\n' - content=$content' log_format main '"'\$remote_addr - \$remote_user [\$time_local] \"\$request\" '\n" - content=$content' '"'\$status \$body_bytes_sent \"\$http_referer\" '\n" - content=$content' '"'\"\$http_user_agent\" \"\$http_x_forwarded_for\"';\n" - content=$content' access_log /var/log/nginx/access.log main;\n' - content=$content' sendfile on;\n' - content=$content' keepalive_timeout 65;\n' - content=$content' include /etc/nginx/conf.d/*.conf;\n' - content=$content'}\n' - content=$content'daemon off;\n' - # Set the max number of open file descriptors for Nginx workers, if requested - if [ -n "${NGINX_WORKER_OPEN_FILES}" ] ; then - content=$content"worker_rlimit_nofile ${NGINX_WORKER_OPEN_FILES};\n" - fi - # Save generated /etc/nginx/nginx.conf - printf "$content" > /etc/nginx/nginx.conf - - content_server='server {\n' - content_server=$content_server" listen ${USE_LISTEN_PORT};\n" - content_server=$content_server' location / {\n' - content_server=$content_server' include uwsgi_params;\n' - content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' - content_server=$content_server' }\n' - content_server=$content_server'}\n' - # Save generated server /etc/nginx/conf.d/nginx.conf - printf "$content_server" > /etc/nginx/conf.d/nginx.conf - - # # Generate additional configuration - printf 
"client_max_body_size $USE_NGINX_MAX_UPLOAD;\n" > /etc/nginx/conf.d/upload.conf - printf "client_body_buffer_size $USE_CLIENT_BODY_BUFFER_SIZE;\n" > /etc/nginx/conf.d/body-buffer-size.conf - printf "add_header Access-Control-Allow-Origin *;\n" > /etc/nginx/conf.d/cors-header.conf -fi - -exec "$@" diff --git a/Discovery Server/stop-supervisor.sh b/Discovery Server/stop-supervisor.sh deleted file mode 100644 index 9a953c94b..000000000 --- a/Discovery Server/stop-supervisor.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env sh - -printf "READY\n" - -while read line; do - echo "Processing Event: $line" >&2 - kill $PPID -done < /dev/stdin diff --git a/Discovery Server/supervisord.ini b/Discovery Server/supervisord.ini deleted file mode 100644 index d73d98014..000000000 --- a/Discovery Server/supervisord.ini +++ /dev/null @@ -1,27 +0,0 @@ -[supervisord] -nodaemon=true - -[program:uwsgi] -command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 -startsecs = 0 -autorestart=false -# may make sense to have autorestart enabled in production - -[program:nginx] -command=/usr/sbin/nginx -stdout_logfile=/var/log/nginx.out.log -stdout_logfile_maxbytes=0 -stderr_logfile=/var/log/nginx.err.log -stderr_logfile_maxbytes=0 -stopsignal=QUIT -startsecs = 0 -autorestart=false -# may make sense to have autorestart enabled in production - -[eventlistener:quit_on_failure] -events=PROCESS_STATE_STOPPED,PROCESS_STATE_EXITED,PROCESS_STATE_FATAL -command=/etc/supervisor/stop-supervisor.sh diff --git a/Discovery Server/uwsgi.ini b/Discovery Server/uwsgi.ini deleted file mode 100644 index 9c54ae1cc..000000000 --- a/Discovery Server/uwsgi.ini +++ /dev/null @@ -1,9 +0,0 @@ -[uwsgi] -wsgi-file = /app/main.py -socket = /tmp/uwsgi.sock -chown-socket = nginx:nginx -chmod-socket = 664 -hook-master-start = unix_signal:15 gracefully_kill_them_all -need-app = true -die-on-term = true -show-config = 
false diff --git a/registry_server/app/__init__.py b/registry_server/app/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/test.py b/test.py new file mode 100644 index 000000000..f66264eff --- /dev/null +++ b/test.py @@ -0,0 +1,26 @@ + +from server.app.interfaces.registry import * +if __name__ == "__main__": + from werkzeug.serving import run_simple + from basyx.aas.examples.data.example_aas import create_full_example + + run_simple("localhost", 8083, RegistryAPI(create_full_example()), + use_debugger=True, use_reloader=True) + +#from server.app.interfaces.discovery import * +#if __name__ == "__main__": +# from werkzeug.serving import run_simple + + # run_simple("localhost", 8084, DiscoveryAPI(InMemoryDiscoveryStore()), + # use_debugger=True, use_reloader=True) +# from server.app.interfaces.repository import * +# if __name__ == "__main__": +# from werkzeug.serving import run_simple +# from basyx.aas.examples.data.example_aas import create_full_example +# +# run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), +# use_debugger=True, use_reloader=True) + + + + From d37bc01ebbcdb5d943c53b1d5b9b65ed9fd105f1 Mon Sep 17 00:00:00 2001 From: Ornella33 Date: Tue, 15 Jul 2025 13:22:57 +0200 Subject: [PATCH 52/52] Ignore test.py --- test.py | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 test.py diff --git a/test.py b/test.py deleted file mode 100644 index f66264eff..000000000 --- a/test.py +++ /dev/null @@ -1,26 +0,0 @@ - -from server.app.interfaces.registry import * -if __name__ == "__main__": - from werkzeug.serving import run_simple - from basyx.aas.examples.data.example_aas import create_full_example - - run_simple("localhost", 8083, RegistryAPI(create_full_example()), - use_debugger=True, use_reloader=True) - -#from server.app.interfaces.discovery import * -#if __name__ == "__main__": -# from werkzeug.serving import run_simple - - # run_simple("localhost", 8084, 
DiscoveryAPI(InMemoryDiscoveryStore()), - # use_debugger=True, use_reloader=True) -# from server.app.interfaces.repository import * -# if __name__ == "__main__": -# from werkzeug.serving import run_simple -# from basyx.aas.examples.data.example_aas import create_full_example -# -# run_simple("localhost", 8080, WSGIApp(create_full_example(), aasx.DictSupplementaryFileContainer()), -# use_debugger=True, use_reloader=True) - - - -