diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 957b3c67..4bd938ce 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -19,4 +19,38 @@ ENV LANG ${LOCAL} RUN docker context use default RUN docker buildx use default -WORKDIR /workspace \ No newline at end of file +ARG home=/home/vscode + +USER vscode + +COPY ./ /workspace + +# ZSH +RUN mkdir -p ${home}/.local/share/fonts +RUN curl -fL https://raw.githubusercontent.com/ryanoasis/nerd-fonts/master/patched-fonts/DroidSansMono/DroidSansMNerdFontMono-Regular.otf --output ${home}/.local/share/fonts/DroidSansMNerdFontMono-Regular.otf +RUN git clone https://github.com/tarjoilija/zgen.git "${home}/.zgen" +RUN git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ${ZSH_CUSTOM:-${home}/.oh-my-zsh/custom}/themes/powerlevel10k +RUN cp /workspace/.devcontainer/zshrc ${home}/.zshrc + +# ASDF +RUN git clone https://github.com/asdf-vm/asdf.git ${home}/.asdf +RUN echo '. "$HOME/.asdf/asdf.sh"' >> ${home}/.bashrc +RUN echo '. "$HOME/.asdf/completions/asdf.bash"' >> ${home}/.bashrc +RUN echo '. "$HOME/.asdf/asdf.sh"' >> ${home}/.zshrc +RUN echo '. 
"$HOME/.asdf/completions/asdf.bash"' >> ${home}/.zshrc +RUN export PATH=~/.asdf/bin:~/.asdf/shims:$PATH + +# CURL SSL DISABLE +RUN sudo update-ca-certificates --fresh +RUN echo "insecure" >> ${home}/.curlrc + +# RUN POETRY_VERSION=$(cat .tool-versions|grep 'poetry' | cut -d " " -f 2) && export PATH=${home}/.asdf/installs/poetry/$POETRY_VERSION/bin:$PATH + +RUN sudo chown -Rf vscode:vscode ${home} +RUN sudo chown -Rf vscode:vscode /workspace + +WORKDIR /workspace + +SHELL ["/bin/bash", "-c"] +RUN source ${home}/.asdf/asdf.sh && make install +RUN sudo rm -rf /workspace diff --git a/.devcontainer/boot.sh b/.devcontainer/boot.sh new file mode 100755 index 00000000..3ec54bba --- /dev/null +++ b/.devcontainer/boot.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -x +export PATH=~/.asdf/bin:~/.asdf/shims:$PATH +export PATH=~/.asdf/installs/poetry/$POETRY_VERSION/bin:$PATH +make configure-poetry +make dev diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 159614d7..555370d4 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,9 +2,9 @@ "name": "MyElectricalData development", "dockerComposeFile": ["docker-compose.yaml"], "shutdownAction": "stopCompose", - "postCreateCommand": "/bin/bash -lc ./.devcontainer/post-install.sh", + // "postCreateCommand": "/bin/zsh -lc ./.devcontainer/post-install.sh", // "postCreateCommand": "make init-devcontainer", - "postStartCommand": "make dev", + "postStartCommand": "/bin/zsh -lc ./.devcontainer/boot.sh", "service": "workspace", "workspaceFolder": "/workspace", "forwardPorts": [], @@ -56,6 +56,7 @@ "git.autofetch": true, "window.title": "${rootName}${dirty}${activeEditorShort}${separator}${separator}${profileName}${separator}${appName}", "http.proxyStrictSSL": false, + "terminal.integrated.defaultProfile.linux": "zsh", // PYTHON "python.testing.pytestEnabled": true, "python.analysis.autoImportCompletions": false, diff --git a/.devcontainer/docker-compose.tools.yaml 
b/.devcontainer/docker-compose.tools.yaml index 6a70916f..e00f4fb5 100644 --- a/.devcontainer/docker-compose.tools.yaml +++ b/.devcontainer/docker-compose.tools.yaml @@ -21,7 +21,7 @@ services: ports: - '8086:8086' volumes: - - ./../data/influxdb:/var/lib/influxdb2 + - ~/tmp/med/data/influxdb:/var/lib/influxdb2 healthcheck: test: ["CMD", "curl", "-f", "http://influxdb:8086"] interval: 25s @@ -43,7 +43,7 @@ services: volumes: - /etc/localtime:/etc/localtime:ro - /etc/timezone:/etc/timezone:ro - - ./../data/mosquitto:/data + - /tmp/med/data/mosquitto:/data - ./../mosquitto:/mosquitto/config/ ports: - 1883:1883 @@ -56,7 +56,7 @@ services: links: - mosquitto volumes: - - ./../data/:/mqtt-explorer/config + - /tmp/med/data/mqtt-explorer:/mqtt-explorer/config ports: - 4000:4000 @@ -73,7 +73,7 @@ services: ports: - "5432:5432" volumes: - - ./../data/postgresql/data:/var/lib/postgresql/data + - /tmp/med/data/postgresql:/var/lib/postgresql/data - ./../init.sql:/docker-entrypoint-initdb.d/init.sql mysql: @@ -87,7 +87,7 @@ services: ports: - "3306:3306" volumes: - - ./../data/mysql/data:/var/lib/mysql + - /tmp/med/data/data/mysql/data:/var/lib/mysql volumes: mydata: diff --git a/.devcontainer/post-install.sh b/.devcontainer/post-install.sh index 3a8ef1c8..e37e151c 100755 --- a/.devcontainer/post-install.sh +++ b/.devcontainer/post-install.sh @@ -22,4 +22,5 @@ echo "insecure" >> ${HOME}/.curlrc sudo update-ca-certificates --fresh echo "" echo "Install environment" -make install \ No newline at end of file +make install +chmod +x /workspace/.devcontainer/boot.sh \ No newline at end of file diff --git a/.flake8 b/.flake8 index 0219ee6e..04813902 100644 --- a/.flake8 +++ b/.flake8 @@ -40,4 +40,4 @@ exclude = max-complexity = 15 import-order-style = google application-import-names = flake8 -min_python_version = 3.12.2 \ No newline at end of file +min_python_version = 3.12.3 \ No newline at end of file diff --git a/.github/workflows/deploy_prerelease.yaml 
b/.github/workflows/deploy_prerelease.yaml index 5115182f..81aa0bf8 100644 --- a/.github/workflows/deploy_prerelease.yaml +++ b/.github/workflows/deploy_prerelease.yaml @@ -57,17 +57,17 @@ jobs: IMAGE_REPO: ${{ steps.informations.outputs.IMAGE_REPO }} IMAGE_NAME: ${{ steps.informations.outputs.IMAGE_NAME }} - Discord-Begin: - needs: [informations] - if: ${{ needs.informations.outputs.build == 'true' }} - uses: MyElectricalData/.github/.github/workflows/discord.yaml@main - with: - MESSAGE: '**MyElectricalData** : Version **${{ needs.informations.outputs.version }}** is in building state...' - secrets: - DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} + # Discord-Begin: + # needs: [informations] + # if: ${{ needs.informations.outputs.build == 'true' }} + # uses: MyElectricalData/.github/.github/workflows/discord.yaml@main + # with: + # MESSAGE: '**MyElectricalData** : Version **${{ needs.informations.outputs.version }}** is in building state...' + # secrets: + # DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} Build-And-Push-Image: - needs: [informations, Discord-Begin] + needs: [informations] # needs: [informations] if: ${{ needs.informations.outputs.build == 'true' }} uses: MyElectricalData/.github/.github/workflows/dockerhub-build-push.yaml@main @@ -77,6 +77,10 @@ jobs: PUSH: true TAG: ${{ needs.informations.outputs.version }} LATEST: false + BUILD_ARGS: | + BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') + BUILD_REF=$(git rev-parse --short HEAD) + BUILD_VERSION=${{ needs.informations.outputs.version }} secrets: DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/deploy_release.yaml b/.github/workflows/deploy_release.yaml index 389f5ee2..c5a5381b 100644 --- a/.github/workflows/deploy_release.yaml +++ b/.github/workflows/deploy_release.yaml @@ -78,6 +78,10 @@ jobs: PUSH: true TAG: ${{ needs.informations.outputs.version }} LATEST: true + BUILD_ARGS: | + BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') + BUILD_REF=$(git rev-parse --short HEAD) + 
BUILD_VERSION=${{ needs.informations.outputs.version }} secrets: DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.tool-versions b/.tool-versions index 8e79a078..ff309e46 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,3 +1,3 @@ poetry 1.7.1 -python 3.12.2 +python 3.12.3 pre-commit 3.6.1 \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index 75527ec0..897fd66b 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,15 +1,22 @@ { - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "1.0.0", - "configurations": [ - { - "name": "Python (Integrated Terminal)", - "type": "python", - "request": "launch", - "program": "/app/main.py", - "console": "integratedTerminal" - } - ] -} \ No newline at end of file + // Utilisez IntelliSense pour en savoir plus sur les attributs possibles. + // Pointez pour afficher la description des attributs existants. 
+ // Pour plus d'informations, visitez : https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Débogueur Python : Fichier actuel", + "env": { + "PYTHONPATH": "${workspaceFolder}/src", + "DEV": "true", + "DEBUG": "true" + }, + "type": "debugpy", + "request": "launch", + "program": "${workspaceFolder}/src/main.py", + "console": "integratedTerminal", + "envFile": "${workspaceFolder}/.env" + // "preLaunchTask": "select-environment-mock" + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index f24da4c8..ed2eebef 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,26 +1,34 @@ { - "sqltools.connections": [ - { - "previewLimit": 50, - "server": "127.0.0.1", - "port": 5432, - "driver": "PostgreSQL", - "name": "MED Import", - "group": "MED", - "database": "myelectricaldata", - "username": "myelectricaldata", - "password": "myelectricaldata" - } - ], - "github-actions.workflows.pinned.workflows": [], - "GitHooks.hooksDirectory": "/home/cvalentin/git/myelectricaldata/myelectricaldata_import/.git/hooks", - "files.exclude": { - "**/.git": true, - "**/.svn": true, - "**/.hg": true, - "**/CVS": true, - "**/.DS_Store": true, - "**/Thumbs.db": true - }, - "hide-files.files": [] + "sqltools.connections": [ + { + "previewLimit": 50, + "server": "127.0.0.1", + "port": 5432, + "driver": "PostgreSQL", + "name": "MED Import", + "group": "MED", + "database": "myelectricaldata", + "username": "myelectricaldata", + "password": "myelectricaldata" + } + ], + "github-actions.workflows.pinned.workflows": [], + "GitHooks.hooksDirectory": "/home/cvalentin/git/myelectricaldata/myelectricaldata_import/.git/hooks", + "files.exclude": { + "**/.git": true, + "**/.svn": true, + "**/.hg": true, + "**/CVS": true, + "**/.DS_Store": true, + "**/Thumbs.db": true, + ".venv": true, + ".pytest_cache": true + }, + "hide-files.files": [ + ".venv", + ".pytest_cache" + ], + "yaml.schemas": { + 
"https://json.schemastore.org/yamllint.json": "file:///home/cvalentin/git/myelectricaldata/myelectricaldata_import/data/config.yaml" + } } diff --git a/CHANGELOG.md b/CHANGELOG.md index 6fd0547a..6ac368e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,10 +2,289 @@ +## v1.0.0-rc.14 (2024-08-02) + +### Feature + +* feat: migrate to lifespan, clean ([`23ab2f1`](https://github.com/MyElectricalData/myelectricaldata_import/commit/23ab2f1aa2925030fd5244739c075331705aedb9)) + +### Fix + +* fix: remove "disable import" when debug is enable ([`681421f`](https://github.com/MyElectricalData/myelectricaldata_import/commit/681421facfede2976f2ddb947399d54157119a90)) + +### Unknown + +* Merge pull request #568 from MyElectricalData/feat/build-args + +Feat/build args ([`98e9962`](https://github.com/MyElectricalData/myelectricaldata_import/commit/98e99627b3ff8d9fb2dcd79832ae4ffeaa9e1801)) + + +## v1.0.0-rc.13 (2024-08-01) + +### Fix + +* fix: opentel trash + bug ([`8183fa2`](https://github.com/MyElectricalData/myelectricaldata_import/commit/8183fa241a8a17279e57fd8a6f098cfc3e617216)) + +### Unknown + +* Merge pull request #566 from MyElectricalData/feat/build-args + +fix: opentel trash + bug ([`dee45cd`](https://github.com/MyElectricalData/myelectricaldata_import/commit/dee45cd17e292c556cd30a5ea0ffb6051496b543)) + + +## v1.0.0-rc.12 (2024-07-31) + +### Fix + +* fix: med plan upper ([`9748bc7`](https://github.com/MyElectricalData/myelectricaldata_import/commit/9748bc7d7db62b540181d3e053c88d07943e9216)) + +### Unknown + +* Merge pull request #565 from MyElectricalData/feat/build-args + +fix: med plan upper ([`dc35599`](https://github.com/MyElectricalData/myelectricaldata_import/commit/dc35599665f807a3782fd9dd45f2b608cc302b1c)) + + +## v1.0.0-rc.11 (2024-07-31) + +### Fix + +* fix: influxdb method lower ([`2848e9b`](https://github.com/MyElectricalData/myelectricaldata_import/commit/2848e9bfb77e900a63e1581af7a97b2b83cd812a)) + +### Unknown + +* Merge pull request #564 from 
MyElectricalData/feat/build-args + +fix: influxdb method lower ([`8e1ed22`](https://github.com/MyElectricalData/myelectricaldata_import/commit/8e1ed22ab1699eac40171ca7bc999ad2884fb355)) + + +## v1.0.0-rc.10 (2024-07-31) + +### Feature + +* feat: rework ([`0afc73a`](https://github.com/MyElectricalData/myelectricaldata_import/commit/0afc73acb849ef13afd9e5b1f3801f8ba81886ed)) + +### Unknown + +* Merge pull request #563 from MyElectricalData/feat/build-args + +feat: rework ([`755edf5`](https://github.com/MyElectricalData/myelectricaldata_import/commit/755edf54f1ea925d2366549697c7a2eb4ed4ce9c)) + +* Merge pull request #562 from MyElectricalData/feat/build-args + +update setuptools ([`779cdd0`](https://github.com/MyElectricalData/myelectricaldata_import/commit/779cdd03b1cf34e33713a89d1c1ee737390e2507)) + +* update setuptools ([`42bfd46`](https://github.com/MyElectricalData/myelectricaldata_import/commit/42bfd46f09c7bf0b84693647439f9a1567035d6d)) + + +## v1.0.0-rc.9 (2024-07-29) + +### Feature + +* feat: generate release ([`0e9eab3`](https://github.com/MyElectricalData/myelectricaldata_import/commit/0e9eab38d3f3d763101e346b5a6c8e5b0c726c4c)) + +### Unknown + +* Merge pull request #561 from MyElectricalData/feat/build-args + +feat: generate release ([`a2f0bfd`](https://github.com/MyElectricalData/myelectricaldata_import/commit/a2f0bfd3bdae93ca7936466107198863b727a264)) + +* Merge pull request #560 from MyElectricalData/feat/build-args + +feat fix dep ([`d643dfe`](https://github.com/MyElectricalData/myelectricaldata_import/commit/d643dfed7cf3834cb9bf06f350ed25af24e3f37c)) + +* feat fix dep ([`0cf2579`](https://github.com/MyElectricalData/myelectricaldata_import/commit/0cf2579cc158ac24cf4aee54e86c7b961256683d)) + +* Merge pull request #559 from MyElectricalData/feat/build-args + +Fix rust uninstall ([`9f0e44d`](https://github.com/MyElectricalData/myelectricaldata_import/commit/9f0e44dab40d617b6dd79c92cdc79fb1db346ad5)) + +* Fix rust uninstall 
([`2270051`](https://github.com/MyElectricalData/myelectricaldata_import/commit/227005129c09c148670ee14c53c70923f3d2b16a)) + + +## v1.0.0-rc.8 (2024-07-29) + +### Feature + +* feat: rework all config ([`239c4f1`](https://github.com/MyElectricalData/myelectricaldata_import/commit/239c4f1b01098d84961e8c59216cdf5be0ba5245)) + +* feat: rework config ([`a2a1308`](https://github.com/MyElectricalData/myelectricaldata_import/commit/a2a1308e7447e6440ae637290ce246ef0d6425b0)) + +### Fix + +* fix: build armv7 ([`4ad4860`](https://github.com/MyElectricalData/myelectricaldata_import/commit/4ad48601948a7cf0ed693cbea68ef63dad8935e5)) + +### Unknown + +* Merge pull request #558 from MyElectricalData/feat/build-args + +Feat/build args ([`299af44`](https://github.com/MyElectricalData/myelectricaldata_import/commit/299af4407e68bcdacbdc160f031e692d5b2c5ea7)) + + +## v1.0.0-rc.7 (2024-06-03) + +### Fix + +* fix: downgrade orjson ([`ba3d0d1`](https://github.com/MyElectricalData/myelectricaldata_import/commit/ba3d0d135721ae88766ab14d63915883d738b394)) + +### Unknown + +* Merge pull request #549 from MyElectricalData/feat/build-args + +fix: downgrade orjson ([`2f71601`](https://github.com/MyElectricalData/myelectricaldata_import/commit/2f716015c494b813987e38b30f5308e90cc3014a)) + + +## v1.0.0-rc.6 (2024-06-03) + +### Fix + +* fix: requirements ([`8b8cc78`](https://github.com/MyElectricalData/myelectricaldata_import/commit/8b8cc7802ae7c8a4c515af09de07868f9a92eec0)) + +### Unknown + +* Merge pull request #548 from MyElectricalData/feat/build-args + +fix: requirements ([`d7866df`](https://github.com/MyElectricalData/myelectricaldata_import/commit/d7866dfd710a45b8f81afc9d04275f1ecaa12702)) + + +## v1.0.0-rc.5 (2024-06-03) + +### Fix + +* fix: build arg + downgrade python to 3.11.9 (armv7 bug) ([`026ac3d`](https://github.com/MyElectricalData/myelectricaldata_import/commit/026ac3dffcb91304c42e3553f8f6aade5ced8728)) + +### Unknown + +* Merge pull request #547 from 
MyElectricalData/feat/build-args + +fix: build arg + downgrade python to 3.11.9 (armv7 bug) ([`de3b6dc`](https://github.com/MyElectricalData/myelectricaldata_import/commit/de3b6dc82420576ee0eb1a75e2e170d5e80ef890)) + + +## v1.0.0-rc.4 (2024-06-03) + +### Feature + +* feat: add build arg on staging ([`91d3e99`](https://github.com/MyElectricalData/myelectricaldata_import/commit/91d3e99b180c9914e5db50370a707b2e127a8a6b)) + +### Unknown + +* Merge pull request #546 from MyElectricalData/feat/build-args + +feat: add build arg on staging ([`98b4ab7`](https://github.com/MyElectricalData/myelectricaldata_import/commit/98b4ab7c71b4278f841713920b5bc37527367bad)) + + +## v1.0.0-rc.3 (2024-06-03) + +### Feature + +* feat: add build arg ([`779be47`](https://github.com/MyElectricalData/myelectricaldata_import/commit/779be474452004b385ebb7da7a82c6d213723ce4)) + +### Fix + +* fix: lock file ([`00fcfff`](https://github.com/MyElectricalData/myelectricaldata_import/commit/00fcfffdfcb2be522b83d39d9ac72ea4ba5cdab9)) + +### Unknown + +* Merge pull request #545 from MyElectricalData/feat/build-args + +Feat/build args ([`b4ddc19`](https://github.com/MyElectricalData/myelectricaldata_import/commit/b4ddc19aad94672f8b0eee12aac55f46d46c1155)) + +* merge: resync 0.13 ([`f73ca88`](https://github.com/MyElectricalData/myelectricaldata_import/commit/f73ca88751838fcdc44ce84b9d54d5cf990d4c0e)) + + +## v0.13.2 (2024-05-29) + +### Chore + +* chore: clean ([`a6c18d1`](https://github.com/MyElectricalData/myelectricaldata_import/commit/a6c18d18d658320e6da4b12ce611eea497b31332)) + +### Fix + +* fix: error ([`055823a`](https://github.com/MyElectricalData/myelectricaldata_import/commit/055823a19f7c37afbb9b91b97f09b16bf29179e2)) + +* fix: daily cost ([`a3ddb89`](https://github.com/MyElectricalData/myelectricaldata_import/commit/a3ddb894ca82f28dd37a56585e03357ac64995f0)) + +* fix: daily cost 
([`190e1df`](https://github.com/MyElectricalData/myelectricaldata_import/commit/190e1df7e1b24f438a7bb47d3cd32244258242c3)) + + +## v0.13.1 (2024-05-28) + +### Chore + +* chore: upgrade version ([`0fa8fe2`](https://github.com/MyElectricalData/myelectricaldata_import/commit/0fa8fe29fe30e036b4229e9751c463cdebec61fe)) + + +## v0.13.1-rc.1 (2024-05-27) + +### Fix + +* fix: home assistant ws export ([`c31dfb4`](https://github.com/MyElectricalData/myelectricaldata_import/commit/c31dfb4ec66b0fb0d78b3cdf034f33875900d1d9)) + + +## v1.0.0-rc.2 (2024-05-22) + +### Fix + +* fix: update requirement ([`aa839fc`](https://github.com/MyElectricalData/myelectricaldata_import/commit/aa839fc14ce8139f40d81b6c6488b77aaeb4c102)) + +### Unknown + +* Merge pull request #535 from MyElectricalData/feat/rework-database + +fix: update requirement ([`cf56680`](https://github.com/MyElectricalData/myelectricaldata_import/commit/cf56680c26ada1c1edb30d22e126dc0a2b9c0898)) + + +## v1.0.0-rc.1 (2024-05-22) + +### Breaking + +* feat: rework database + +BREAKING CHANGE: Release 1.0.0 with database rework + +- Upgrade FastAPI to latest (0.111.0) ([`38a77c9`](https://github.com/MyElectricalData/myelectricaldata_import/commit/38a77c9e8fce2ce96db222bf43c09bec23a3e76b)) + +### Chore + +* chore: tnr ([`cbb3352`](https://github.com/MyElectricalData/myelectricaldata_import/commit/cbb3352bf93c27e38f83f9ba0358febba9b85730)) + +* chore: vsconfig ([`a115719`](https://github.com/MyElectricalData/myelectricaldata_import/commit/a1157195d4aa3c9b968503e547a9284fa142b499)) + +* chore: add devcontainer ([`b7a856a`](https://github.com/MyElectricalData/myelectricaldata_import/commit/b7a856ac735070bf2fd11e29f58f77457d14ee8d)) + +### Refactor + +* refactor: rework database ([`ae03761`](https://github.com/MyElectricalData/myelectricaldata_import/commit/ae037614902d2917c7c61795a8a08e0002420889)) + +* refactor: rework database 
([`7ac2a64`](https://github.com/MyElectricalData/myelectricaldata_import/commit/7ac2a649449ec977545026b9dc1da265996f5bd8)) + +### Unknown + +* Merge pull request #534 from MyElectricalData/feat/rework-database + +feat: rework database ([`2b9204d`](https://github.com/MyElectricalData/myelectricaldata_import/commit/2b9204db8ecbd40970183eff9b8ffafb5c1d2a52)) + +* Merge pull request #533 from MyElectricalData/feat/rework-database + +BREAKING CHANGE: database rework ([`bb95b75`](https://github.com/MyElectricalData/myelectricaldata_import/commit/bb95b75a979aa458ae0b72f03bea92da44edc256)) + +* BREAKING CHANGE: database rework ([`ba40034`](https://github.com/MyElectricalData/myelectricaldata_import/commit/ba40034dcf908bffbba7c01df36ac30093bb9320)) + +* Merge pull request #532 from MyElectricalData/feat/rework-database + +Feat/rework database ([`e2a5c7b`](https://github.com/MyElectricalData/myelectricaldata_import/commit/e2a5c7b4d1be888c25129033d8c26e38a10df2da)) + +* Merge pull request #523 from hotfix31/fix-export-influxdb-production-only + +fix export to influxdb for production ([`d9639bf`](https://github.com/MyElectricalData/myelectricaldata_import/commit/d9639bfaaf8b1fef9b2b74422d4d3c885574bf79)) + + ## v0.13.0 (2024-05-22) ### Unknown +* fix export to influxdb for production ([`2959b05`](https://github.com/MyElectricalData/myelectricaldata_import/commit/2959b0576f3a605d6f5f8b727abbf425b0679f9f)) + * Merge pull request #517 from FabienPennequin/feature/homeassistant_ws_batch_size Send data to Home Assistant WS per batches ([`353c44f`](https://github.com/MyElectricalData/myelectricaldata_import/commit/353c44f0f48798499a288562f494eb78d2014947)) diff --git a/Dockerfile b/Dockerfile index 4c1450cf..06c494d5 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,11 @@ -FROM python:3.12.2-slim +FROM python:3.12.3-slim + +ARG TARGETPLATFORM +ENV TARGETPLATFORM=$TARGETPLATFORM + +ENV LANG fr_FR.UTF-8 +ENV LC_ALL fr_FR.UTF-8 +ENV TZ=Europe/Paris RUN apt-get update && \ apt-get 
install -y \ @@ -6,24 +13,60 @@ RUN apt-get update && \ git \ g++ \ gcc \ - libpq-dev -RUN sed -i -e 's/# fr_FR.UTF-8 UTF-8/fr_FR.UTF-8 UTF-8/' /etc/locale.gen + libpq-dev \ + curl + RUN sed -i -e 's/# fr_FR.UTF-8 UTF-8/fr_FR.UTF-8 UTF-8/' /etc/locale.gen RUN dpkg-reconfigure --frontend=noninteractive locales -RUN rm -rf /var/lib/apt/lists/* - -ENV LANG fr_FR.UTF-8 -ENV LC_ALL fr_FR.UTF-8 -ENV TZ=Europe/Paris RUN pip install --upgrade pip pip-tools setuptools +# INSTALL RUST FOR ARMv7 and orjson lib +RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \ + apt install -y curl git build-essential libc6-armhf-cross libc6-dev-armhf-cross gcc-arm-linux-gnueabihf libdbus-1-dev libdbus-1-dev:armhf && \ + curl -k -o rust-install.tar.gz https://static.rust-lang.org/dist/rust-1.78.0-armv7-unknown-linux-gnueabihf.tar.xz && \ + tar -xvf rust-install.tar.gz && \ + chmod +x rust-1.78.0-armv7-unknown-linux-gnueabihf/install.sh && \ + ./rust-1.78.0-armv7-unknown-linux-gnueabihf/install.sh; \ + elif [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then \ + apt install -y curl git build-essential libc6-armel-cross libc6-dev-armel-cross gcc-arm-linux-gnueabi libdbus-1-dev libdbus-1-dev:armel && \ + curl -k -o rust-install.tar.gz https://static.rust-lang.org/dist/rust-1.78.0-arm-unknown-linux-gnueabi.tar.xz && \ + tar -xvf rust-install.tar.gz && \ + chmod +x rust-1.78.0-arm-unknown-linux-gnueabi/install.sh && \ + ./rust-1.78.0-arm-unknown-linux-gnueabi/install.sh; \ + fi + COPY ./src /app RUN pip install -r /app/requirements.txt +# REMOVE RUST +RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ] || [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then \ + /usr/local/lib/rustlib/uninstall.sh; \ + fi + RUN mkdir /data RUN mkdir /log RUN apt-get clean +ARG BUILD_DATE +ARG BUILD_REF +ARG BUILD_VERSION +LABEL \ + maintainer="m4dm4rtig4n (https://github.com/alexbelgium)" \ + org.opencontainers.image.title="MyElectricalData official client" \ + org.opencontainers.image.description="Client to import data from 
MyElectricalData gateway." \ + org.opencontainers.image.authors="m4dm4rtig4n (https://github.com/m4dm4rtig4n)" \ + org.opencontainers.image.licenses="Apache License 2.0" \ + org.opencontainers.image.url="https://github.com/m4dm4rtig4n" \ + org.opencontainers.image.source="https://github.com/MyElectricalData/myelectricaldata_import" \ + org.opencontainers.image.documentation="https://github.com/MyElectricalData/myelectricaldata_import/blob/main/README.md" \ + org.opencontainers.image.created=${BUILD_DATE} \ + org.opencontainers.image.revision=${BUILD_REF} \ + org.opencontainers.image.version=${BUILD_VERSION} + +# CLEAN +RUN rm -rf /var/lib/apt/lists/* +RUN apt remove -y git libpq-dev gcc g++ + CMD ["python", "-u", "/app/main.py"] diff --git a/Makefile b/Makefile index 4f9d5cba..390f4631 100755 --- a/Makefile +++ b/Makefile @@ -110,7 +110,7 @@ run: init install-poetry disable-debug disable-dev @$(call poetry, --ansi python src/main.py, "Run main.py") ## Run in dev mode -dev: init install-poetry enable-debug enable-dev +dev: init install-poetry enable-dev @$(call poetry, --ansi python src/main.py, "Run main.py") ## Enable debug mode @@ -146,6 +146,8 @@ python-clean: ## TESTS ###################################### ## Run PyTest +test: pytest +tests: pytest pytest: init if [ ! $$? 
-ne 0 ]; then \ $(call poetry, tox -e pytest, "Run PyTest"); \ @@ -205,6 +207,11 @@ build: generate-dependencies @$(call title,"Build image in local") docker build ./ +reload-dev-container: + set -x + DOCKER_ID=$(shell docker ps | grep myelectricaldata| grep workspace | awk '{print $$NF}') + docker restart $$DOCKER_ID + ###################################### ## MAKEFILE FUNCTION ###################################### @@ -242,4 +249,11 @@ else: with open(f".env", 'w') as file: file.write("\n".join(env)) endef -export set_env \ No newline at end of file +export set_env + +###################################### +## OPENTRACING DEV TOOLS +otel-collector: jaeger +jaeger: ## ▶ Run Jaeger (opentrace collector & UI) in local. + docker-compose -f toolbox/tools/jaeger.yaml up -d + @$(call title, "Jaeger is running on http://localhost:16686") diff --git a/config.example.yaml b/config.example.yaml new file mode 100755 index 00000000..9baf9de1 --- /dev/null +++ b/config.example.yaml @@ -0,0 +1,97 @@ +backend: + uri: sqlite:////data/myelectricaldata.db +gateway: + url: myelectricaldata.fr + ssl: true +home_assistant: + enable: false + discovery_prefix: homeassistant +home_assistant_ws: + enable: false + ssl: false + token: '' + url: ws://localhost:8123 + purge: false + batch_size: 1000 + max_date: +influxdb: + enable: false + scheme: http + hostname: localhost + port: 8086 + token: my-token + org: myorg + bucket: mybucket + method: synchronous + timezone: UTC + wipe: false + batching_options: + batch_size: 1000 + flush_interval: 1000 + jitter_interval: 0 + retry_interval: 5000 + max_retry_time: '180_000' + max_retries: 5 + max_retry_delay: '125_000' + exponential_base: 2 +logging: + log_format: '%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s' + log_format_date: '%Y-%m-%d %H:%M:%S' + log2file: false + log_level: 20 + debug: false + log_http: false +mqtt: + enable: false + hostname: localhost + port: 1883 + username: '' + password: '' + prefix: myelectricaldata + 
client_id: myelectricaldata + retain: true + qos: 0 + cert: false +myelectricaldata: + MON_POINT_DE_LIVRAISON: + enable: true + name: MON_POINT_DE_LIVRAISON + token: VOTRE_TOKEN_MYELECTRICALDATA + cache: true + plan: BASE + consumption: true + consumption_detail: true + consumption_max_power: true + consumption_price_hc: 0 + consumption_price_hp: 0 + consumption_price_base: 0 + consumption_max_date: '' + consumption_detail_max_date: '' + production: false + production_detail: false + production_max_date: '' + production_detail_max_date: '' + production_price: 0 + offpeak_hours_0: '' + offpeak_hours_1: '' + offpeak_hours_2: '' + offpeak_hours_3: '' + offpeak_hours_4: '' + offpeak_hours_5: '' + offpeak_hours_6: '' + refresh_addresse: false + refresh_contract: false +opentelemetry: + enable: false + service_name: myelectricaldata + endpoint: http://localhost:4317 + environment: production + extension: + - sqlalchemy + - fastapi +server: # Configuration du serveur web. + cidr: 0.0.0.0 + port: 5000 + certfile: '' + keyfile: '' + cycle: 14400 diff --git a/config.exemple.yaml b/config.exemple.yaml deleted file mode 100755 index 7d704030..00000000 --- a/config.exemple.yaml +++ /dev/null @@ -1,117 +0,0 @@ -port: 5000 -debug: true -log2file: false -wipe_influxdb: false # Work only with influxdb > 2.X -# Par défaut le backend en local dans le /data/cache.db du conteneur. 
-# Mais il est possible de basculer sur une base de données externe de type SQLite ou PostgreSQL -# Exemple pour Postgres : -#storage_uri: postgresql://myelectricaldata:myelectricaldata@postgres:5432/myelectricaldata -home_assistant: # WITH MQTT DISCOVERY - enable: true - discovery: true - discovery_prefix: homeassistant -home_assistant_ws: # FOR ENERGY TAB - enable: false - ssl: true - token: HOME_ASSISTANT_TOKEN_GENERATE_IN_PROFILE_TABS_(BOTTOM) - url: myhomeassistant.domain.fr - max_date: "2021-06-01" - purge: false -ssl: - gateway: true - certfile: "" - keyfile: "" -influxdb: - enable: false - scheme: http - hostname: influxdb - port: 8086 - token: myelectricaldata - org: myelectricaldata - bucket: myelectricaldata - # ATTENTION, L'activation de l'importation asynchrone va réduire fortement le temps d'importation dans InfluxDB - # mais va augmenter la consommation mémoire & CPU et donc à activer uniquement sur un hardware robuste. - method: synchronous # Mode disponible : synchronous / asynchronous / batching - # batching_options permet uniquement de configurer la methode `batching`. - # Pour plus d'information : https://github.com/influxdata/influxdb-client-python#batching - batching_options: - batch_size: 1000 - flush_interval: 1000 - jitter_interval: 0 - retry_interval: 5000 - max_retry_time: 180_000 - max_retries: 5 - max_retry_delay: 125_000 - exponential_base: 2 -mqtt: - enable: false - hostname: mosquitto - port: 1883 - username: null - password: null - prefix: myelectricaldata - client_id: myelectricaldata # DOIT ETRE UNIQUE SUR LA TOTALITE DES CLIENTS CONNECTE AU SERVEUR MQTT - retain: true - qos: 0 -# ca_cert: /certs/ca.pem # Certificate Authority a utiliser pour etablir une connection SSL au server MQTT -# Configuration SSL optionnel. 
-#ssl: -# keyfile: "/data/key.pem" -# certfile: "/data/cert.pem" -myelectricaldata: - # Configuration de mon point de livraison (ne pas oublier d'adapter MON_PDL_1 avec votre numéro de PDL) - "MON_PDL_1": - enable: 'true' - token: TOKEN_DE_MON_PDL_1 - name: "Maison" - addresses: 'true' - cache: 'true' - consumption: 'true' - consumption_detail: 'true' - consumption_price_base: '0.145907' - consumption_price_hc: '0.124364' - consumption_price_hp: '0.164915' - consumption_max_date: "2021-06-01" - consumption_detail_max_date: "2021-06-01" - offpeak_hours_0: 22H00-6H00 # LUNDI - offpeak_hours_1: 22H00-6H00 # MARDI - offpeak_hours_2: 22H00-6H00 # MERCREDI - offpeak_hours_3: 22H00-6H00 # JEUDI - offpeak_hours_4: 22H00-6H00 # VENDREDI - offpeak_hours_5: 22H00-6H00;12H00-14H00 # SAMEDI - offpeak_hours_6: 22H00-6H00;12H00-14H00 # DIMANCHE - plan: HC/HP - production: 'false' - production_detail: 'false' - production_price: '0.0' - production_max_date: "2021-06-01" - production_detail_max_date: "2021-06-01" - refresh_addresse: 'false' - refresh_contract: 'false' -# "MON_PDL_2_AVEC_PRODUCTION": -# enable: 'true' -# token: TOKEN_DE_MON_PDL_2 -# name: "Maison de vacances" -# cache: 'true' -# consumption: 'true' -# consumption_detail: 'true' -# consumption_price_base: '0.175' -# consumption_price_hc: '0.175' -# consumption_price_hp: '0.175' -# consumption_max_date: "2021-06-01" -# consumption_detail_max_date: "2021-06-01" -# offpeak_hours_0: '' -# offpeak_hours_1: '' -# offpeak_hours_2: '' -# offpeak_hours_3: '' -# offpeak_hours_4: '' -# offpeak_hours_5: '' -# offpeak_hours_6: '' -# plan: BASE -# production: 'true' -# production_detail: 'true' -# production_price: '0.10' -# production_max_date: "2021-06-01" -# production_detail_max_date: "2021-06-01" -# refresh_addresse: 'false' -# refresh_contract: 'false' diff --git a/poetry.lock b/poetry.lock index 8792a565..726111cf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,91 +1,103 @@ # This file is automatically @generated by Poetry 
1.8.3 and should not be changed by hand. +[[package]] +name = "aiohappyeyeballs" +version = "2.3.4" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "aiohappyeyeballs-2.3.4-py3-none-any.whl", hash = "sha256:40a16ceffcf1fc9e142fd488123b2e218abc4188cf12ac20c67200e1579baa42"}, + {file = "aiohappyeyeballs-2.3.4.tar.gz", hash = "sha256:7e1ae8399c320a8adec76f6c919ed5ceae6edd4c3672f4d9eae2b27e37c80ff6"}, +] + [[package]] name = "aiohttp" -version = "3.9.5" +version = "3.10.0" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = 
"aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = 
"aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = 
"aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = 
"aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = 
"aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, + {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:68ab608118e212f56feef44d4785aa90b713042da301f26338f36497b481cd79"}, + {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:64a117c16273ca9f18670f33fc7fd9604b9f46ddb453ce948262889a6be72868"}, + {file = "aiohttp-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54076a25f32305e585a3abae1f0ad10646bec539e0e5ebcc62b54ee4982ec29f"}, + {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71c76685773444d90ae83874433505ed800e1706c391fdf9e57cc7857611e2f4"}, + {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdda86ab376f9b3095a1079a16fbe44acb9ddde349634f1c9909d13631ff3bcf"}, + {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6dcd1d21da5ae1416f69aa03e883a51e84b6c803b8618cbab341ac89a85b9e"}, + {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ef0135d7ab7fb0284342fbbf8e8ddf73b7fee8ecc55f5c3a3d0a6b765e6d8b"}, + {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccab9381f38c669bb9254d848f3b41a3284193b3e274a34687822f98412097e9"}, + {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:947da3aee057010bc750b7b4bb65cbd01b0bdb7c4e1cf278489a1d4a1e9596b3"}, + {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:5268b35fee7eb754fb5b3d0f16a84a2e9ed21306f5377f3818596214ad2d7714"}, + {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ff25d988fd6ce433b5c393094a5ca50df568bdccf90a8b340900e24e0d5fb45c"}, + {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:594b4b4f1dfe8378b4a0342576dc87a930c960641159f5ae83843834016dbd59"}, + {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c8820dad615cd2f296ed3fdea8402b12663ac9e5ea2aafc90ef5141eb10b50b8"}, + {file = "aiohttp-3.10.0-cp310-cp310-win32.whl", hash = "sha256:ab1d870403817c9a0486ca56ccbc0ebaf85d992277d48777faa5a95e40e5bcca"}, + {file = "aiohttp-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:563705a94ea3af43467167f3a21c665f3b847b2a0ae5544fa9e18df686a660da"}, + {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13679e11937d3f37600860de1f848e2e062e2b396d3aa79b38c89f9c8ab7e791"}, + {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c66a1aadafbc0bd7d648cb7fcb3860ec9beb1b436ce3357036a4d9284fcef9a"}, + {file = "aiohttp-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7e3545b06aae925f90f06402e05cfb9c62c6409ce57041932163b09c48daad6"}, + {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:effafe5144aa32f0388e8f99b1b2692cf094ea2f6b7ceca384b54338b77b1f50"}, + {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a04f2c8d41821a2507b49b2694c40495a295b013afb0cc7355b337980b47c546"}, + {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dbfac556219d884d50edc6e1952a93545c2786193f00f5521ec0d9d464040ab"}, + {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a65472256c5232681968deeea3cd5453aa091c44e8db09f22f1a1491d422c2d9"}, + {file = 
"aiohttp-3.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941366a554e566efdd3f042e17a9e461a36202469e5fd2aee66fe3efe6412aef"}, + {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:927b4aca6340301e7d8bb05278d0b6585b8633ea852b7022d604a5df920486bf"}, + {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:34adb8412e736a5d0df6d1fccdf71599dfb07a63add241a94a189b6364e997f1"}, + {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:43c60d9b332a01ee985f080f639f3e56abcfb95ec1320013c94083c3b6a2e143"}, + {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3f49edf7c5cd2987634116e1b6a0ee2438fca17f7c4ee480ff41decb76cf6158"}, + {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9784246431eaf9d651b3cc06f9c64f9a9f57299f4971c5ea778fa0b81074ef13"}, + {file = "aiohttp-3.10.0-cp311-cp311-win32.whl", hash = "sha256:bec91402df78b897a47b66b9c071f48051cea68d853d8bc1d4404896c6de41ae"}, + {file = "aiohttp-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:25a9924343bf91b0c5082cae32cfc5a1f8787ac0433966319ec07b0ed4570722"}, + {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:21dab4a704c68dc7bc2a1219a4027158e8968e2079f1444eda2ba88bc9f2895f"}, + {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:872c0dcaccebd5733d535868fe2356aa6939f5827dcea7a8b9355bb2eff6f56e"}, + {file = "aiohttp-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f381424dbce313bb5a666a215e7a9dcebbc533e9a2c467a1f0c95279d24d1fa7"}, + {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca48e9f092a417c6669ee8d3a19d40b3c66dde1a2ae0d57e66c34812819b671"}, + {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbe2f6d0466f5c59c7258e0745c20d74806a1385fbb7963e5bbe2309a11cc69b"}, + {file = 
"aiohttp-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03799a95402a7ed62671c4465e1eae51d749d5439dbc49edb6eee52ea165c50b"}, + {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5549c71c35b5f057a4eebcc538c41299826f7813f28880722b60e41c861a57ec"}, + {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6fa7a42b78d8698491dc4ad388169de54cca551aa9900f750547372de396277"}, + {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:77bbf0a2f6fefac6c0db1792c234f577d80299a33ce7125467439097cf869198"}, + {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:34eaf5cfcc979846d73571b1a4be22cad5e029d55cdbe77cdc7545caa4dcb925"}, + {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4f1de31a585344a106db43a9c3af2e15bb82e053618ff759f1fdd31d82da38eb"}, + {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3a1ea61d96146e9b9e5597069466e2e4d9e01e09381c5dd51659f890d5e29e7"}, + {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:73c01201219eb039a828bb58dcc13112eec2fed6eea718356316cd552df26e04"}, + {file = "aiohttp-3.10.0-cp312-cp312-win32.whl", hash = "sha256:33e915971eee6d2056d15470a1214e4e0f72b6aad10225548a7ab4c4f54e2db7"}, + {file = "aiohttp-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2dc75da06c35a7b47a88ceadbf993a53d77d66423c2a78de8c6f9fb41ec35687"}, + {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f1bc4d68b83966012813598fe39b35b4e6019b69d29385cf7ec1cb08e1ff829b"}, + {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b8b31c057a0b7bb822a159c490af05cb11b8069097f3236746a78315998afa"}, + {file = "aiohttp-3.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10f0d7894ddc6ff8f369e3fdc082ef1f940dc1f5b9003cd40945d24845477220"}, + {file = 
"aiohttp-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72de8ffba4a27e3c6e83e58a379fc4fe5548f69f9b541fde895afb9be8c31658"}, + {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd36d0f0afc2bd84f007cedd2d9a449c3cf04af471853a25eb71f28bc2e1a119"}, + {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f64d503c661864866c09806ac360b95457f872d639ca61719115a9f389b2ec90"}, + {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31616121369bc823791056c632f544c6c8f8d1ceecffd8bf3f72ef621eaabf49"}, + {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f76c12abb88b7ee64b3f9ae72f0644af49ff139067b5add142836dab405d60d4"}, + {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6c99eef30a7e98144bcf44d615bc0f445b3a3730495fcc16124cb61117e1f81e"}, + {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:39e7ec718e7a1971a5d98357e3e8c0529477d45c711d32cd91999dc8d8404e1e"}, + {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1cef548ee4e84264b78879de0c754bbe223193c6313beb242ce862f82eab184"}, + {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f98f036eab11d2f90cdd01b9d1410de9d7eb520d070debeb2edadf158b758431"}, + {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc4376ff537f7d2c1e98f97f6d548e99e5d96078b0333c1d3177c11467b972de"}, + {file = "aiohttp-3.10.0-cp38-cp38-win32.whl", hash = "sha256:ebedc51ee6d39f9ea5e26e255fd56a7f4e79a56e77d960f9bae75ef4f95ed57f"}, + {file = "aiohttp-3.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:aad87626f31a85fd4af02ba7fd6cc424b39d4bff5c8677e612882649da572e47"}, + {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:1dc95c5e2a5e60095f1bb51822e3b504e6a7430c9b44bff2120c29bb876c5202"}, + {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c83977f7b6f4f4a96fab500f5a76d355f19f42675224a3002d375b3fb309174"}, + {file = "aiohttp-3.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8cedc48d36652dd3ac40e5c7c139d528202393e341a5e3475acedb5e8d5c4c75"}, + {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b099fbb823efed3c1d736f343ac60d66531b13680ee9b2669e368280f41c2b8"}, + {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d583755ddb9c97a2da1322f17fc7d26792f4e035f472d675e2761c766f94c2ff"}, + {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a03a4407bdb9ae815f0d5a19df482b17df530cf7bf9c78771aa1c713c37ff1f"}, + {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb6e65f6ea7caa0188e36bebe9e72b259d3d525634758c91209afb5a6cbcba7"}, + {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6612c6ed3147a4a2d6463454b94b877566b38215665be4c729cd8b7bdce15b4"}, + {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b0c0148d2a69b82ffe650c2ce235b431d49a90bde7dd2629bcb40314957acf6"}, + {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0d85a173b4dbbaaad1900e197181ea0fafa617ca6656663f629a8a372fdc7d06"}, + {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:12c43dace645023583f3dd2337dfc3aa92c99fb943b64dcf2bc15c7aa0fb4a95"}, + {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:33acb0d9bf12cdc80ceec6f5fda83ea7990ce0321c54234d629529ca2c54e33d"}, + {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:91e0b76502205484a4d1d6f25f461fa60fe81a7987b90e57f7b941b0753c3ec8"}, + {file = 
"aiohttp-3.10.0-cp39-cp39-win32.whl", hash = "sha256:1ebd8ed91428ffbe8b33a5bd6f50174e11882d5b8e2fe28670406ab5ee045ede"}, + {file = "aiohttp-3.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:0433795c4a8bafc03deb3e662192250ba5db347c41231b0273380d2f53c9ea0b"}, + {file = "aiohttp-3.10.0.tar.gz", hash = "sha256:e8dd7da2609303e3574c95b0ec9f1fd49647ef29b94701a2862cceae76382e1d"}, ] [package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" attrs = ">=17.3.0" frozenlist = ">=1.1.1" @@ -93,7 +105,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" @@ -111,13 +123,13 @@ frozenlist = ">=1.1.0" [[package]] name = "alembic" -version = "1.13.1" +version = "1.13.2" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, ] [package.dependencies] @@ -130,13 +142,13 @@ tz = ["backports.zoneinfo"] [[package]] name = "anyio" -version = "4.4.0" +version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = 
"anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, ] [package.dependencies] @@ -144,9 +156,9 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] [[package]] name = "art" @@ -162,21 +174,31 @@ files = [ [package.extras] dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture (>=1.0)"] +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + [[package]] name = "astroid" -version = "2.15.8" +version = "3.2.4" description = "An abstract syntax tree for Python with inference support." 
optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8.0" files = [ - {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, - {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, ] -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""} - [[package]] name = "asyncio" version = "3.4.3" @@ -253,26 +275,37 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = 
"sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -423,13 +456,13 @@ files = [ [[package]] name = "conventional-pre-commit" -version = "3.2.0" +version = "3.3.0" description = "A pre-commit hook that checks commit messages for Conventional Commits formatting." optional = false python-versions = ">=3.8" files = [ - {file = "conventional_pre_commit-3.2.0-py3-none-any.whl", hash = "sha256:765ff6abed9a1d9866037d3007f154793ef851547da04ef0b6da26a1ef242e6e"}, - {file = "conventional_pre_commit-3.2.0.tar.gz", hash = "sha256:08369374ff458faec7ed0caa2b0f4f76c0800b49c513f6f0641e9258d0cf9775"}, + {file = "conventional_pre_commit-3.3.0-py3-none-any.whl", hash = "sha256:644dcbd285951cf3614b3e264e383aa966e10de86568e6caae81fa5f32dde024"}, + {file = "conventional_pre_commit-3.3.0.tar.gz", hash = "sha256:93920d55e9a37b5e6e2e9da660c7813daefb0f7ac3d5add76218dc9162f61aa1"}, ] [package.extras] @@ -437,68 +470,103 @@ dev = ["black", "build", "coverage", "flake8", "pre-commit", "pytest", "setuptoo [[package]] name = "coverage" -version = "7.5.3" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = 
"coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = 
"coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = 
"sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.extras] toml = ["tomli"] +[[package]] +name = "deepdiff" +version = "7.0.1" +description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +optional = false +python-versions = ">=3.8" +files = [ + {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"}, + {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"}, +] + +[package.dependencies] +ordered-set = ">=4.1.0,<4.2.0" + +[package.extras] +cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"] +optimize = ["orjson"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + [[package]] name = "dill" version = "0.3.8" @@ -525,6 +593,26 @@ files = [ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + [[package]] name = "docker" version = "6.1.3" @@ -546,24 +634,76 @@ websocket-client = ">=0.32.0" [package.extras] ssh = ["paramiko (>=2.4.3)"] +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + [[package]] name = "fastapi" -version = "0.109.2" +version = "0.111.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, - {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, + {file = "fastapi-0.111.1-py3-none-any.whl", hash = "sha256:4f51cfa25d72f9fbc3280832e84b32494cf186f50158d364a8765aabf22587bf"}, + {file = "fastapi-0.111.1.tar.gz", hash = "sha256:ddd1ac34cb1f76c2e2d7f8545a4bcb5463bce4834e81abf0b189e0c359ab2413"}, ] [package.dependencies] +email_validator = ">=2.0.0" +fastapi-cli = ">=0.0.2" +httpx = ">=0.23.0" +jinja2 = ">=2.11.2" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.36.3,<0.37.0" +python-multipart = ">=0.0.7" +starlette = ">=0.37.2,<0.38.0" typing-extensions = ">=4.8.0" +uvicorn = {version = ">=0.12.0", extras = ["standard"]} + +[package.extras] +all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-cli" +version = "0.0.4" +description = "Run and manage FastAPI apps from the command 
line with FastAPI CLI. 🚀" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_cli-0.0.4-py3-none-any.whl", hash = "sha256:a2552f3a7ae64058cdbb530be6fa6dbfc975dc165e4fa66d224c3d396e25e809"}, + {file = "fastapi_cli-0.0.4.tar.gz", hash = "sha256:e2e9ffaffc1f7767f488d6da34b6f5a377751c996f397902eb6abb99a67bde32"}, +] + +[package.dependencies] +typer = ">=0.12.3" [package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"] + +[[package]] +name = "fastapi-lifespan-manager" +version = "0.1.4" +description = "FastAPI Lifespan Manager" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "fastapi_lifespan_manager-0.1.4-py3-none-any.whl", hash = "sha256:9d18e2c01cd91432ed05c7f94a02bb43e9ad68a5a55ccc813005f0b5f79e8bd9"}, + {file = "fastapi_lifespan_manager-0.1.4.tar.gz", hash = "sha256:1dc4a776cd4305bf7baf4791f148216c21ab966916216d72b5dc2a27a21d8d02"}, +] + +[package.dependencies] +fastapi = ">=0.93.0" [[package]] name = "fastapi-utils" @@ -583,34 +723,34 @@ sqlalchemy = ">=1.3.12,<2.0.0" [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" +pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] @@ -705,6 +845,43 @@ files = [ [package.dependencies] flake8 = 
">=5.0" +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-sqlalchemy" +version = "3.0.5" +description = "Add SQLAlchemy support to your Flask application." +optional = false +python-versions = ">=3.7" +files = [ + {file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"}, + {file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"}, +] + +[package.dependencies] +flask = ">=2.2.5" +sqlalchemy = ">=1.4.18" + [[package]] name = "frozenlist" version = "1.4.1" @@ -791,6 +968,23 @@ files = [ {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] +[[package]] +name = "googleapis-common-protos" +version = "1.63.2" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" + 
+[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + [[package]] name = "greenlet" version = "3.0.3" @@ -862,6 +1056,64 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "grpcio" +version = "1.65.2" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.65.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:51231a22aea830be1d955de5a15da4391b3ac8e1d7868f362c74c15a0e9f5c89"}, + {file = "grpcio-1.65.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:87da0fb85ba42257e450561b0264e36abe47faae07476621ae65d8f5f60f22cd"}, + {file = "grpcio-1.65.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:3a6b36e20b02ca830b15b5eb4abb437de1d42ba93353d1f76b00337108f7ce8e"}, + {file = "grpcio-1.65.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03fdd86ff7d9957b822b9bf1fe0ae1e21e258e9c1d5535a5e9c67de0ad45b6a8"}, + {file = "grpcio-1.65.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e5a67bbf8a1b3be5535802f6e9f507d1d8d38fb32de81ec7f03706d95a9126"}, + {file = "grpcio-1.65.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2ce639f2a2951aedbe9a3636f5730288f9b77c2627f116265d7d2789555e5662"}, + {file = "grpcio-1.65.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b10349ceebec745a47e4339ef7c4878c9b53b82ae4b0883e16544625016d6242"}, + {file = "grpcio-1.65.2-cp310-cp310-win32.whl", hash = "sha256:f931fe9b244dc23c7478c513c3ed94ded93da8bd1a95a4d97b21abdef644304a"}, + {file = "grpcio-1.65.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c9c865d2fdf40e7e952038a0b5e0f32b01da84ecf04943b08e8917c8ccc9cf8"}, + {file = "grpcio-1.65.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:f4b7a7d68313e252e09550bd03d9d11e460dae681cf95588a131b6b3e07d1e30"}, + {file = "grpcio-1.65.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ba9d4b3d4fc00b8083bb47a8c40a74ba3ea330713fdd59cf53c926c9a16b002"}, + {file = 
"grpcio-1.65.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b7bfcbee6b32f0e4786b7813692b3907c9e444f529126b8520cac9914479b98c"}, + {file = "grpcio-1.65.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aa50787bc8036bd5ea9b7ebbbd2c49c78122eb9ff98d3c217a7c146313c5030"}, + {file = "grpcio-1.65.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd7dc770926cc66050242eb6c63ca8ce12cd69010bf4ff7ea6e721d4f4b11e4d"}, + {file = "grpcio-1.65.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c45977fdc675a8961875adab7f04b785f65d3fd9c737cd60b5e3a9b1392ad444"}, + {file = "grpcio-1.65.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a0cd7297abf0a02a9399edebe8c662058c7f0768bfbe859837707d389ad327f"}, + {file = "grpcio-1.65.2-cp311-cp311-win32.whl", hash = "sha256:60fe2f90875f2bef105158e370fbbefadd179f8cd689bc2cee6844aca4ccb7bb"}, + {file = "grpcio-1.65.2-cp311-cp311-win_amd64.whl", hash = "sha256:e0b2bf34340999c6d938107ec2cc9bce1ea59bf08e4694cfa47e782bdbd361f4"}, + {file = "grpcio-1.65.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:71fa3b7a6cef62a00014205d0e707610cfd50ae54f617d296017f10c6a9fad0d"}, + {file = "grpcio-1.65.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8856187a359a55babfa4d49ad96f2dd7edd8be3a36b813c7a9e41ef3d763400f"}, + {file = "grpcio-1.65.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cb48342de1c3be59e6de79c6bbc01cf05562c571a3ed32f7c2e149e7934824cf"}, + {file = "grpcio-1.65.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b193e116e085ad4d7ef1518d79e9fedfa7688f4967f64a6246b5b196a26326a"}, + {file = "grpcio-1.65.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ce7f4c766fecc34455357b31b1e316506ea6ac48abbb9a650843d20337a2036"}, + {file = "grpcio-1.65.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:76125096d2a090d4acdce0f06f9511cebe1bcfbc0bd040e495563d7a8747dda1"}, + {file = 
"grpcio-1.65.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4fba3ae83ef5acd111c2dd92233ff167411db84e1ff17a00c34b5428355526c5"}, + {file = "grpcio-1.65.2-cp312-cp312-win32.whl", hash = "sha256:7fd639b0988ed5114d4b2a72ea453aafcb1439dd433c61834886b92afed9c6c1"}, + {file = "grpcio-1.65.2-cp312-cp312-win_amd64.whl", hash = "sha256:b6bba0f973ef6fe7434834f1b63d16bab4b50879d5bb0ca6eb0495c87d5cbc78"}, + {file = "grpcio-1.65.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:510bf7ec7f44e9420bb17970fb450522666d8b1c09cdf59b735de0c2dc806b79"}, + {file = "grpcio-1.65.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aacfd499d23130578184057008ea5329732a5ac59a4fcb73c0467d86723d23c8"}, + {file = "grpcio-1.65.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:67c5e5aa92b5832ae7a3399bce5b8562fb28686446732bfa17f97d5082e8501d"}, + {file = "grpcio-1.65.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7b752471e7ff1472ddbf3035a34fd8e24f2eac4fedbdab311e8f3e0dee889f7"}, + {file = "grpcio-1.65.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3101fa25b93f185e8cc698f8c2abee897891e6bae4f13472f66df21e8ae40d46"}, + {file = "grpcio-1.65.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:01600b1b02fdc9d648630d3de0a4cbf7ebe5f94b40ec1f65e3fd4b94a3b052cf"}, + {file = "grpcio-1.65.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8886d24345bf4b1693e9c09cf6a520f0baedd2af2a876f91bb508b24d0d46041"}, + {file = "grpcio-1.65.2-cp38-cp38-win32.whl", hash = "sha256:0b2ae6868864e4b06bff89cf91730a63141327158bf0677428ef315ea1dbdb0b"}, + {file = "grpcio-1.65.2-cp38-cp38-win_amd64.whl", hash = "sha256:c2900ad06fd8f5ad8832b1ee287caccb4a957e971b2b7983e0cd7a8e7c7098fb"}, + {file = "grpcio-1.65.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:06a7ea12a81e5e2fb17528556c7f828b90bd2aec3a645f5cd5f35f80aa59ac6a"}, + {file = "grpcio-1.65.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:5edea0ea18e9fd5326d385a4c92a1fed605454e9a2c57ff131df0a08004b7e69"}, + {file = "grpcio-1.65.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:d388f093010a014d3b3ddf8185ff45c5279fd825d0b20e21c8076515ae61db31"}, + {file = "grpcio-1.65.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5225b8ce980b598187f64436ed95ea149966d538253c28668347d331968e2386"}, + {file = "grpcio-1.65.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:892f03939df46d0bfcf89fe1dbcc8818f93ad6f3377587e8db6c2b1f598736c2"}, + {file = "grpcio-1.65.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77fddf42bbca65ee4db679d0608e1ffa8b22b7f516c79665b7620be2f6357c85"}, + {file = "grpcio-1.65.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3a3139414399078560a84203f9fe3592483d902a2af84062c571be6191143a9f"}, + {file = "grpcio-1.65.2-cp39-cp39-win32.whl", hash = "sha256:8d6fd1206433428d0a4ba771eac70579b41a265fe835a4d8a5214c7235e69926"}, + {file = "grpcio-1.65.2-cp39-cp39-win_amd64.whl", hash = "sha256:478725160e2cfc1bfa5ab3e7bb7c896cc182c8f57255d780007cfd6fb46e97b5"}, + {file = "grpcio-1.65.2.tar.gz", hash = "sha256:e2c9bbb84d5517f2bccdb1836b8ee267a1757acb3cb3e575065c103220b577ac"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.65.2)"] + [[package]] name = "h11" version = "0.14.0" @@ -873,15 +1125,108 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = 
"httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "identify" -version = "2.5.36" +version = "2.6.0" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, ] [package.extras] @@ -898,15 +1243,34 @@ files = [ {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "importlib-metadata" +version = "8.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + [[package]] name = "influxdb-client" -version = "1.43.0" +version = "1.44.0" description = "InfluxDB 2.0 Python client library" optional = false python-versions = ">=3.7" files = [ - {file = "influxdb_client-1.43.0-py3-none-any.whl", hash = "sha256:f079e63018f521024118bc0141b6403c65506711e2e6e93500f8e69f1675dc38"}, - {file = "influxdb_client-1.43.0.tar.gz", hash = "sha256:ae2614d891baed52c0ae8f6194a04ee5b1c6422f6061318a3639fe63b7671b25"}, + {file = "influxdb_client-1.44.0-py3-none-any.whl", hash = "sha256:e4c1ac9c9925c4693d63e988e22f65d2ddc1867f8910813b7f4721633175f2a0"}, + {file = "influxdb_client-1.44.0.tar.gz", hash = "sha256:da9bc0cc49de4a0ac844d833c1efa65227ec5a2254e63cdbe07b5d532c0c37f8"}, ] [package.dependencies] @@ -965,6 +1329,17 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + [[package]] name = "jinja2" version = "3.1.4" @@ -982,52 +1357,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." -optional = false -python-versions = ">=3.8" -files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = 
"lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = 
"lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, -] - [[package]] name = "mako" version = "1.3.5" @@ -1062,6 +1391,30 @@ files = [ docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -1142,6 +1495,17 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mergedeep" version = "1.3.4" @@ -1292,24 +1656,296 @@ files = [ [[package]] name = "nodeenv" -version = "1.9.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, - {file = "nodeenv-1.9.0.tar.gz", hash 
= "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "opentelemetry-api" +version = "1.26.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064"}, + {file = "opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.0.0" + +[[package]] +name = "opentelemetry-distro" +version = "0.47b0" +description = "OpenTelemetry Python Distro" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_distro-0.47b0-py3-none-any.whl", hash = "sha256:fc0da3d272ebb1716cefca0b072a006f274dd2025a92db5753756c770deded9c"}, + {file = "opentelemetry_distro-0.47b0.tar.gz", hash = "sha256:715615724bd5c528a2433c0caeb373e4581f8dc7b4bc270179407a1cca0ad99e"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-sdk = ">=1.13,<2.0" + +[package.extras] +otlp = ["opentelemetry-exporter-otlp (==1.26.0)"] + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.26.0" +description = "OpenTelemetry Collector Exporters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp-1.26.0-py3-none-any.whl", hash = "sha256:f839989f54bda85ee33c5dae033c44dcec9ccbb0dafc6a43d585df44da1d2036"}, + {file = "opentelemetry_exporter_otlp-1.26.0.tar.gz", hash = "sha256:cf0e093f080011951d9f97431a83869761e4d4ebe83a4195ee92d7806223299c"}, +] + +[package.dependencies] 
+opentelemetry-exporter-otlp-proto-grpc = "1.26.0" +opentelemetry-exporter-otlp-proto-http = "1.26.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.26.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl", hash = "sha256:ee4d8f8891a1b9c372abf8d109409e5b81947cf66423fd998e56880057afbc71"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.26.0.tar.gz", hash = "sha256:bdbe50e2e22a1c71acaa0c8ba6efaadd58882e5a5978737a44a4c4b10d304c92"}, +] + +[package.dependencies] +opentelemetry-proto = "1.26.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.26.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl", hash = "sha256:e2be5eff72ebcb010675b818e8d7c2e7d61ec451755b8de67a140bc49b9b0280"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0.tar.gz", hash = "sha256:a65b67a9a6b06ba1ec406114568e21afe88c1cdb29c464f2507d529eb906d8ae"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.26.0" +opentelemetry-proto = "1.26.0" +opentelemetry-sdk = ">=1.26.0,<1.27.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.26.0" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.26.0-py3-none-any.whl", hash = "sha256:ee72a87c48ec977421b02f16c52ea8d884122470e0be573905237b540f4ee562"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.26.0.tar.gz", hash = "sha256:5801ebbcf7b527377883e6cbbdda35ee712dc55114fff1e93dfee210be56c908"}, ] 
+[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.26.0" +opentelemetry-proto = "1.26.0" +opentelemetry-sdk = ">=1.26.0,<1.27.0" +requests = ">=2.7,<3.0" + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.47b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation-0.47b0-py3-none-any.whl", hash = "sha256:88974ee52b1db08fc298334b51c19d47e53099c33740e48c4f084bd1afd052d5"}, + {file = "opentelemetry_instrumentation-0.47b0.tar.gz", hash = "sha256:96f9885e450c35e3f16a4f33145f2ebf620aea910c9fd74a392bbc0f807a350f"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.47b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_asgi-0.47b0-py3-none-any.whl", hash = "sha256:b798dc4957b3edc9dfecb47a4c05809036a4b762234c5071212fda39ead80ade"}, + {file = "opentelemetry_instrumentation_asgi-0.47b0.tar.gz", hash = "sha256:e78b7822c1bca0511e5e9610ec484b8994a81670375e570c76f06f69af7c506a"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-semantic-conventions = "0.47b0" +opentelemetry-util-http = "0.47b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.47b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.47b0-py3-none-any.whl", hash = 
"sha256:5ac28dd401160b02e4f544a85a9e4f61a8cbe5b077ea0379d411615376a2bd21"}, + {file = "opentelemetry_instrumentation_fastapi-0.47b0.tar.gz", hash = "sha256:0c7c10b5d971e99a420678ffd16c5b1ea4f0db3b31b62faf305fbb03b4ebee36"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-instrumentation-asgi = "0.47b0" +opentelemetry-semantic-conventions = "0.47b0" +opentelemetry-util-http = "0.47b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)", "fastapi-slim (>=0.111.0,<0.112.0)"] + +[[package]] +name = "opentelemetry-instrumentation-requests" +version = "0.47b0" +description = "OpenTelemetry requests instrumentation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_requests-0.47b0-py3-none-any.whl", hash = "sha256:77fdd13f64fef2cb44665fe6975eadb993d78f96612e55a502e79b34ef7fee47"}, + {file = "opentelemetry_instrumentation_requests-0.47b0.tar.gz", hash = "sha256:f85ed52cbca21bff226e0e7f1888e5b9bc386657ecf4b0440f328e5b3aba8436"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-semantic-conventions = "0.47b0" +opentelemetry-util-http = "0.47b0" + +[package.extras] +instruments = ["requests (>=2.0,<3.0)"] + +[[package]] +name = "opentelemetry-instrumentation-sqlalchemy" +version = "0.47b0" +description = "OpenTelemetry SQLAlchemy instrumentation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_sqlalchemy-0.47b0-py3-none-any.whl", hash = "sha256:997b2c4a624ebcba45b9bda27882622d0ab3028d66a5fb50cdcf3581af04b3d1"}, + {file = "opentelemetry_instrumentation_sqlalchemy-0.47b0.tar.gz", hash = "sha256:bbeab06fc421ddae16bb69ca287abb81a131d3dff97de60b02c092887794103d"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.47b0" +opentelemetry-semantic-conventions = "0.47b0" +packaging = ">=21.0" 
+wrapt = ">=1.11.2" + +[package.extras] +instruments = ["sqlalchemy"] + +[[package]] +name = "opentelemetry-proto" +version = "1.26.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_proto-1.26.0-py3-none-any.whl", hash = "sha256:6c4d7b4d4d9c88543bcf8c28ae3f8f0448a753dc291c18c5390444c90b76a725"}, + {file = "opentelemetry_proto-1.26.0.tar.gz", hash = "sha256:c5c18796c0cab3751fc3b98dee53855835e90c0422924b484432ac852d93dc1e"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.26.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897"}, + {file = "opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85"}, +] + +[package.dependencies] +opentelemetry-api = "1.26.0" +opentelemetry-semantic-conventions = "0.47b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.47b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063"}, + {file = "opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.26.0" + +[[package]] +name = "opentelemetry-util-http" +version = "0.47b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_util_http-0.47b0-py3-none-any.whl", hash = 
"sha256:3d3215e09c4a723b12da6d0233a31395aeb2bb33a64d7b15a1500690ba250f19"}, + {file = "opentelemetry_util_http-0.47b0.tar.gz", hash = "sha256:352a07664c18eef827eb8ddcbd64c64a7284a39dd1655e2f16f577eb046ccb32"}, +] + +[[package]] +name = "ordered-set" +version = "4.1.0" +description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] + +[package.extras] +dev = ["black", "mypy", "pytest"] + [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -1397,13 +2033,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.7.1" +version = "3.8.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, - {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, ] [package.dependencies] @@ -1415,18 +2051,38 @@ virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" -version = "3.0.45" +version = "3.0.47" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.45-py3-none-any.whl", hash = "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a"}, - {file = "prompt_toolkit-3.0.45.tar.gz", hash = "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089"}, + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "protobuf" +version = "4.25.4" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, + {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, + {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, + {file = 
"protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, + {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, + {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, + {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, + {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, + {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, + {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, + {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, +] + [[package]] name = "psutil" version = "5.9.8" @@ -1538,58 +2194,65 @@ files = [ [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] name = "pydantic" -version = "1.10.15" +version = "1.10.17" description = "Data validation and settings management using python type hints" 
optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, - {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, - {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, - {file = 
"pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, - {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, - {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, - {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, - {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, - {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, - {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, - {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, 
+ {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, + {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, + {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, + {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, + {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, + {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, + {file = 
"pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, + {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, + {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, + {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, + {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, ] [package.dependencies] @@ -1627,22 +2290,36 @@ files = [ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pylint" -version = "2.17.7" +version = "3.2.6" description = "python code static checker" optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8.0" files = [ - {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, - {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, + {file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"}, + {file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"}, ] [package.dependencies] -astroid = ">=2.15.8,<=2.17.0-dev0" +astroid = ">=3.2.4,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = 
{version = ">=0.3.6", markers = "python_version >= \"3.11\""} -isort = ">=4.2.5,<6" +dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomlkit = ">=0.10.1" @@ -1651,6 +2328,39 @@ tomlkit = ">=0.10.1" spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] +[[package]] +name = "pylint-flask-sqlalchemy" +version = "0.2.0" +description = "A Pylint plugin for improving code analysis when editing code using Flask-SQLAlchemy" +optional = false +python-versions = ">=3.5" +files = [ + {file = "pylint_flask_sqlalchemy-0.2.0-py3-none-any.whl", hash = "sha256:27f1fffb940adc9ac8f00ff7eb91ec39ca070c0f3bcec0d7f1c702a993d40505"}, + {file = "pylint_flask_sqlalchemy-0.2.0.tar.gz", hash = "sha256:8ede5baba1a465d8ba39d8383ffcf0889d7a6afeff44bd24177fcf529ba8aa81"}, +] + +[package.dependencies] +Flask-SQLAlchemy = ">=2" +pylint = "*" + +[package.extras] +dev = ["black", "bump2version", "gitchangelog", "mypy", "pystache"] + +[[package]] +name = "pylint-sqlalchemy" +version = "0.3.0" +description = "pylint plugin to fix incompatibility issues with sqlalchemy" +optional = false +python-versions = "*" +files = [ + {file = "pylint-sqlalchemy-0.3.0.tar.gz", hash = "sha256:53ff5e721fddf239577ff89aa9e605d419d44a6553834e36cb1f93742ee9be35"}, + {file = "pylint_sqlalchemy-0.3.0-py2.py3-none-any.whl", hash = "sha256:466b1bddd3aaf561134e39ee8a56b5511ea077ff781f9e7d57898401164a91e9"}, +] + +[package.dependencies] +astroid = ">=1.5" +pylint = ">=1.6" + [[package]] name = "pymysql" version = "1.1.1" @@ -1686,21 +2396,21 @@ image = ["Pillow (>=8.0.0)"] [[package]] name = "pyproject-api" -version = "1.6.1" +version = "1.7.1" description = "API to interact with the python pyproject.toml based projects" optional = false python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, - 
{file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, + {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, + {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, ] [package.dependencies] -packaging = ">=23.1" +packaging = ">=24.1" [package.extras] -docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] +docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] [[package]] name = "pytest" @@ -1767,17 +2477,17 @@ cli = ["click (>=5.0)"] [[package]] name = "python-multipart" -version = "0.0.6" +version = "0.0.7" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.7" files = [ - {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"}, - {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"}, + {file = "python_multipart-0.0.7-py3-none-any.whl", hash = "sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49"}, + {file = "python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9"}, ] [package.extras] -dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] +dev = ["atomicwrites (==1.2.1)", 
"attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] [[package]] name = "pytz" @@ -1903,13 +2613,13 @@ typing-extensions = ">=4.1.1,<5.0.0" [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1922,6 +2632,101 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, 
+ {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file 
= "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + [[package]] name = "ruff" version = "0.2.2" @@ -1950,18 +2755,30 @@ files = [ [[package]] name = "setuptools" -version = "70.0.0" +version = "72.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, + {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs 
(>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] [[package]] name = "six" @@ -1998,57 +2815,55 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "1.4.53" description = "Database 
Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash 
= "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = 
"sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b61ac5457d91b5629a3dea2b258deb4cdd35ac8f6fa2031d2b9b2fff5b3396da"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a96aa8d425047551676b0e178ddb0683421e78eda879ab55775128b2e612cae"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e10ac36f0b994235c13388b39598bf27219ec8bdea5be99bdac612b01cbe525"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437592b341a3229dd0443c9c803b0bf0a466f8f539014fef6cdb9c06b7edb7f9"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:784272ceb5eb71421fea9568749bcbe8bd019261a0e2e710a7efa76057af2499"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win32.whl", hash = "sha256:122d7b5722df1a24402c6748bbb04687ef981493bb559d0cc0beffe722e0e6ed"}, + {file = "SQLAlchemy-1.4.53-cp310-cp310-win_amd64.whl", hash = "sha256:4604d42b2abccba266d3f5bbe883684b5df93e74054024c70d3fbb5eea45e530"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fb8e15dfa47f5de11ab073e12aadd6b502cfb7ac4bafd18bd18cfd1c7d13dbbc"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8be4df55e8fde3006d9cb1f6b3df2ba26db613855dc4df2c0fcd5ec15cb3b7"}, + {file = 
"SQLAlchemy-1.4.53-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b11640251f9a9789fd96cd6e5d176b1c230230c70ad40299bcbcc568451b4c"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win32.whl", hash = "sha256:cd534c716f86bdf95b7b984a34ee278c91d1b1d7d183e7e5ff878600b1696046"}, + {file = "SQLAlchemy-1.4.53-cp311-cp311-win_amd64.whl", hash = "sha256:6dd06572872ca13ef5a90306a3e5af787498ddaa17fb00109b1243642646cd69"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2774c24c405136c3ef472e2352bdca7330659d481fbf2283f996c0ef9eb90f22"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68a614765197b3d13a730d631a78c3bb9b3b72ba58ed7ab295d58d517464e315"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d13d4dfbc6e52363886b47cf02cf68c5d2a37c468626694dc210d7e97d4ad330"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win32.whl", hash = "sha256:197065b91456574d70b6459bfa62bc0b52a4960a29ef923c375ec427274a3e05"}, + {file = "SQLAlchemy-1.4.53-cp312-cp312-win_amd64.whl", hash = "sha256:421306c4b936b0271a3ce2dc074928d5ece4a36f9c482daa5770f44ecfc3a883"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:13fc34b35d8ddb3fbe3f8fcfdf6c2546e676187f0fb20f5774da362ddaf8fa2d"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626be971ff89541cfd3e70b54be00b57a7f8557204decb6223ce0428fec058f3"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:991e42fdfec561ebc6a4fae7161a86d129d6069fa14210b96b8dd752afa7059c"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", 
hash = "sha256:95123f3a1e0e8020848fd32ba751db889a01a44e4e4fef7e58c87ddd0b2fca59"}, + {file = "SQLAlchemy-1.4.53-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c58e011e9e6373b3a091d83f20601fb335a3b4bace80bfcb914ac168aad3b70d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:670c7769bf5dcae9aff331247b5d82fe635c63731088a46ce68ba2ba519ef36e"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07ba54f09033d387ae9df8d62cbe211ed7304e0bfbece1f8c55e21db9fae5c11"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a38834b4c183c33daf58544281395aad2e985f0b47cca1e88ea5ada88344e63"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:616492f5315128a847f293a7c552f3561ac7e996d2aa5dc46bef4fb0d3781f1d"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cf8c0af9563892c6632f7343bc393dfce6eeef8e4d10c5fadba9c0390520bd"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win32.whl", hash = "sha256:c05fe05941424c2f3747a8952381b7725e24cba2ca00141380e54789d5b616b6"}, + {file = "SQLAlchemy-1.4.53-cp37-cp37m-win_amd64.whl", hash = "sha256:93e90aa3e3b2f8e8cbae4d5509f8e0cf82972378d323c740a8df1c1e9f484172"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:9d7368df54d3ed45a18955f6cec38ebe075290594ac0d5c87a8ddaff7e10de27"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d8ac4158ef68eea8bb0f6dd0583127d9aa8720606964ba8eee20b254f9c83a"}, + {file = 
"SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16bb9fa4d00b4581b14d9f0e2224dc7745b854aa4687738279af0f48f7056c98"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fe5168d0249c23f537950b6d75935ff2709365a113e29938a979aec36668ecf"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8608d162d3bd29d807aab32c3fb6e2f8e225a43d1c54c917fed38513785380"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win32.whl", hash = "sha256:a9d4d132198844bd6828047135ce7b887687c92925049a2468a605fc775c7a1a"}, + {file = "SQLAlchemy-1.4.53-cp38-cp38-win_amd64.whl", hash = "sha256:c15d1f1fcf1f9bec0499ae1d9132b950fcc7730f2d26d10484c8808b4e077816"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:edf094a20a386ff2ec73de65ef18014b250259cb860edc61741e240ca22d6981"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a9c3514ff19d9d30d8a8d378b24cd1dfa5528d20891481cb5f196117db6a48"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaaeedbceb4dfd688fff2faf25a9a87a391f548811494f7bff7fa701b639abc3"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d021699b9007deb7aa715629078830c99a5fec2753d9bdd5ff33290d363ef755"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0465b8a68f8f4de754c1966c45b187ac784ad97bc9747736f913130f0e1adea0"}, + {file = "SQLAlchemy-1.4.53-cp39-cp39-win32.whl", hash = "sha256:5f67b9e9dcac3241781e96575468d55a42332157dee04bdbf781df573dff5f85"}, + {file = 
"SQLAlchemy-1.4.53-cp39-cp39-win_amd64.whl", hash = "sha256:a8c2f2a0b2c4e3b86eb58c9b6bb98548205eea2fba9dae4edfd29dc6aebbe95a"}, + {file = "SQLAlchemy-1.4.53.tar.gz", hash = "sha256:5e6ab710c4c064755fd92d1a417bef360228a19bdf0eee32b03aa0f5f8e9fe0d"}, ] [package.dependencies] @@ -2059,17 +2874,17 @@ aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] @@ -2077,13 +2892,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.36.3" +version = "0.37.2" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, - {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, ] [package.dependencies] @@ -2094,13 +2909,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "taskipy" -version = "1.12.2" +version = "1.13.0" description = "tasks runner for python projects" optional = false -python-versions = ">=3.6,<4.0" +python-versions = "<4.0,>=3.6" files = [ - {file = "taskipy-1.12.2-py3-none-any.whl", hash = "sha256:ffdbb0bb0db54c0ec5c424610a3a087eea22706d4d1f6e3e8b4f12ebba05f98f"}, - {file = "taskipy-1.12.2.tar.gz", hash = "sha256:eadfdc20d6bb94d8018eda32f1dbf584cf4aa6cffb71ba5cc2de20d344f8c4fb"}, + {file = "taskipy-1.13.0-py3-none-any.whl", hash = "sha256:56f42b7e508d9aed2c7b6365f8d3dab62dbd0c768c1ab606c819da4fc38421f7"}, + {file = "taskipy-1.13.0.tar.gz", hash = "sha256:2b52f0257958fed151f1340f7de93fcf0848f7a358ad62ba05c31c2ca04f89fe"}, ] [package.dependencies] @@ -2122,40 +2937,40 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.0" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = 
"sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] name = "tox" -version = "4.15.0" +version = "4.16.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.15.0-py3-none-any.whl", hash = "sha256:300055f335d855b2ab1b12c5802de7f62a36d4fd53f30bd2835f6a201dda46ea"}, - {file = "tox-4.15.0.tar.gz", hash = "sha256:7a0beeef166fbe566f54f795b4906c31b428eddafc0102ac00d20998dd1933f6"}, + {file = "tox-4.16.0-py3-none-any.whl", hash = "sha256:61e101061b977b46cf00093d4319438055290ad0009f84497a07bf2d2d7a06d0"}, + {file = "tox-4.16.0.tar.gz", hash = "sha256:43499656f9949edb681c0f907f86fbfee98677af9919d8b11ae5ad77cb800748"}, ] [package.dependencies] -cachetools = ">=5.3.2" +cachetools = ">=5.3.3" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.13.1" -packaging = ">=23.2" -platformdirs = ">=4.1" -pluggy = ">=1.3" -pyproject-api = ">=1.6.1" -virtualenv = ">=20.25" +filelock = ">=3.15.4" +packaging = ">=24.1" +platformdirs = ">=4.2.2" +pluggy = ">=1.5" +pyproject-api = ">=1.7.1" +virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] +docs = ["furo (>=2024.5.6)", "sphinx (>=7.3.7)", 
"sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.2)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] [[package]] name = "tox-docker" @@ -2173,26 +2988,54 @@ docker = ">=4.0,<7.0" packaging = "*" tox = ">=3.0.0,<5.0" +[[package]] +name = "typer" +version = "0.12.3" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, + {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + [[package]] name = "typing-extensions" -version = "4.12.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, - {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +description = "ASCII transliterations of Unicode text" +optional = false +python-versions = ">=3.5" +files = [ + {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, + {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -2214,20 +3057,71 @@ files = [ [package.dependencies] click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} 
+websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +[[package]] +name = "uvloop" +version = "0.19.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = 
"sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -2265,6 +3159,107 @@ files = [ docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] +[[package]] +name = "watchfiles" +version = "0.22.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"}, + {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"}, + {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"}, + {file = 
"watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"}, + {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"}, + {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"}, + {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"}, + {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"}, + {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"}, + {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"}, + {file = 
"watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"}, + {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"}, + {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"}, + {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"}, + {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, + {file = 
"watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, + {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "watchgod" +version = "0.8.2" +description = "Simple, modern file watching and code reload in python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchgod-0.8.2-py3-none-any.whl", hash = "sha256:2f3e8137d98f493ff58af54ea00f4d1433a6afe2ed08ab331a657df468c6bfce"}, + {file = "watchgod-0.8.2.tar.gz", hash = "sha256:cb11ff66657befba94d828e3b622d5fb76f22fbda1376f355f3e6e51e97d9450"}, +] + +[package.dependencies] +anyio = ">=3.0.0,<4" + [[package]] name = "wcwidth" version = "0.2.13" @@ -2292,6 +3287,104 @@ docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "websockets" +version = "12.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = 
"websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = 
"websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = 
"websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "wrapt" version = "1.16.0" @@ -2474,7 +3567,22 @@ files = [ idna = ">=2.0" multidict = ">=4.0" +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [metadata] lock-version = "2.0" -python-versions = "3.12.2" -content-hash = "24f0aae7895cd93ec401e36a5bfa6a8477e6607b4f43ab65a49dd2cabc15a130" +python-versions = "3.12.3" +content-hash = "09b0ca76fd3a9f6a0ea42c71ac2a72bbb2ab5251caba9321f4f4def7486610a6" diff --git a/pyproject.toml b/pyproject.toml index dc96da14..59badd1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,14 +6,14 @@ build-backend = "poetry.core.masonry.api" # POETRY CONFIGURATION [tool.poetry] name = "myelectricaldata-import" -version = "0.13.2" +version = "1.0.0-rc.14" description = "MyElectricalData official client" authors = ["Clément VALENTIN "] 
readme = "README.md" packages = [{ include = "src" }] [tool.poetry.dependencies] -python = "3.12.2" +python = "3.12.3" paho-mqtt = "^1.6.1" python-dateutil = "^2.8.2" requests = "^2.31.0" @@ -28,9 +28,9 @@ waitress = "^2.1.2" rauth = "^0.7.3" psycopg2-binary = "^2.9.9" pymysql = "^1.1.0" -fastapi = "^0.109.0" +fastapi = "^0.111.0" uvicorn = "^0.25.0" -python-multipart = "^0.0.6" +python-multipart = "^0.0.7" pypdf = "^3.17.4" asyncio = "^3.4.3" aiohttp = "^3.9.1" @@ -38,6 +38,16 @@ websocket-client = "^1.7.0" sqlalchemy = "^1.0.0" fastapi-utils = "^0.2.1" pytz = "^2023.3.post1" +opentelemetry-distro = ">0.46b0" +opentelemetry-exporter-otlp = ">1.25.0" +opentelemetry-instrumentation-requests = ">0.46b0" +opentelemetry-instrumentation-fastapi = ">0.46b0" +opentelemetry-instrumentation-sqlalchemy = ">0.46b0" +ruamel-yaml = "^0.18.6" +unidecode = "^1.3.8" +deepdiff = "^7.0.1" +setuptools = ">72.0.0" +fastapi-lifespan-manager = "^0.1.4" [tool.poetry.group.dev.dependencies] pytest = "^7.4.4" @@ -56,17 +66,20 @@ pep8-naming = "^0.13.3" coverage = "^7.4.0" pre-commit = "^3.6.0" mock = "^5.1.0" -pylint = "^2.9.6" +pylint = "^3.1.0" tox-docker = "^4.1.0" vulture = "^2.11" ruff = "^0.2.2" isort = "^5.13.2" conventional-pre-commit = "^3.1.0" pytest-dotenv = "^0.5.2" +taskipy = "^1.12.2" +pylint-flask-sqlalchemy = "^0.2.0" +pylint-sqlalchemy = "^0.3.0" +watchgod = "^0.8.2" ############################################# # SEMESTIC RELEASE CONFIGURATION -taskipy = "^1.12.2" [tool.semantic_release] assets = [] commit_message = "{version}\n\nAutomatically generated by python-semantic-release" @@ -132,7 +145,7 @@ upload_to_vcs_release = true ############################################# # PYTEST CONFIGURATION [tool.pytest.ini_options] -norecursedirs = ".git .tox *.egg* old docs dist build" +norecursedirs = ".git .tox *.egg* old docs dist build data" log_cli_level = "DEBUG" addopts = "-rw" pythonpath = [ diff --git a/src/__init__.py b/src/__init__.py index e69de29b..ded90369 100644 --- 
a/src/__init__.py +++ b/src/__init__.py @@ -0,0 +1 @@ +"""Init file for MyElectricalData.""" diff --git a/src/__version__.py b/src/__version__.py index 8b0390b3..5e9984b4 100644 --- a/src/__version__.py +++ b/src/__version__.py @@ -1,3 +1,3 @@ """Application version update by Semantic Release.""" -VERSION = "0.13.2" +VERSION = "1.0.0-rc.14" diff --git a/src/config.py b/src/config.py deleted file mode 100755 index 99e3d6d1..00000000 --- a/src/config.py +++ /dev/null @@ -1,10 +0,0 @@ -LOG_FORMAT = "%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s" -LOG_FORMAT_DATE = "%Y-%m-%d %H:%M:%S" - -URL = "https://myelectricaldata.fr" - -MAX_IMPORT_TRY = 20 -cycle_minimun = 3600 - -DAILY_MAX_DAYS = 1094 -DETAIL_MAX_DAYS = 728 diff --git a/src/config/backend.py b/src/config/backend.py new file mode 100644 index 00000000..fb1f6f40 --- /dev/null +++ b/src/config/backend.py @@ -0,0 +1,54 @@ +"""Backend configuration.""" +import inspect + +from utils import edit_config + + +class Backend: + """Backend configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + # LOCAL PROPERTIES + self._uri: str = None + # PROPERTIES + self.key = "backend" + self.json: dict = {} + self.comments = { + "backend": "SQLite (sqlite:///data/myelectricaldata.db) ou PostgreSQL (postgresql://USER:PASSWORD@HOSTNAME:PORT/DBNAME)" + } + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return {"uri": "sqlite:////data/myelectricaldata.db"} + + def load(self): + """Load configuration from file.""" + try: + sub_key = "uri" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + 
setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + + @property + def uri(self) -> str: + """CIDR Listen address.""" + return self._uri + + @uri.setter + def uri(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/gateway.py b/src/config/gateway.py new file mode 100644 index 00000000..06226c63 --- /dev/null +++ b/src/config/gateway.py @@ -0,0 +1,67 @@ +"""Gateway configuration.""" +import inspect + +from utils import edit_config, str2bool + + +class Gateway: + """Gateway configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config: dict = config + self.write = write + # LOCAL PROPERTIES + self._url: str = None + self._ssl: bool = None + # PROPERTIES + self.key: str = "gateway" + self.json: dict = {} + self.comments = {"gateway": "MyElectricalData configuration."} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return {"url": "myelectricaldata.fr", "ssl": True} + + def load(self): + """Load configuration from file.""" + try: + sub_key = "url" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "ssl" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + + @property + def url(self) -> str: + """Gateway URL.""" + return self._url + + @url.setter + def url(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property 
+ def ssl(self) -> bool: + """Enable HTTPS to all gateway call.""" + return self._ssl + + @ssl.setter + def ssl(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/home_assistant.py b/src/config/home_assistant.py new file mode 100644 index 00000000..ab491783 --- /dev/null +++ b/src/config/home_assistant.py @@ -0,0 +1,73 @@ +"""Server configuration.""" +import inspect + +from database.config import DatabaseConfig +from utils import edit_config, str2bool + + +class HomeAssistant: + """Home Assistant configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config: dict = config + self.write: dict = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._enable: bool = None + self._discovery_prefix: str = None + # PROPERTIES + self.key: str = "home_assistant" + self.json: dict = {} + self.comments = {"home_assistant": 'Configuration pour le "MQTT Discovery" de Home Assistant.'} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return configuration as dictionary.""" + return {"enable": False, "discovery_prefix": "homeassistant"} + + def load(self) -> dict: + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "discovery_prefix" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, 
**{key: value}} + self.db.set(self.key, new_config) + + @property + def enable(self) -> bool: + """Home Assistant enable.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def discovery_prefix(self) -> str: + """Home Assistant MQTT discovery prefix.""" + return self._discovery_prefix + + @discovery_prefix.setter + def discovery_prefix(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/home_assistant_ws.py b/src/config/home_assistant_ws.py new file mode 100644 index 00000000..1b3ff996 --- /dev/null +++ b/src/config/home_assistant_ws.py @@ -0,0 +1,159 @@ +"""Server configuration.""" +import inspect + +from database.config import DatabaseConfig +from utils import edit_config, str2bool + + +class HomeAssistantWs: + """Home Assistant Websocket configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config: dict = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._enable: bool = None + self._ssl: bool = None + self._token: str = None + self._url: str = None + self._purge: bool = None + self._batch_size: int = None + self._max_date: str = None + # PROPERTIES + self.key: str = "home_assistant_ws" + self.json: dict = {} + self.comments = { + "home_assistant_ws": "Home Assistant Websocket configuration pour l'importation des données dans " + 'l\'onglet "Energy".' 
+        }
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {
+            "enable": False,
+            "ssl": False,
+            "token": "",
+            "url": "ws://localhost:8123",
+            "purge": False,
+            "batch_size": 1000,
+            "max_date": None,
+        }
+
+    def load(self):  # noqa: PLR0912
+        """Load configuration from file, falling back to defaults on any missing/invalid key."""
+        try:
+            sub_key = "enable"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "ssl"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "token"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "url"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "purge"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "batch_size"
+            self.change(sub_key, int(self.config[self.key][sub_key]), False)  # FIX: False was int()'s base arg, not write_file
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "max_date"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: self.json}, comments=self.comments)
+            self.db.set(self.key, self.json)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration (update attribute, JSON mirror, and optionally file + DB)."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {key: value}})
+            current_config = self.db.get(self.key)
+            new_config = {**current_config, **{key: value}}
+
self.db.set(self.key, new_config) + + @property + def enable(self) -> bool: + """Enable/Disable service.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def ssl(self) -> bool: + """Enable SSL (https).""" + return self._ssl + + @ssl.setter + def ssl(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def token(self) -> str: + """Home Assistant long life Token (profile).""" + return self._token + + @token.setter + def token(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def url(self) -> str: + """Home assistant Url.""" + return self._url + + @url.setter + def url(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def purge(self) -> bool: + """Home assistant Purge data.""" + return self._purge + + @purge.setter + def purge(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def batch_size(self) -> int: + """Home assistant WS batch_size.""" + return self._batch_size + + @batch_size.setter + def batch_size(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def max_date(self) -> str: + """Home assistant WS Max date import.""" + return self._max_date + + @max_date.setter + def max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/influxdb.py b/src/config/influxdb.py new file mode 100644 index 00000000..c095d16d --- /dev/null +++ b/src/config/influxdb.py @@ -0,0 +1,402 @@ +"""InfluxDB configuration.""" +import inspect +import sys + +from database.config import DatabaseConfig +from utils import edit_config, str2bool + + +class BatchOptions: + """InfluxDB Batch Option.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config: dict = config + self.write = write + # LOCAL PROPERTIES + self._batch_size: int = 
None
+        self._flush_interval: int = None
+        self._jitter_interval: int = None
+        self._retry_interval: int = None
+        self._max_retry_time: str = None
+        self._max_retries: int = None
+        self._max_retry_delay: str = None
+        self._exponential_base: int = None
+        # PROPERTIES
+        self.key: str = "influxdb"
+        self.sub_key: str = "batching_options"
+        self.json: dict = {}
+        self.comments = {
+            "influxdb": (
+                "Permet d'exporter vos données vers un serveur InfluxDB et d'exploiter vos "
+                "données avec Grafana (ou autre)."
+            )
+        }
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {
+            "batch_size": 1000,
+            "flush_interval": 1000,
+            "jitter_interval": 0,
+            "retry_interval": 5000,
+            "max_retry_time": "180_000",
+            "max_retries": 5,
+            "max_retry_delay": "125_000",
+            "exponential_base": 2,
+        }
+
+    def load(self):  # noqa: PLR0912
+        """Load configuration from file, falling back to defaults on any missing/invalid key."""
+        try:
+            sub_key = "batch_size"
+            self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key]), False)  # FIX: False was int()'s base arg
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "flush_interval"
+            self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key]), False)  # FIX: False was int()'s base arg
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "jitter_interval"
+            self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key]), False)  # FIX: False was int()'s base arg
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "retry_interval"
+            self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key]), False)  # FIX: False was int()'s base arg
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "max_retry_time"
+            self.change(sub_key, self.config[self.key][self.sub_key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "max_retries"
+            self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key]), False)  # FIX: False was int()'s base arg
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "max_retry_delay"
+            self.change(sub_key, self.config[self.key][self.sub_key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "exponential_base"
+            self.change(sub_key, int(self.config[self.key][self.sub_key][sub_key]), False)  # FIX: False was int()'s base arg
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+
+        # Save configuration
+        if self.write:
+            edit_config(data={self.key: {self.sub_key: self.json}}, comments=self.comments)
+
+    def change(self, key: str, value: str, write_file: bool = True) -> None:
+        """Change configuration (update attribute, JSON mirror, and optionally the file)."""
+        setattr(self, f"_{key}", value)
+        self.json[key] = value
+        if write_file:
+            edit_config({self.key: {self.sub_key: {key: value}}})
+
+    @property
+    def batch_size(self) -> int:
+        """Batch size."""
+        return self._batch_size
+
+    @batch_size.setter
+    def batch_size(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def flush_interval(self) -> int:
+        """Flush interval."""
+        return self._flush_interval
+
+    @flush_interval.setter
+    def flush_interval(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def jitter_interval(self) -> int:
+        """Jitter interval."""
+        return self._jitter_interval
+
+    @jitter_interval.setter
+    def jitter_interval(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def retry_interval(self) -> int:
+        """Retry interval."""
+        return self._retry_interval
+
+    @retry_interval.setter
+    def retry_interval(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def max_retry_time(self) -> str:
+        """Max retry time."""
+        return self._max_retry_time
+
+    @max_retry_time.setter
+    def max_retry_time(self, value):
+        self.change(inspect.currentframe().f_code.co_name, value)
+
+    @property
+    def max_retries(self) -> int:
+        """Max retries."""
+        return
self._max_retries + + @max_retries.setter + def max_retries(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def max_retry_delay(self) -> str: + """Max retry delay.""" + return self._max_retry_delay + + @max_retry_delay.setter + def max_retry_delay(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def exponential_base(self) -> int: + """Exponential base.""" + return self._exponential_base + + @exponential_base.setter + def exponential_base(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + +class Method: + """InfluxDB Method.""" + + def __init__(self) -> None: + self.synchronous: str = "synchronous" + self.asynchronous: str = "asynchronous" + self.batching: str = "batching" + + +class InfluxDB: + """InfluxDB configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config: dict = config + self.write: dict = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._batching_options: BatchOptions = BatchOptions(self.config, self.write) + self._enable: bool = self.default()["enable"] + self._scheme: str = self.default()["scheme"] + self._hostname: str = self.default()["hostname"] + self._port: int = self.default()["port"] + self._token: str = self.default()["token"] + self._org: str = self.default()["org"] + self._bucket: str = self.default()["bucket"] + self._method: Method = self.default()["method"] + self._timezone: str = self.default()["timezone"] + self._wipe: str = self.default()["wipe"] + # PROPERTIES + self.key: str = "influxdb" + self.json: dict = {"batching_options": self._batching_options.json} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": False, + "scheme": "http", + "hostname": "localhost", + "port": 8086, + "token": "my-token", + "org": "myorg", + "bucket": "mybucket", + "method": Method().synchronous, + "timezone": "UTC", + 
"wipe": False, + "batching_options": self._batching_options.json, + } + + def load(self): # noqa: PLR0912, C901, PLR0915 + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "scheme" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "hostname" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "port" + self.change(sub_key, int(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "token" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "org" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "bucket" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "method" + current_method = self.config[self.key][sub_key].lower() + method = Method() + method_available = "" + for value in method.__dict__.values(): + method_available += f"{value}, " + if current_method not in method.__dict__.values(): + sys.exit( + f'[InfluxDB] Erreur de configuration, la méthode "{current_method}" ' + f"n'éxiste pas. 
({method_available[:-2]})" + ) + self.change(sub_key, self.config[self.key][sub_key].lower(), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "timezone" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "wipe" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config({self.key: self.json}) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def enable(self) -> bool: + """InfluxDB enable.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def scheme(self) -> str: + """InfluxDB scheme.""" + return self._scheme + + @scheme.setter + def scheme(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def hostname(self) -> str: + """InfluxDB hostname.""" + return self._hostname + + @hostname.setter + def hostname(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def port(self) -> int: + """InfluxDB port.""" + return self._port + + @port.setter + def port(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def token(self) -> str: + """InfluxDB token.""" + return self._token + + @token.setter + def token(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def org(self) -> str: + """InfluxDB org.""" + 
return self._org + + @org.setter + def org(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def bucket(self) -> str: + """InfluxDB bucket.""" + return self._bucket + + @bucket.setter + def bucket(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def method(self) -> str: + """InfluxDB method.""" + return self._method + + @method.setter + def method(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def timezone(self) -> str: + """InfluxDB timezone.""" + return self._timezone + + @timezone.setter + def timezone(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def wipe(self) -> str: + """InfluxDB wipe.""" + return self._wipe + + @wipe.setter + def wipe(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def batching_options(self) -> str: + """Batching options.""" + return self._batching_options + + @batching_options.setter + def batching_options(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/log.py b/src/config/log.py new file mode 100644 index 00000000..4ee25849 --- /dev/null +++ b/src/config/log.py @@ -0,0 +1,154 @@ +"""Logging configuration.""" +import inspect +import logging + +from database.config import DatabaseConfig +from utils import edit_config, str2bool + + +class Logging: + """Logging configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._log_format: str = None + self._log_format_date: str = None + self._log2file: bool = None + self._debug: bool = None + self._log_level: int = None + self._log_http: bool = None + # PROPERTIES + self.key = "logging" + self.json: dict = {} + self.comments = {"logging": 'Permet de "custom" la gestion des logs de l\'application.'} + # FUNCTION + 
self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "log_format": "%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s", + "log_format_date": "%Y-%m-%d %H:%M:%S", + "log2file": False, + "log_level": logging.INFO, + "debug": False, + "log_http": False, + } + + def load(self): # noqa: PLR0912 + """Load configuration from file.""" + try: + sub_key = "log_format" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "log_format_date" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "log2file" + if "log2file" in self.config: + self.change(sub_key, str2bool(self.config["log2file"]), False) + del self.config["log2file"] + else: + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "debug" + if "debug" in self.config: + self.change(sub_key, str2bool(self.config["debug"]), False) + del self.config["debug"] + else: + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + self._log_level = logging.DEBUG if self._debug else logging.INFO + except Exception: + self.log_level = self.default()["log_level"] + try: + sub_key = "log_http" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + 
edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def log_format(self) -> str: + """Log format.""" + return self._log_format + + @log_format.setter + def log_format(self, value): + self._log_format = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def log_format_date(self) -> str: + """Log format date.""" + return self._log_format_date + + @log_format_date.setter + def log_format_date(self, value): + self._log_format_date = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def log2file(self) -> bool: + """Log to file.""" + return self._log2file + + @log2file.setter + def log2file(self, value): + self._log2file = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def debug(self) -> bool: + """Debug mode.""" + return self._debug + + @debug.setter + def debug(self, value): + self._debug = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def log_level(self) -> int: + """Log level.""" + return self._log_level + + @log_level.setter + def log_level(self, value): + self._log_level = value + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def log_http(self) -> bool: + """Log HTTP requests.""" + return self._log_http + + @log_http.setter + def log_http(self, value): + self._log_http = value + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/main.py b/src/config/main.py new file mode 100755 index 00000000..1d121411 --- /dev/null +++ b/src/config/main.py @@ -0,0 +1,309 @@ +"""Configuration class loader and checker.""" +import locale +import logging +import sys +from os import getenv +from typing import List + +from deepdiff import DeepDiff +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from 
opentelemetry.instrumentation.fastapi import FastAPIInstrumentor +from opentelemetry.instrumentation.requests import RequestsInstrumentor +from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor +from opentelemetry.sdk.trace import Resource, TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +from __version__ import VERSION +from config.backend import Backend +from config.gateway import Gateway +from config.home_assistant import HomeAssistant +from config.home_assistant_ws import HomeAssistantWs +from config.influxdb import InfluxDB +from config.log import Logging +from config.mqtt import MQTT +from config.myelectricaldata import MyElectricalData, UsagePointId +from config.optel import OpTel +from config.server import Server +from const import URL_CONFIG_FILE +from database.usage_points import DatabaseUsagePoints +from utils import barcode_message, edit_config, load_config, logo, str2bool, title + +locale.setlocale(locale.LC_ALL, "fr_FR.UTF-8") + + +class Configuration: + """Configuration Templates.""" + + def __init__(self) -> None: + self.dev = str2bool(getenv("DEV", "False")) + + file_config = load_config() + self.application_path = file_config.application_path + self.application_path_data = file_config.application_path_data + self.application_path_log = file_config.application_path_log + self.config_file = file_config.config_file + self.config = file_config.config + + # Load config + self.opentelemetry: OpTel = OpTel(self.config) + self.logging: Logging = Logging(self.config) + self.myelectricaldata: MyElectricalData = MyElectricalData(self.config) + self.influxdb: InfluxDB = InfluxDB(self.config) + self.home_assistant_ws: HomeAssistantWs = HomeAssistantWs(self.config) + self.home_assistant: HomeAssistant = HomeAssistant(self.config) + self.mqtt: MQTT = MQTT(self.config) + self.gateway: Gateway = Gateway(self.config) + self.backend: Backend = Backend(self.config) + self.server: Server = Server(self.config) + + 
+class Config: + """Represent the configuration settings for the application.""" + + def __init__(self): + self.config = Configuration() + self.default = {} + + # SHORTCUT + self.application_path = self.config.application_path + self.application_path_data = self.config.application_path_data + self.application_path_log = self.config.application_path_log + self.config_file = self.config.config_file + self.application_path = self.config.application_path + self.dev = self.config.dev + self.opentelemetry = self.config.opentelemetry + self.logging = self.config.logging + self.myelectricaldata = self.config.myelectricaldata + self.influxdb = self.config.influxdb + self.home_assistant_ws = self.config.home_assistant_ws + self.home_assistant = self.config.home_assistant + self.mqtt = self.config.mqtt + self.gateway = self.config.gateway + self.backend = self.config.backend + self.server = self.config.server + + # ENVIRONMENT VARIABLE + self.debug = str2bool(getenv("DEBUG", "False")) + + self.tracer = None + self.load_logging() + self.setup_tracing() + logo(VERSION) + + comments = None + for key in self.config.config: + attr = getattr(self.config, key, None) + if attr is not None and getattr(attr, "__dict__", False): + comments = attr.__dict__["comments"] if "comments" in attr.__dict__ else None + self.default[key] = attr.default() + + self.check_config() + if self.dev: + barcode_message("DEV MODE") + exemple_file = "config.example.yaml" + edit_config(data=self.default, file=exemple_file, comments=comments, wipe=True) + edit_config( + data=self.default, + file=f"{self.application_path}/templates/{exemple_file}", + comments=comments, + wipe=True, + ) + title([f"Generate {exemple_file}", f" => {exemple_file} generated"]) + self.display() + self.clean_database() + + def clean_database(self): + """Clean database.""" + title("Nettoyage de la base de données...") + usage_point_list: List[UsagePointId] = [] + if self.myelectricaldata.usage_point_config is not None: + for upi, _ in 
self.myelectricaldata.usage_point_config.items():
+                usage_point_list.append(upi)
+        for usage_point in DatabaseUsagePoints().get_all():
+            if usage_point.usage_point_id not in usage_point_list:
+                DatabaseUsagePoints(usage_point.usage_point_id).delete()
+
+    def check_config(self):
+        """Check current config file for unknown keys; exit(1) if any are found."""
+        # CHECK CLASSIC KEYS
+        diff_config = DeepDiff(self.default, self.config.config, ignore_order=True, exclude_paths=["myelectricaldata"])
+        found = ""
+        for diff in diff_config.get("dictionary_item_added", {}):
+            found += "\n - " + str(diff.replace("root", "")[2:-2]).replace("']['", ".")  # FIX: nested double quotes in f-string are a SyntaxError before Python 3.12
+
+        # CHECK MYELETRICALDATA KEYS
+        for usage_point_id, data in self.config.config.get("myelectricaldata", {}).items():  # FIX: .get avoids KeyError when section absent
+            usage_point_default = UsagePointId(self.config, usage_point_id, False).default()
+            diff_config = DeepDiff(usage_point_default, data, ignore_order=True)
+            for diff in diff_config.get("dictionary_item_added", {}):
+                key = str(diff.replace("root", "")[2:-2]).replace("']['", ".")
+                found += f"\n - myelectricaldata.{usage_point_id}.{key}"
+        if found:
+            logging.critical(f"\nDes valeurs inutiles ont étaient détectées dans le fichier de configuration :{found}")
+            logging.critical(
+                f"""
+    Impossible de charger le fichier de configuration.
+ + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + sys.exit(1) + + def load_logging(self): + """Configure logging.""" + + class NewLineFormatter(logging.Formatter): + """Split carrier return in multiple messages.""" + + def __init__(self, fmt, datefmt=None): + """Init given the log line format and date format.""" + logging.Formatter.__init__(self, fmt, datefmt) + + def format(self, record): + """Override format function.""" + msg = logging.Formatter.format(self, record) + + if record.message != "": + parts = msg.split(record.message) + msg = msg.replace("\n", "\n" + parts[0]) + + return msg + + root_logger = logging.getLogger() + if len(root_logger.handlers) > 0: + root_logger.removeHandler(root_logger.handlers[0]) + + if self.config.logging.log2file: + logging.basicConfig( + filename=f"{self.config.application_path_log}/myelectricaldata.log", + format=self.config.logging.log_format, + datefmt=self.config.logging.log_format_date, + level=self.config.logging.log_level, + ) + console = logging.StreamHandler() + console.setLevel(self.config.logging.log_level) + formatter = logging.Formatter(self.config.logging.log_format, datefmt=self.config.logging.log_format_date) + console.setFormatter(formatter) + logging.getLogger("").addHandler(console) + else: + logging.basicConfig( + format=self.config.logging.log_format, + datefmt=self.config.logging.log_format_date, + level=self.config.logging.log_level, + ) + formatter = NewLineFormatter(self.config.logging.log_format, datefmt=self.config.logging.log_format_date) + lg = logging.getLogger() + lg.handlers[0].setFormatter(formatter) + lg.setLevel(self.config.logging.log_level) + + if self.config.logging.debug: + logging.debug(" => Starting in Debug mode : %s", self.config.logging.debug) + + def display(self): + """Display the configuration settings. + + This method logs the configuration settings to the console, hiding sensitive information such as passwords + and tokens. 
+ + Args: + None + + Returns: + None + """ + + def message(key, value="", indent=4): + """Hidden password.""" + value = value if key not in ["token", "password"] else "** hidden **" + logging.info("%s| %s: %s", " " * indent, key, value) + + logging.info("Affichage de la configuration :") + for key, value in self.config.config.items(): + title_key = key.replace("_", " ").capitalize() + if not isinstance(value, dict): + logging.info(f"* {title_key}: {value}") + else: + logging.info(f"* {title_key}:") + for sub_key, sub_value in value.items(): + if not isinstance(sub_value, dict): + message(sub_key, sub_value) + else: + message(sub_key) + for sub_sub_key, sub_sub_value in sub_value.items(): + message(sub_sub_key, sub_sub_value, 8) + + def usage_point_id_config(self, usage_point_id) -> UsagePointId: + """Return the configuration for a specific usage point. + + Args: + usage_point_id (str): The ID of the usage point. + + Returns: + dict: A dictionary containing the configuration for the specified usage point. + """ + if usage_point_id in self.config.myelectricaldata.usage_point_config: + return self.config.myelectricaldata.usage_point_config[usage_point_id] + return False + + def set_usage_point_config(self, usage_point_id, key, value): + """Set the configuration for a specific usage point. + + Args: + usage_point_id (str): The ID of the usage point. + key (str): The configuration key. + value (str): The configuration value. 
+ """ + if usage_point_id not in self.config.myelectricaldata.usage_point_config: + setattr(self.config.myelectricaldata.usage_point_config[usage_point_id], key, value) + else: + logging.error("Usage point ID not found in configuration") + + def ssl_config(self): + """Return the SSL configuration if it exists, otherwise returns an empty dictionary.""" + if self.config.server.keyfile is not None and self.config.server.certfile is not None: + return { + "ssl_keyfile": self.config.server.keyfile, + "ssl_certfile": self.config.server.certfile, + } + return {} + + def setup_tracing(self): + """OTEL setup.""" + if self.config.opentelemetry.enable: # no pragma: no cover + RequestsInstrumentor().instrument() + resource_attributes = { + "service.name": self.config.opentelemetry.service_name, + "telemetry.version": VERSION, + "service.version": VERSION, + "env": self.config.opentelemetry.environment, + "Deployment.environment": self.config.opentelemetry.environment, + } + resource = Resource.create(resource_attributes) + provider = TracerProvider(resource=resource) + processor = BatchSpanProcessor( + OTLPSpanExporter(endpoint=self.config.opentelemetry.endpoint, insecure=True), + export_timeout_millis=5, + ) + provider.add_span_processor(processor) + trace.set_tracer_provider(provider) + else: + trace.set_tracer_provider(trace.NoOpTracerProvider()) + self.tracer = trace.get_tracer_provider().get_tracer("main") + self.tracing_sqlalchemy() + + def tracing_sqlalchemy(self): + """SQLAchemy Tracing.""" + if self.config.opentelemetry.enable and "sqlalchemy" in self.config.opentelemetry.extension: + logging.debug("[OpenTelemetry] SQLAchemy loaded") + SQLAlchemyInstrumentor().instrument(enable_commenter=True, commenter_options={}) + + def tracing_fastapi(self, app): + """FastAPI Tracing.""" + if self.config.opentelemetry.enable and "fastapi" in self.config.opentelemetry.extension: + logging.debug("[OpenTelemetry] FastAPI loaded") + FastAPIInstrumentor.instrument_app(app) + +if 
__name__ == "config.main":
+    APP_CONFIG = Config()
diff --git a/src/config/mqtt.py b/src/config/mqtt.py
new file mode 100644
index 00000000..e8a73e96
--- /dev/null
+++ b/src/config/mqtt.py
@@ -0,0 +1,204 @@
+"""MQTT configuration."""
+import inspect
+
+from database.config import DatabaseConfig
+from utils import edit_config, str2bool
+
+
+class MQTT:
+    """MQTT Option."""
+
+    def __init__(self, config: dict, write: bool = True) -> None:
+        self.config = config
+        self.write = write
+        self.db = DatabaseConfig()
+        # LOCAL PROPERTIES
+        self._enable: bool = None
+        self._hostname: str = None
+        self._port: int = None
+        self._username: str = None
+        self._password: str = None
+        self._prefix: str = None
+        self._client_id: str = None
+        self._retain: bool = None
+        self._qos: int = None
+        self._cert: str = None
+        # PROPERTIES
+        self.key = "mqtt"
+        self.json: dict = {}
+        self.comments = {"mqtt": "Configuration du serveur MQTT (nécéssaire pour Home Assistant)."}
+        # FUNCTION
+        self.load()
+
+    def default(self) -> dict:
+        """Return default configuration as dictionary."""
+        return {
+            "enable": False,
+            "hostname": "localhost",
+            "port": 1883,
+            "username": "",
+            "password": "",
+            "prefix": "myelectricaldata",
+            "client_id": "myelectricaldata",
+            "retain": True,
+            "qos": 0,
+            "cert": False,
+        }
+
+    def load(self):  # noqa: C901, PLR0912, PLR0915
+        """Load configuration from file, falling back to defaults on any missing/invalid key."""
+        try:
+            sub_key = "enable"
+            self.change(sub_key, str2bool(self.config[self.key][sub_key]), False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "hostname"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "port"
+            self.change(sub_key, int(self.config[self.key][sub_key]), False)  # FIX: False was int()'s base arg, not write_file
+        except Exception:
+            self.change(sub_key, self.default()[sub_key], False)
+        try:
+            sub_key = "username"
+            self.change(sub_key, self.config[self.key][sub_key], False)
+        except
Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "password" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "prefix" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "client_id" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "retain" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "qos" + self.change(sub_key, int(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "cert" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def enable(self) -> bool: + """Enable/Disable MQTT.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def hostname(self) -> str: + """MQTT hostname.""" + return self._hostname + + @hostname.setter + def hostname(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def 
port(self) -> int: + """MQTT port.""" + return self._port + + @port.setter + def port(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def username(self) -> str: + """MQTT username.""" + return self._username + + @username.setter + def username(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def password(self) -> str: + """MQTT password.""" + return self._password + + @password.setter + def password(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def prefix(self) -> str: + """MQTT prefix.""" + return self._prefix + + @prefix.setter + def prefix(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def client_id(self) -> str: + """MQTT client_id.""" + return self._client_id + + @client_id.setter + def client_id(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def retain(self) -> bool: + """MQTT retain.""" + return self._retain + + @retain.setter + def retain(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def qos(self) -> int: + """MQTT qos.""" + return self._qos + + @qos.setter + def qos(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def cert(self) -> str: + """MQTT cert.""" + return self._cert + + @cert.setter + def cert(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/myelectricaldata.py b/src/config/myelectricaldata.py new file mode 100644 index 00000000..adfa48ea --- /dev/null +++ b/src/config/myelectricaldata.py @@ -0,0 +1,547 @@ +"""MyElectricalData configuration.""" +import inspect +import sys +from datetime import datetime + +from const import TIMEZONE_UTC +from database.usage_points import DatabaseUsagePoints +from utils import edit_config, str2bool + + +class Plan: + """Plan templates.""" + + def __init__(self) -> None: + 
self.base: str = "BASE" + self.hchp: str = "HC/HP" + self.tempo: str = "TEMPO" + + +class UsagePointId: + """UsagePoint templates.""" + + def __init__(self, config: dict, usage_point_id: str, write: bool = True) -> None: + self.usage_point_id: str = usage_point_id + self.config: dict = config + self.write: bool = write + self.db = DatabaseUsagePoints(self.usage_point_id) + # LOCAL PROPERTIES + self._enable: bool = None + self._name: str = None + self._token: str = None + self._cache: bool = None + self._plan: Plan = None + self._consumption: bool = None + self._consumption_detail: bool = None + self._consumption_max_power: bool = None + self._consumption_price_hc: float = None + self._consumption_price_hp: float = None + self._consumption_price_base: float = None + self._consumption_max_date: str = None + self._consumption_detail_max_date: str = None + self._production: bool = None + self._production_detail: bool = None + self._production_max_date: str = None + self._production_detail_max_date: str = None + self._production_price: float = None + self._offpeak_hours_0: str = None + self._offpeak_hours_1: str = None + self._offpeak_hours_2: str = None + self._offpeak_hours_3: str = None + self._offpeak_hours_4: str = None + self._offpeak_hours_5: str = None + self._offpeak_hours_6: str = None + self._refresh_addresse: bool = None + self._refresh_contract: bool = None + # PROPERTIES + self.key: str = "myelectricaldata" + self.json: dict = {} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": True, + "name": self.usage_point_id, + "token": "VOTRE_TOKEN_MYELECTRICALDATA", + "cache": True, + "plan": Plan().base, + "consumption": True, + "consumption_detail": True, + "consumption_max_power": True, + "consumption_price_hc": 0, + "consumption_price_hp": 0, + "consumption_price_base": 0, + "consumption_max_date": "", + "consumption_detail_max_date": "", + "production": False, + 
"production_detail": False, + "production_max_date": "", + "production_detail_max_date": "", + "production_price": 0, + "offpeak_hours_0": "", + "offpeak_hours_1": "", + "offpeak_hours_2": "", + "offpeak_hours_3": "", + "offpeak_hours_4": "", + "offpeak_hours_5": "", + "offpeak_hours_6": "", + "refresh_addresse": False, + "refresh_contract": False, + } + + def load(self): # noqa: C901, PLR0912, PLR0915 + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "name" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "token" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "cache" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "plan" + current_plan = self.config[self.key][self.usage_point_id][sub_key].upper() + plan = Plan() + plan_available = "" + for value in plan.__dict__.values(): + plan_available += f"{value}, " + if current_plan not in plan.__dict__.values(): + sys.exit( + f'[MyElectricalData][{self.usage_point_id}] Erreur de configuration, le plan "{current_plan} ' + f"n'éxiste pas. 
({plan_available[:-2]})" + ) + + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key].upper(), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption_detail" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption_max_power" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption_price_hc" + self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption_price_hp" + self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption_price_base" + self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + self.sub_key = "consumption_max_date" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "consumption_detail_max_date" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "production" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + 
self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "production_detail" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "production_max_date" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "production_detail_max_date" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "production_price" + self.change(sub_key, float(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_0" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_1" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_2" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_3" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_4" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_5" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + 
self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "offpeak_hours_6" + self.change(sub_key, self.config[self.key][self.usage_point_id][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "refresh_addresse" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "refresh_contract" + self.change(sub_key, str2bool(self.config[self.key][self.usage_point_id][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config({self.key: {self.usage_point_id: self.json}}) + data = {} + for key, value in self.json.items(): + data[key] = self.check_format(key, value) + self.db.set(data) + + def check_format(self, key, value): + """Check if value is a datetime and return in datetime format (if datetime).""" + try: + if value == "": + return None + if key in [ + "consumption_max_date", + "consumption_detail_max_date", + "production_max_date", + "production_detail_max_date", + ]: + return datetime.strptime(value, "%Y-%m-%d").replace(tzinfo=TIMEZONE_UTC) + return value + except Exception: + return None + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {self.usage_point_id: {key: value}}}) + self.db.set_value(key, self.check_format(key, value)) + + @property + def enable(self) -> bool: + """Enable/Disable UsagePoint.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def name(self) -> str: + """UsagePoint name.""" + return self._name + + @name.setter + def name(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + 
@property + def token(self) -> str: + """UsagePoint token.""" + return self._token + + @token.setter + def token(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def cache(self) -> bool: + """Enable/Disable cache.""" + return self._cache + + @cache.setter + def cache(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def plan(self) -> str: + """UsagePoint plan.""" + return self._plan + + @plan.setter + def plan(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption(self) -> bool: + """Enable/Disable consumption.""" + return self._consumption + + @consumption.setter + def consumption(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_detail(self) -> bool: + """Enable/Disable consumption detail.""" + return self._consumption_detail + + @consumption_detail.setter + def consumption_detail(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_max_power(self) -> bool: + """Enable/Disable consumption max power.""" + return self._consumption_max_power + + @consumption_max_power.setter + def consumption_max_power(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_price_hc(self) -> float: + """Consumption price HC.""" + return self._consumption_price_hc + + @consumption_price_hc.setter + def consumption_price_hc(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_price_hp(self) -> float: + """Consumption price HP.""" + return self._consumption_price_hp + + @consumption_price_hp.setter + def consumption_price_hp(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_price_base(self) -> float: + """Consumption price BASE.""" + return self._consumption_price_base + + 
@consumption_price_base.setter + def consumption_price_base(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_max_date(self) -> str: + """Consumption max date.""" + return self._consumption_max_date + + @consumption_max_date.setter + def consumption_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def consumption_detail_max_date(self) -> str: + """Consumption detail max date.""" + return self._consumption_detail_max_date + + @consumption_detail_max_date.setter + def consumption_detail_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production(self) -> bool: + """Enable/Disable production.""" + return self._production + + @production.setter + def production(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_detail(self) -> bool: + """Enable/Disable production detail.""" + return self._production_detail + + @production_detail.setter + def production_detail(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_max_date(self) -> str: + """Production max date.""" + return self._production_max_date + + @production_max_date.setter + def production_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_detail_max_date(self) -> str: + """Production detail max date.""" + return self._production_detail_max_date + + @production_detail_max_date.setter + def production_detail_max_date(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def production_price(self) -> float: + """Production price.""" + return self._production_price + + @production_price.setter + def production_price(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_0(self) -> str: + """Offpeak hours 
0.""" + return self._offpeak_hours_0 + + @offpeak_hours_0.setter + def offpeak_hours_0(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_1(self) -> str: + """Offpeak hours 1.""" + return self._offpeak_hours_1 + + @offpeak_hours_1.setter + def offpeak_hours_1(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_2(self) -> str: + """Offpeak hours 2.""" + return self._offpeak_hours_2 + + @offpeak_hours_2.setter + def offpeak_hours_2(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_3(self) -> str: + """Offpeak hours 3.""" + return self._offpeak_hours_3 + + @offpeak_hours_3.setter + def offpeak_hours_3(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_4(self) -> str: + """Offpeak hours 4.""" + return self._offpeak_hours_4 + + @offpeak_hours_4.setter + def offpeak_hours_4(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_5(self) -> str: + """Offpeak hours 5.""" + return self._offpeak_hours_5 + + @offpeak_hours_5.setter + def offpeak_hours_5(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def offpeak_hours_6(self) -> str: + """Offpeak hours 6.""" + return self._offpeak_hours_6 + + @offpeak_hours_6.setter + def offpeak_hours_6(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def refresh_addresse(self) -> bool: + """Enable/Disable refresh addresse.""" + return self._refresh_addresse + + @refresh_addresse.setter + def refresh_addresse(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def refresh_contract(self) -> bool: + """Enable/Disable refresh contract.""" + return self._refresh_contract + + @refresh_contract.setter + def refresh_contract(self, value): + 
self.change(inspect.currentframe().f_code.co_name, value) + + +class MyElectricalData: + """MyElectricalData configuration.""" + + def __init__(self, config: dict) -> None: + self.config = config + self.key = "myelectricaldata" + self.usage_point_config = {} + self.json: dict = {} + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return {"MON_POINT_DE_LIVRAISON": UsagePointId(self.config, "MON_POINT_DE_LIVRAISON", write=False).default()} + + def load(self): + """Load configuration from file.""" + if self.config is None or "myelectricaldata" not in self.config: + self.config = {"myelectricaldata": self.default()} + + for usage_point_id in self.config["myelectricaldata"]: + usage_point_config: UsagePointId = UsagePointId(self.config, str(usage_point_id)) + self.usage_point_config[usage_point_id] = usage_point_config + self.json[usage_point_id] = usage_point_config.json + + def new(self, usage_point_id: str): + """Create new usage point.""" + usage_point_config: UsagePointId = UsagePointId(self.config, str(usage_point_id)) + self.usage_point_config[usage_point_id] = usage_point_config + self.json[usage_point_id] = usage_point_config.json diff --git a/src/config/optel.py b/src/config/optel.py new file mode 100644 index 00000000..5a788c1a --- /dev/null +++ b/src/config/optel.py @@ -0,0 +1,124 @@ +"""OpenTelemetry configuration.""" +import inspect + +from database.config import DatabaseConfig +from utils import edit_config, str2bool + + +class OpTel: + """OpenTelemetry configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._enable: bool = None + self._service_name: str = None + self._endpoint: str = None + self._environment: str = None + self._extension: list = [] + # PROPERTIES + self.key = "opentelemetry" + self.json: dict = {} + self.comments = {"opentelemetry": "Pour les utilisateurs 
avancées."} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "enable": False, + "service_name": "myelectricaldata", + "endpoint": "http://localhost:4317", + "environment": "production", + "extension": ["fastapi", "sqlalchemy"], + } + + def load(self): + """Load configuration from file.""" + try: + sub_key = "enable" + self.change(sub_key, str2bool(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "service_name" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "endpoint" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "environment" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "extension" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def enable(self) -> bool: + """Enable/Disable service.""" + return self._enable + + @enable.setter + def enable(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def service_name(self) -> str: + """Service name.""" + return 
self._service_name + + @service_name.setter + def service_name(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def endpoint(self) -> str: + """Endpoint.""" + return self._endpoint + + @endpoint.setter + def endpoint(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def environment(self) -> str: + """Environment.""" + return self._environment + + @environment.setter + def environment(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def extension(self) -> list: + """Extension (fastapi, sqlalchemy).""" + return self._extension + + @extension.setter + def extension(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/config/server.py b/src/config/server.py new file mode 100644 index 00000000..dedc896c --- /dev/null +++ b/src/config/server.py @@ -0,0 +1,125 @@ +"""Server configuration.""" +import inspect + +from const import CYCLE_MINIMUN +from database.config import DatabaseConfig +from utils import edit_config + + +class Server: + """Server configuration.""" + + def __init__(self, config: dict, write: bool = True) -> None: + self.config = config + self.write = write + self.db = DatabaseConfig() + # LOCAL PROPERTIES + self._cidr: str = None + self._port: int = None + self._certfile: str = None + self._keyfile: str = None + self._cycle: int = None + # PROPERTIES + self.key = "server" + self.json: dict = {} + self.comments = {"server": "Configuration du serveur web."} + # FUNCTION + self.load() + + def default(self) -> dict: + """Return default configuration as dictionary.""" + return { + "cidr": "0.0.0.0", # noqa: S104 + "port": 5000, + "certfile": "", + "keyfile": "", + "cycle": 14400, + } + + def load(self): + """Load configuration.""" + try: + sub_key = "cidr" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + 
sub_key = "port" + self.change(sub_key, int(self.config[self.key][sub_key]), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "certfile" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "keyfile" + self.change(sub_key, self.config[self.key][sub_key], False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + try: + sub_key = "cycle" + self.change(sub_key, int(max(self.config[self.key][sub_key], CYCLE_MINIMUN)), False) + except Exception: + self.change(sub_key, self.default()[sub_key], False) + + # Save configuration + if self.write: + edit_config(data={self.key: self.json}, comments=self.comments) + self.db.set(self.key, self.json) + + def change(self, key: str, value: str, write_file: bool = True) -> None: + """Change configuration.""" + setattr(self, f"_{key}", value) + self.json[key] = value + if write_file: + edit_config({self.key: {key: value}}) + current_config = self.db.get(self.key) + new_config = {**current_config, **{key: value}} + self.db.set(self.key, new_config) + + @property + def cidr(self): + """CIDR Listen address.""" + return self._cidr + + @cidr.setter + def cidr(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def port(self): + """Server listen port.""" + return self._port + + @port.setter + def port(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def certfile(self): + """HTTPs custom certificat.""" + return self._certfile + + @certfile.setter + def certfile(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def keyfile(self): + """HTTPs custom keyfile.""" + return self.keyfile + + @keyfile.setter + def keyfile(self, value): + self.change(inspect.currentframe().f_code.co_name, value) + + @property + def cycle(self): + """Jobs cycle.""" + return 
self._cycle + + @cycle.setter + def cycle(self, value): + self.change(inspect.currentframe().f_code.co_name, value) diff --git a/src/const.py b/src/const.py new file mode 100755 index 00000000..a4d2166f --- /dev/null +++ b/src/const.py @@ -0,0 +1,35 @@ +"""Configuration file for myelectricaldata.""" + + +import pytz + +LOG_FORMAT = "%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s" +LOG_FORMAT_DATE = "%Y-%m-%d %H:%M:%S" + +URL = "https://myelectricaldata.fr" +URL_CONFIG_FILE = "https://github.com/MyElectricalData/myelectricaldata_import/blob/main/config.example.yaml" + +USAGE_POINT_ID_LENGTH = 14 + +MAX_IMPORT_TRY = 20 +CYCLE_MINIMUN = 3600 + +DAILY_MAX_DAYS = 1094 +DETAIL_MAX_DAYS = 728 + +TEMPO_BEGIN = 600 +TEMPO_END = 2200 + +# Return code +CODE_200_SUCCESS = 200 +CODE_204_NO_CONTENT = 204 +CODE_400_BAD_REQUEST = 400 +CODE_404_NOT_FOUND = 404 +CODE_409_CONFLICT = 409 +CODE_403_FORBIDDEN = 403 +CODE_422_UNPROCESSABLE_ENTITY = 422 +CODE_429_TOO_MANY_REQUEST = 429 +CODE_500_INTERNAL_SERVER_ERROR = 500 + +TIMEZONE = pytz.timezone("Europe/Paris") +TIMEZONE_UTC = pytz.timezone("UTC") diff --git a/src/database/__init__.py b/src/database/__init__.py index e69de29b..b99b2b65 100644 --- a/src/database/__init__.py +++ b/src/database/__init__.py @@ -0,0 +1,6 @@ +"""Module to manage database data.""" +from database.main import Database + +DB = Database() +DB.init_database() +DB.unlock() diff --git a/src/database/addresses.py b/src/database/addresses.py new file mode 100644 index 00000000..79e81ed6 --- /dev/null +++ b/src/database/addresses.py @@ -0,0 +1,81 @@ +"""Manage Addresses table in database.""" +from sqlalchemy import delete, select + +from db_schema import ( + Addresses, + UsagePoints, +) + +from . 
import DB + + +class DatabaseAddresses: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id): + """Initialize DatabaseConfig.""" + self.session = DB.session() + self.usage_point_id = usage_point_id + + def get( + self, + ) -> Addresses: + """Retrieve the address associated with the given usage point ID.""" + query = ( + select(Addresses) + .join(UsagePoints.relation_addressess) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def set(self, data, count=0): + """Set the address associated with the given usage point ID. + + Args: + data (dict): The address data. + count (int, optional): The count value. Defaults to 0. + """ + query = ( + select(Addresses) + .join(UsagePoints.relation_addressess) + .where(Addresses.usage_point_id == self.usage_point_id) + ) + addresses = self.session.scalars(query).one_or_none() + if addresses is not None: + addresses.street = data["street"] + addresses.locality = data["locality"] + addresses.postal_code = data["postal_code"] + addresses.insee_code = data["insee_code"] + addresses.city = data["city"] + addresses.country = data["country"] + addresses.geo_points = data["geo_points"] + addresses.count = count + else: + self.session.add( + Addresses( + usage_point_id=self.usage_point_id, + street=data["street"], + locality=data["locality"], + postal_code=data["postal_code"], + insee_code=data["insee_code"], + city=data["city"], + country=data["country"], + geo_points=data["geo_points"], + count=count, + ) + ) + self.session.flush() + self.session.close() + + def delete(self): + """Delete the address associated with the given usage point ID. + + Returns: + bool: True if the address is successfully deleted, False otherwise. 
+ """ + self.session.execute(delete(Addresses).where(Addresses.usage_point_id == self.usage_point_id)) + self.session.flush() + self.session.close() + return True diff --git a/src/database/config.py b/src/database/config.py new file mode 100644 index 00000000..00d847b9 --- /dev/null +++ b/src/database/config.py @@ -0,0 +1,36 @@ +"""Manage Config table in database.""" + +import json + +from sqlalchemy import select + +from db_schema import Config as ConfigTable + +from . import DB + + +class DatabaseConfig: + """Manage configuration for the database.""" + + def __init__(self): + """Initialize DatabaseConfig.""" + self.session = DB.session() + + def get(self, key): + """Get data from config table.""" + query = select(ConfigTable).where(ConfigTable.key == key) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def set(self, key, value): + """Set data from config table.""" + query = select(ConfigTable).where(ConfigTable.key == key) + config = self.session.scalars(query).one_or_none() + if config: + config.value = json.dumps(value) + else: + self.session.add(ConfigTable(key=key, value=json.dumps(value))) + self.session.flush() + self.session.close() + DB.refresh_object() diff --git a/src/database/contracts.py b/src/database/contracts.py new file mode 100644 index 00000000..6b3b1322 --- /dev/null +++ b/src/database/contracts.py @@ -0,0 +1,107 @@ +"""Manage Contracts table in database.""" + +from sqlalchemy import delete, select + +from db_schema import ( + Contracts, + UsagePoints, +) + +from . import DB + + +class DatabaseContracts: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id): + """Initialize DatabaseConfig.""" + self.session = DB.session() + self.usage_point_id = usage_point_id + + def get(self) -> Contracts: + """Retrieve the contract associated with the given usage point ID. + + Returns: + Contracts: The contract object if found, None otherwise. 
+ """ + query = ( + select(Contracts) + .join(UsagePoints.relation_contract) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def set(self, data: dict, count: int = 0) -> None: + """Set the contract details for the given usage point ID. + + Args: + usage_point_id (int): The ID of the usage point. + data (dict): A dictionary containing the contract details. + count (int, optional): The count value. Defaults to 0. + + Returns: + None + """ + query = ( + select(Contracts) + .join(UsagePoints.relation_contract) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ) + contract: Contracts = self.session.scalars(query).one_or_none() + if contract is not None: + contract.usage_point_status = data["usage_point_status"] + contract.meter_type = data["meter_type"] + contract.segment = data["segment"] + contract.subscribed_power = data["subscribed_power"] + contract.last_activation_date = data["last_activation_date"] + contract.distribution_tariff = data["distribution_tariff"] + contract.offpeak_hours_0 = data["offpeak_hours_0"] + contract.offpeak_hours_1 = data["offpeak_hours_1"] + contract.offpeak_hours_2 = data["offpeak_hours_2"] + contract.offpeak_hours_3 = data["offpeak_hours_3"] + contract.offpeak_hours_4 = data["offpeak_hours_4"] + contract.offpeak_hours_5 = data["offpeak_hours_5"] + contract.offpeak_hours_6 = data["offpeak_hours_6"] + contract.contract_status = data["contract_status"] + contract.last_distribution_tariff_change_date = data["last_distribution_tariff_change_date"] + contract.count = count + else: + self.session.add( + Contracts( + usage_point_id=self.usage_point_id, + usage_point_status=data["usage_point_status"], + meter_type=data["meter_type"], + segment=data["segment"], + subscribed_power=data["subscribed_power"], + last_activation_date=data["last_activation_date"], + distribution_tariff=data["distribution_tariff"], + 
offpeak_hours_0=data["offpeak_hours_0"], + offpeak_hours_1=data["offpeak_hours_1"], + offpeak_hours_2=data["offpeak_hours_2"], + offpeak_hours_3=data["offpeak_hours_3"], + offpeak_hours_4=data["offpeak_hours_4"], + offpeak_hours_5=data["offpeak_hours_5"], + offpeak_hours_6=data["offpeak_hours_6"], + contract_status=data["contract_status"], + last_distribution_tariff_change_date=data["last_distribution_tariff_change_date"], + count=count, + ) + ) + self.session.flush() + self.session.close() + + def delete(self): + """Delete the contract associated with the given usage point ID. + + Args: + usage_point_id (int): The ID of the usage point. + + Returns: + bool: True if the address is successfully deleted, False otherwise. + """ + self.session.execute(delete(Contracts).where(Contracts.usage_point_id == self.usage_point_id)) + self.session.flush() + self.session.close() + return True diff --git a/src/database/daily.py b/src/database/daily.py new file mode 100644 index 00000000..c72638fa --- /dev/null +++ b/src/database/daily.py @@ -0,0 +1,433 @@ +"""Manage Config table in database.""" + +import hashlib +import logging +from datetime import datetime, timedelta + +from sqlalchemy import asc, delete, desc, func, select, update + +from const import MAX_IMPORT_TRY, TIMEZONE +from db_schema import ConsumptionDaily, ProductionDaily, UsagePoints + +from . 
import DB + + +class DatabaseDaily: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id, measurement_direction="consumption"): + """Initialize DatabaseConfig.""" + self.session = DB.session() + self.usage_point_id = usage_point_id + self.measurement_direction = measurement_direction + if self.measurement_direction == "consumption": + self.table = ConsumptionDaily + self.relation = UsagePoints.relation_consumption_daily + else: + self.table = ProductionDaily + self.relation = UsagePoints.relation_production_daily + + def get_all(self): + """Retrieve all daily data for a given usage point and measurement direction.""" + data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .order_by(self.table.date.desc()) + ).all() + self.session.close() + return data + + def get_datatable( + self, + order_column="date", + order_dir="asc", + search=None, + ): + """Retrieve datatable for a given usage point, search term, and measurement direction. + + Args: + order_column (str, optional): The column to order the datatable by. Defaults to "date". + order_dir (str, optional): The direction to order the datatable. Defaults to "asc". + search (str, optional): The search term. Defaults to None. + + Returns: + list: The datatable. 
+ """ + yesterday = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time()) + sort = asc(order_column) if order_dir == "desc" else desc(order_column) + if search is not None and search != "": + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where((self.table.date.like(f"%{search}%")) | (self.table.value.like(f"%{search}%"))) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + else: + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + return result.all() + + def get_count(self): + """Retrieve the count of daily data for a given usage point and measurement direction. + + Returns: + int: The count of daily data. + """ + data = self.session.scalars( + select([func.count()]) + .select_from(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ).one_or_none() + self.session.close() + return data + + def get_date(self, date: datetime): + """Retrieve the data for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + object: The data. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + data = self.session.scalars(select(self.table).join(self.relation).where(self.table.id == unique_id)).first() + self.session.flush() + self.session.close() + return data + + def get_state(self, date: datetime): + """Check the state of daily data for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + bool: True if the daily data exists, False otherwise. 
+ """ + date = date.astimezone(TIMEZONE) + if self.get_date(date) is not None: + return True + return False + + def get_last_date(self): + """Retrieve the last date for a given usage point and measurement direction. + + Returns: + str: The last date. + """ + current_data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date) + ).first() + self.session.flush() + self.session.close() + if current_data is None: + return False + return current_data.date + + def get_last(self): + """Retrieve the last data point for a given usage point and measurement direction. + + Returns: + object: The last data point. + """ + current_data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .where(self.table.value != 0) + .order_by(self.table.date.desc()) + ).first() + self.session.flush() + self.session.close() + if current_data is None: + return False + return current_data + + def get_first_date(self): + """Retrieve the first date for a given usage point and measurement direction. + + Returns: + str: The first date. + """ + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).first() + if current_data is None: + return False + return current_data.date + + def get_fail_count(self, date: datetime): + """Retrieve the fail count for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + int: The fail count. 
+ """ + date = date.astimezone(TIMEZONE) + result = self.get_date(date) + if hasattr(result, "fail_count"): + return result.fail_count + return 0 + + def fail_increment(self, date: datetime): + """Increment the fail count for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + int: The updated fail count. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + daily = self.session.scalars(query).one_or_none() + if daily is not None: + fail_count = int(daily.fail_count) + 1 + if fail_count >= MAX_IMPORT_TRY: + blacklist = 1 + fail_count = 0 + else: + blacklist = 0 + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.value = 0 + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + fail_count = 0 + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=0, + fail_count=0, + ) + ) + self.session.flush() + return fail_count + + def get_range(self, begin: datetime, end: datetime): + """Retrieve the range of data for a given usage point, begin date, end date, and measurement direction. + + Args: + begin (str): The begin date. + end (str): The end date. + + Returns: + list: The list of data within the specified range. 
+ """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .where(self.table.date >= begin) + .where(self.table.date <= end) + .order_by(self.table.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).all() + if current_data is None: + return False + else: + return current_data + + def get(self, begin: datetime, end: datetime): + """Retrieve the data for a given usage point, begin date, end date, and measurement direction. + + Args: + begin (str): The begin date. + end (str): The end date. + + Returns: + dict: A dictionary containing the retrieved data. + """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + delta = end - begin + result = {"missing_data": False, "date": {}, "count": 0} + for i in range(delta.days + 1): + check_date = begin + timedelta(days=i) + check_date = datetime.combine(check_date, datetime.min.time()) + query_result = self.get_date(check_date) + check_date = check_date.strftime("%Y-%m-%d") + if query_result is None: + # NEVER QUERY + result["date"][check_date] = { + "status": False, + "blacklist": 0, + "value": 0, + } + result["missing_data"] = True + else: + consumption = query_result.value + blacklist = query_result.blacklist + if consumption == 0: + # ENEDIS RETURN NO DATA + result["date"][check_date] = { + "status": False, + "blacklist": blacklist, + "value": consumption, + } + result["missing_data"] = True + else: + # SUCCESS or BLACKLIST + result["date"][check_date] = { + "status": True, + "blacklist": blacklist, + "value": consumption, + } + return result + + def insert( + self, + date: datetime, + value, + blacklist=0, + fail_count=0, + ): + """Insert daily data for a given usage point, date, value, blacklist, fail count, and measurement direction. + + Args: + date (str): The date of the data. 
+ value (float): The value of the data. + blacklist (int, optional): The blacklist status. Defaults to 0. + fail_count (int, optional): The fail count. Defaults to 0. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + daily = self.session.scalars(query).one_or_none() + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + if daily is not None: + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.value = value + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=value, + blacklist=blacklist, + fail_count=fail_count, + ) + ) + self.session.flush() + + def reset( + self, + date=None, + ): + """Reset the daily data for a given usage point, date, and measurement type. + + Args: + date (str, optional): The date of the data. Defaults to None. + + Returns: + bool: True if the data was reset, False otherwise. + """ + if date is not None: + date = date.astimezone(TIMEZONE) + data = self.get_date(date) + if data is not None: + values = { + self.table.value: 0, + self.table.blacklist: 0, + self.table.fail_count: 0, + } + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(update(self.table, values=values).where(self.table.id == unique_id)) + self.session.flush() + return True + return False + + def delete(self, date=None): + """Delete the daily data for a given usage point, date, and measurement direction. + + Args: + date (str, optional): The date of the data. Defaults to None. + + Returns: + bool: True if the data was deleted, False otherwise. 
+ """ + if date is not None: + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(self.table).where(self.table.id == unique_id)) + else: + self.session.execute(delete(self.table).where(self.table.usage_point_id == self.usage_point_id)) + self.session.flush() + return True + + def blacklist(self, date, action=True): + """Blacklist or unblacklist the daily data for a given usage point, date, and measurement direction. + + Args: + date (str): The date of the data. + action (bool, optional): The action to perform. True to blacklist, False to unblacklist. Defaults to True. + + Returns: + bool: True if the data was blacklisted or unblacklisted, False otherwise. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + daily = self.session.scalars(query).one_or_none() + if daily is not None: + daily.blacklist = action + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=action, + fail_count=0, + ) + ) + self.session.flush() + return True + + def get_date_range(self): + """Get the date range for a given usage point. + + Returns: + dict: A dictionary containing the begin and end dates of the date range. 
+ """ + return { + "begin": self.get_last_date(), + "end": self.get_first_date(), + } diff --git a/src/database/detail.py b/src/database/detail.py new file mode 100644 index 00000000..a95fb6c2 --- /dev/null +++ b/src/database/detail.py @@ -0,0 +1,523 @@ +"""Manage Config table in database.""" + +import hashlib +import logging +from datetime import datetime, timedelta + +from sqlalchemy import asc, delete, desc, func, select + +from const import MAX_IMPORT_TRY, TIMEZONE +from db_schema import ConsumptionDetail, ProductionDetail, UsagePoints + +from . import DB + + +class DatabaseDetail: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id, measurement_direction="consumption"): + """Initialize DatabaseConfig.""" + self.session = DB.session() + self.min_entry = 100 + self.usage_point_id = usage_point_id + self.measurement_direction = measurement_direction + if self.measurement_direction == "consumption": + self.table = ConsumptionDetail + self.relation = UsagePoints.relation_consumption_detail + else: + self.table = ProductionDetail + self.relation = UsagePoints.relation_production_detail + + def get_all( + self, + begin=None, + end=None, + order_dir="desc", + ): + """Retrieve all records from the database. + + Args: + begin (datetime, optional): The start date of the range. Defaults to None. + end (datetime, optional): The end date of the range. Defaults to None. + order_dir (str, optional): The order direction. Defaults to "desc". + + Returns: + list: A list of records. 
+ """ + if begin is not None: + begin = begin.astimezone(TIMEZONE) + if end is not None: + end = end.astimezone(TIMEZONE) + sort = asc("date") if order_dir == "desc" else desc("date") + if begin is None and end is None: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(sort) + ).all() + elif begin is not None and end is None: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .filter(self.table.date >= begin) + .order_by(sort) + ).all() + elif end is not None and begin is None: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .filter(self.table.date <= end) + .order_by(sort) + ).all() + else: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .filter(self.table.date <= end) + .filter(self.table.date >= begin) + .order_by(sort) + ).all() + + def get_datatable( + self, + order_column="date", + order_dir="asc", + search=None, + ): + """Retrieve datatable from the database. + + Args: + order_column (str, optional): The column to order the datatable by. Defaults to "date". + order_dir (str, optional): The order direction. Defaults to "asc". + search (str, optional): The search query to filter the datatable. Defaults to None. + + Returns: + list: A list of datatable records. 
+ """ + yesterday = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time()) + sort = asc(order_column) if order_dir == "desc" else desc(order_column) + if search is not None and search != "": + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where((self.table.date.like(f"%{search}%")) | (self.table.value.like(f"%{search}%"))) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + else: + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + return result.all() + + def get_count(self): + """Retrieve the count of records for a specific usage point and measurement direction. + + Returns: + int: The count of records. + """ + return self.session.scalars( + select([func.count()]) + .select_from(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ).one_or_none() + + def get_date(self, date: datetime): + """Retrieve the data for a specific date from the database. + + Args: + date (str): The date in the format 'YYYY-MM-DD'. + + Returns: + object: The data for the specified date. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + return self.session.scalars(select(self.table).join(self.relation).where(self.table.id == unique_id)).first() + + def get_range( + self, + begin: datetime, + end: datetime, + order="desc", + ): + """Retrieve a range of data from the database. + + Args: + begin (datetime): The start of the range. + end (datetime): The end of the range. + order (str, optional): The order direction. Defaults to "desc". + + Returns: + list: A list of data records within the specified range. 
+ """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + if order == "desc": + order = self.table.date.desc() + else: + order = self.table.date.asc() + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .where(self.table.date >= begin) + .where(self.table.date <= end) + .order_by(order) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).all() + if current_data is None: + return False + return current_data + + def get(self, begin: datetime, end: datetime): + """Retrieve data for a specific range from the database. + + Args: + begin (datetime): The start of the range. + end (datetime): The end of the range. + + Returns: + dict: A dictionary containing the retrieved data. + """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + delta = end - begin + result = {"missing_data": False, "date": {}, "count": 0} + for _ in range(delta.days + 1): + query_result = self.get_all( + begin=begin, + end=end, + ) + time_delta = abs(int((begin - end).total_seconds() / 60)) + total_internal = 0 + for query in query_result: + total_internal = total_internal + query.interval + total_time = abs(total_internal - time_delta) + if total_time > self.min_entry: + logging.info(f" - {total_time}m absente du relevé.") + result["missing_data"] = True + else: + for query in query_result: + result["date"][query.date] = { + "value": query.value, + "interval": query.interval, + "measure_type": query.measure_type, + "blacklist": query.blacklist, + } + return result + + def get_state(self, date: datetime): + """Get the state of a specific data record in the database. + + Args: + date (datetime): The date of the data record. + + Returns: + bool: True if the data record exists, False otherwise. 
+ """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + current_data = self.session.scalars( + select(self.table).join(self.relation).where(self.table.id == unique_id) + ).one_or_none() + if current_data is None: + return False + else: + return True + + def insert( # noqa: PLR0913 + self, + date: datetime, + value, + interval, + blacklist=0, + fail_count=0, + ): + """Insert a new record into the database for the given consumption or production detail. + + Args: + date (datetime): The date of the record. + value (float): The value of the record. + interval (int): The interval of the record. + blacklist (int, optional): The blacklist status of the record. Defaults to 0. + fail_count (int, optional): The fail count of the record. Defaults to 0. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + detail = self.get_date(date) + if detail is not None: + detail.id = unique_id + detail.usage_point_id = self.usage_point_id + detail.date = date + detail.value = value + detail.interval = interval + detail.measure_type = self.measurement_direction + detail.blacklist = blacklist + detail.fail_count = fail_count + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=value, + interval=interval, + measure_type=self.measurement_direction, + blacklist=blacklist, + fail_count=fail_count, + ) + ) + self.session.flush() + + def reset(self, date=None): + """Reset the values of a consumption or production detail record. + + Args: + date (datetime, optional): The date of the record. Defaults to None. + + Returns: + bool: True if the reset was successful, False otherwise. 
+ """ + if date is not None: + date = date.astimezone(TIMEZONE) + detail = self.get_date(date) + if detail is not None: + detail.value = 0 + detail.interval = 0 + detail.blacklist = 0 + detail.fail_count = 0 + self.session.flush() + return True + return False + + def reset_range(self, begin: datetime, end: datetime): + """Reset the values of consumption or production detail records within a specified range. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + bool: True if the reset was successful, False otherwise. + """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + detail = self.get_range(begin, end) + if detail is not None: + for row in detail: + row.value = 0 + row.interval = 0 + row.blacklist = 0 + row.fail_count = 0 + self.session.flush() + return True + return False + + def delete(self, date=None): + """Delete a consumption or production detail record. + + Args: + date (datetime, optional): The date of the record. Defaults to None. + + Returns: + bool: True if the deletion was successful, False otherwise. + """ + if date is not None: + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(self.table).where(self.table.id == unique_id)) + else: + self.session.execute(delete(self.table).where(self.table.usage_point_id == self.usage_point_id)) + self.session.flush() + return True + + def delete_range(self, date: datetime): + """Delete a range of consumption or production detail records. + + Args: + date (datetime): The date of the records to be deleted. + + Returns: + bool: True if the deletion was successful, False otherwise. 
+ """ + if date is not None: + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(self.table).where(self.table.id == unique_id)) + else: + self.session.execute(delete(self.table).where(self.table.usage_point_id == self.usage_point_id)) + self.session.flush() + return True + + def get_ratio_hc_hp(self, begin: datetime, end: datetime): + """Calculate the ratio of high consumption (HC) to high production (HP) for a given usage point and time range. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + dict: A dictionary with the ratio of HC and HP. + """ + begin = begin.astimezone(TIMEZONE) + end = end.astimezone(TIMEZONE) + result = { + "HC": 0, + "HP": 0, + } + detail_data = self.get_all( + begin=begin, + end=end, + ) + for data in detail_data: + result[data.measure_type] = result[data.measure_type] + data.value + return result + + def get_fail_count(self, date): + """Get the fail count for a specific usage point, date, and measurement type. + + Args: + date (datetime): The date of the record. + + Returns: + int: The fail count for the specified usage point, date, and measurement type. + """ + date = date.astimezone(TIMEZONE) + data = self.get_date(date) + if not hasattr(data, "fail_count"): + return 0 + return self.get_date(date).fail_count + + def fail_increment(self, date: datetime): + """Increment the fail count for a specific usage point, date, and measurement type. + + Args: + date (datetime): The date of the record. + + Returns: + int: The updated fail count. 
+ """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + detail = self.session.scalars(query).one_or_none() + if detail is not None: + fail_count = int(detail.fail_count) + 1 + if fail_count >= MAX_IMPORT_TRY: + blacklist = 1 + fail_count = 0 + else: + blacklist = 0 + detail.usage_point_id = self.usage_point_id + detail.date = date + detail.value = 0 + detail.interval = 0 + detail.measure_type = "HP" + detail.blacklist = blacklist + detail.fail_count = fail_count + else: + fail_count = 0 + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + interval=0, + measure_type="HP", + blacklist=0, + fail_count=0, + ) + ) + self.session.flush() + self.session.close() + return fail_count + + def get_last_date(self): + """Get the last date for a specific usage point and measurement type. + + Returns: + datetime: The last date for the specified usage point and measurement type. + """ + current_data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date) + ).first() + if current_data is None: + return False + else: + return current_data.date + + def get_first_date(self): + """Get the first date for a specific usage point and measurement type. + + Returns: + datetime: The first date for the specified usage point and measurement type. 
+ """ + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).first() + if current_data is None: + return False + else: + return current_data.date + + def get_date_range(self): + """Get the date range (begin and end dates) for a specific usage point. + + Returns: + dict: A dictionary containing the begin and end dates. + """ + return { + "begin": self.get_last_date(), + "end": self.get_first_date(), + } + + def blacklist(self, date: datetime, action=True): + """Blacklist or unblacklist the daily data for a given usage point, date, and measurement direction. + + Args: + date (str): The date of the data. + action (bool, optional): The action to perform. True to blacklist, False to unblacklist. Defaults to True. + + Returns: + bool: True if the data was blacklisted or unblacklisted, False otherwise. + """ + date = date.astimezone(TIMEZONE) + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + daily = self.session.scalars(query).one_or_none() + if daily is not None: + daily.blacklist = action + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=action, + fail_count=0, + ) + ) + self.session.flush() + return True diff --git a/src/database/ecowatt.py b/src/database/ecowatt.py new file mode 100644 index 00000000..292cade0 --- /dev/null +++ b/src/database/ecowatt.py @@ -0,0 +1,72 @@ +"""Manage Config table in database.""" + +from datetime import datetime + +from sqlalchemy import select + +from db_schema import Ecowatt + +from . 
import DB + + +class DatabaseEcowatt: + """Manage configuration for the database.""" + + def __init__(self): + """Initialize DatabaseConfig.""" + self.session = DB.session() + + def get(self, order="desc"): + """Retrieve Ecowatt data from the database. + + Args: + order (str, optional): The order in which to retrieve the data. Defaults to "desc". + + Returns: + list: A list of Ecowatt data. + """ + if order == "desc": + order = Ecowatt.date.desc() + else: + order = Ecowatt.date.asc() + return self.session.scalars(select(Ecowatt).order_by(order)).all() + + def get_range(self, begin, end, order="desc"): + """Retrieve a range of Ecowatt data from the database. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + order (str, optional): The order in which to retrieve the data. Defaults to "desc". + + Returns: + list: A list of Ecowatt data within the specified range. + """ + if order == "desc": + order = Ecowatt.date.desc() + else: + order = Ecowatt.date.asc() + return self.session.scalars( + select(Ecowatt).where(Ecowatt.date >= begin).where(Ecowatt.date <= end).order_by(order) + ).all() + + def set(self, date, value, message, detail): + """Set the Ecowatt data in the database. + + Args: + date (datetime): The date of the data. + value (float): The value of the data. + message (str): The message associated with the data. + detail (str): The detail information of the data. 
+ """ + date = datetime.combine(date, datetime.min.time()) + ecowatt = self.get_range(date, date) + if ecowatt: + for item in ecowatt: + item.value = value + item.message = message + item.detail = detail + else: + self.session.add(Ecowatt(date=date, value=value, message=message, detail=detail)) + self.session.flush() + return True diff --git a/src/database/main.py b/src/database/main.py new file mode 100644 index 00000000..de8bef6f --- /dev/null +++ b/src/database/main.py @@ -0,0 +1,154 @@ +"""Manage all database operations.""" +import logging +import subprocess +import sys +import traceback +from datetime import datetime +from pathlib import Path + +from sqlalchemy import create_engine, inspect, select +from sqlalchemy.orm import scoped_session, sessionmaker +from sqlalchemy.pool import NullPool + +from const import TIMEZONE +from db_schema import ( + Config as ConfigSchema, +) +from utils import get_version, load_config + + +class Database: + """Represents a database connection and provides methods for database operations.""" + + def __init__(self): + """Initialize a Database object.""" + self.config = load_config() + self.application_path = self.config.application_path + self.application_path_data = self.config.application_path_data + self.db_file = f"{self.application_path_data}/myelectricaldata.db" + + # MIGRATE TO 1.0.0 + old_path = Path(f"{self.application_path_data}/cache.db") + if old_path.exists(): + old_path.rename(self.db_file) + + # DBURI CONFIGURATION + backend: dict = self.config.config.get("backend", None) + if backend is None or "uri" not in backend: + path = self.db_file + self.uri = f"sqlite:////{path}" + logging.critical(f"Create new database file : {path}") + elif backend["uri"].startswith("sqlite") or backend["uri"].startswith("postgresql"): + self.uri = backend["uri"] + if backend["uri"].startswith("sqlite"): + path = self.uri.split("///")[1] + if not Path(path).exists(): + logging.critical(f"Create new database file : {path}") + 
Path(self.db_file).touch() + else: + logging.critical("Database not supported (only SQLite & PostgresSQL)") + sys.exit(1) + + self.engine = create_engine( + self.uri, + echo=False, + query_cache_size=0, + isolation_level="READ UNCOMMITTED", + poolclass=NullPool, + ) + + subprocess.run( + f"cd {self.application_path}; DB_URL='{self.uri}' alembic upgrade head", + shell=True, # noqa: S602 + check=True, + ) + + self.session_factory = sessionmaker(self.engine, autocommit=True, autoflush=True) + self.session = scoped_session(self.session_factory) + self.inspector = inspect(self.engine) + self.lock_file = f"{self.application_path_data}/.lock" + + def init_database(self): + """Initialize the database with default values.""" + try: + logging.info("Configure Databases") + query = select(ConfigSchema).where(ConfigSchema.key == "day") + day = self.session().scalars(query).one_or_none() + if day: + day.value = datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d") + else: + self.session().add(ConfigSchema(key="day", value=datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d"))) + logging.info(" => day") + query = select(ConfigSchema).where(ConfigSchema.key == "call_number") + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="call_number", value="0")) + logging.info(" => call_number") + query = select(ConfigSchema).where(ConfigSchema.key == "max_call") + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="max_call", value="500")) + logging.info(" => max_call") + query = select(ConfigSchema).where(ConfigSchema.key == "version") + version = self.session().scalars(query).one_or_none() + if version: + version.value = get_version() + else: + self.session().add(ConfigSchema(key="version", value=get_version())) + logging.info(" => version") + query = select(ConfigSchema).where(ConfigSchema.key == "lock") + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="lock", value="0")) + 
logging.info(" => lock") + query = select(ConfigSchema).where(ConfigSchema.key == "lastUpdate") + if not self.session().scalars(query).one_or_none(): + self.session().add(ConfigSchema(key="lastUpdate", value=str(datetime.now(tz=TIMEZONE)))) + logging.info(" => lastUpdate") + logging.info(" Success") + except Exception as e: + traceback.print_exc() + logging.error(e) + logging.critical("Database initialize failed!") + + def purge_database(self): + """Purges the SQLite database.""" + logging.info("Reset SQLite Database") + if Path(f"{self.application_path_data}/cache.db").exists(): + Path(f"{self.application_path_data}/cache.db").unlink() + logging.info(" => Success") + else: + logging.info(" => No cache detected") + + def lock_status(self): + """Check the lock status of the database. + + Returns: + bool: True if the database is locked, False otherwise. + """ + if Path(self.lock_file).exists(): + return True + else: + return False + + def lock(self): + """Locks the database. + + Returns: + bool: True if the database is locked, False otherwise. + """ + with Path(self.lock_file).open("xt", encoding="UTF-8") as f: + f.write(str(datetime.now(tz=TIMEZONE))) + f.close() + return self.lock_status() + + def unlock(self): + """Unlocks the database. + + Returns: + bool: True if the database is unlocked, False otherwise. + """ + if Path(self.lock_file).exists(): + Path(self.lock_file).unlink() + return self.lock_status() + + def refresh_object(self): + """Refresh the ORM objects.""" + self.session().expire_all() diff --git a/src/database/max_power.py b/src/database/max_power.py new file mode 100644 index 00000000..2ace775d --- /dev/null +++ b/src/database/max_power.py @@ -0,0 +1,348 @@ +"""Manage Config table in database.""" + +import hashlib +import logging +from datetime import datetime, timedelta + +import pytz +from sqlalchemy import asc, delete, desc, func, select + +from const import MAX_IMPORT_TRY +from db_schema import ConsumptionDailyMaxPower, UsagePoints + +from . 
import DB + + +class DatabaseMaxPower: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id, measurement_direction="consumption"): + """Initialize DatabaseConfig.""" + self.session = DB.session() + self.usage_point_id = usage_point_id + self.measurement_direction = measurement_direction + + def get_all(self, order="desc"): + """Retrieve all consumption daily max power records from the database. + + Args: + order (str, optional): The order in which the records should be sorted. Defaults to "desc". + + Returns: + list: A list of consumption daily max power records. + """ + if order == "desc": + order = ConsumptionDailyMaxPower.date.desc() + else: + order = ConsumptionDailyMaxPower.date.asc() + return self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .order_by(order) + ).all() + + def get_range(self, begin, end): + """Retrieve the range of consumption daily max power records from the database. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + list: A list of consumption daily max power records within the specified range. + """ + query = ( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + .where(ConsumptionDailyMaxPower.date >= begin) + .where(ConsumptionDailyMaxPower.date <= end) + .order_by(ConsumptionDailyMaxPower.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).all() + if current_data is None: + return False + else: + return current_data + + def get_power(self, begin, end): + """Retrieve the power data for a given date range. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. 
+ + Returns: + dict: A dictionary containing the power data for each date within the range. + """ + delta = end - begin + result = {"missing_data": False, "date": {}, "count": 0} + for i in range(delta.days + 1): + check_date = begin + timedelta(days=i) + check_date = datetime.combine(check_date, datetime.min.time()) + query_result = self.get_date(check_date) + check_date = check_date.strftime("%Y-%m-%d") + if query_result is None: + # NEVER QUERY + result["date"][check_date] = { + "status": False, + "blacklist": 0, + "value": 0, + } + result["missing_data"] = True + else: + consumption = query_result.value + blacklist = query_result.blacklist + if consumption == 0: + # ENEDIS RETURN NO DATA + result["date"][check_date] = { + "status": False, + "blacklist": blacklist, + "value": consumption, + } + result["missing_data"] = True + else: + # SUCCESS or BLACKLIST + result["date"][check_date] = { + "status": True, + "blacklist": blacklist, + "value": consumption, + } + return result + + def get_last_date(self): + """Retrieve the last date of consumption daily max power record from the database. + + Returns: + datetime: The last date of consumption daily max power record. + """ + current_data = self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + .order_by(ConsumptionDailyMaxPower.date) + ).first() + if current_data is None: + return False + else: + return current_data.date + + def get_date(self, date): + """Retrieve the consumption daily max power record for a given date. + + Args: + date (datetime): The date for which to retrieve the record. + + Returns: + ConsumptionDailyMaxPower: The consumption daily max power record for the given date. 
+ """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + return self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(ConsumptionDailyMaxPower.id == unique_id) + ).one_or_none() + + def insert(self, date, event_date, value, blacklist=0, fail_count=0): # noqa: PLR0913, D417 + """Insert the daily max power record into the database. + + Args: + date (datetime): The date of the record. + event_date (datetime): The event date of the record. + value (float): The value of the record. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + daily = self.get_date(date) + if daily is not None: + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.event_date = event_date + daily.value = value + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + self.session.add( + ConsumptionDailyMaxPower( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + event_date=event_date, + value=value, + blacklist=blacklist, + fail_count=fail_count, + ) + ) + self.session.flush() + + def get_daily_count(self): + """Retrieve the count of consumption daily max power records from the database. + + Returns: + int: The count of consumption daily max power records. + """ + return self.session.scalars( + select([func.count()]) + .select_from(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ).one_or_none() + + def get_daily_datatable(self, order_column="date", order_dir="asc", search=None): + """Retrieve the datatable of consumption daily max power records from the database. + + Args: + order_column (str, optional): The column to order the datatable by. Defaults to "date". + order_dir (str, optional): The direction to order the datatable in. Defaults to "asc". 
+ search (str, optional): The search term to filter the datatable. Defaults to None. + + Returns: + list: The datatable of consumption daily max power records. + """ + yesterday = datetime.combine(datetime.now(pytz.utc) - timedelta(days=1), datetime.max.time()) + sort = asc(order_column) if order_dir == "desc" else desc(order_column) + if search is not None and search != "": + result = self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where( + (ConsumptionDailyMaxPower.date.like(f"%{search}%")) + | (ConsumptionDailyMaxPower.value.like(f"%{search}%")) + ) + .where(ConsumptionDailyMaxPower.date <= yesterday) + .order_by(sort) + ) + else: + result = self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where(ConsumptionDailyMaxPower.date <= yesterday) + .order_by(sort) + ) + return result.all() + + def daily_fail_increment(self, date): + """Increment the fail count for a specific date in the consumption daily max power records. + + Args: + date (datetime): The date for which to increment the fail count. + + Returns: + int: The updated fail count. 
+ """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + daily = self.get_date(date) + if daily is not None: + fail_count = int(daily.fail_count) + 1 + if fail_count >= MAX_IMPORT_TRY: + blacklist = 1 + fail_count = 0 + else: + blacklist = 0 + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.event_date = None + daily.value = 0 + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + fail_count = 0 + self.session.add( + ConsumptionDailyMaxPower( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + event_date=None, + value=0, + blacklist=0, + fail_count=0, + ) + ) + self.session.flush() + return fail_count + + def reset_daily(self, date=None): + """Reset the daily max power record for a specific date. + + Args: + date (datetime, optional): The date to reset the record for. Defaults to None. + + Returns: + bool: True if the reset is successful, False otherwise. + """ + daily = self.get_date(date) + if daily is not None: + daily.event_date = None + daily.value = 0 + daily.blacklist = 0 + daily.fail_count = 0 + self.session.flush() + return True + else: + return False + + def delete_daily(self, date=None): + """Delete the daily max power record for a specific date or all records for the usage point. + + Args: + date (datetime, optional): The date to delete the record for. Defaults to None. + + Returns: + bool: True if the deletion is successful, False otherwise. 
+ """ + if date is not None: + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.id == unique_id)) + else: + self.session.execute( + delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + ) + self.session.flush() + return True + + def blacklist_daily(self, date, action=True): + """Blacklist or unblacklist the daily max power record for a specific date. + + Args: + date (datetime): The date to blacklist or unblacklist the record for. + action (bool, optional): True to blacklist the record, False to unblacklist it. Defaults to True. + + Returns: + bool: True if the operation is successful, False otherwise. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + daily = self.get_date(date) + if daily is not None: + daily.blacklist = action + else: + self.session.add( + ConsumptionDailyMaxPower( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=action, + fail_count=0, + ) + ) + self.session.flush() + return True + + def get_fail_count(self, date): + """Get the fail count for a specific date. + + Args: + date (datetime): The date to get the fail count for. + + Returns: + int: The fail count for the specified date. + """ + result = self.get_date(date) + if hasattr(result, "fail_count"): + return result.fail_count + return 0 diff --git a/src/database/statistique.py b/src/database/statistique.py new file mode 100644 index 00000000..19d46bca --- /dev/null +++ b/src/database/statistique.py @@ -0,0 +1,45 @@ +"""Manage Config table in database.""" + + +from sqlalchemy import delete, select + +from db_schema import Statistique, UsagePoints + +from . 
import DB + + +class DatabaseStatistique: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id): + """Initialize DatabaseConfig.""" + self.session = DB.session() + self.usage_point_id = usage_point_id + + def get(self, key): + """Retrieve the value associated with the given key.""" + return self.session.scalars( + select(Statistique) + .join(UsagePoints.relation_stats) + .where(Statistique.usage_point_id == self.usage_point_id) + .where(Statistique.key == key) + ).all() + + def set(self, key, value): + """Set the value associated with the given key. + + If the key already exists, the value will be updated. + If the key does not exist, it will be created. + """ + current_value = self.get(key) + if current_value: + for item in current_value: + item.value = value + else: + self.session.add(Statistique(usage_point_id=self.usage_point_id, key=key, value=value)) + self.session.flush() + return True + + def delete(self): + """Delete the statistics associated with the usage point.""" + self.session.execute(delete(Statistique).where(Statistique.usage_point_id == self.usage_point_id)) diff --git a/src/database/tempo.py b/src/database/tempo.py new file mode 100644 index 00000000..51bfc756 --- /dev/null +++ b/src/database/tempo.py @@ -0,0 +1,110 @@ +"""Manage Tempo table in database.""" + +import json +from datetime import datetime + +from sqlalchemy import select + +from db_schema import Tempo, TempoConfig + +from . import DB + + +class DatabaseTempo: + """Manage configuration for the database.""" + + def __init__(self): + """Initialize DatabaseTempo.""" + self.session = DB.session() + + def get(self, order="desc"): + """Retrieve Tempo data from the database. + + Args: + order (str, optional): The order in which to retrieve the data. Defaults to "desc". + + Returns: + list: List of Tempo data. 
+ """ + if order == "desc": + order = Tempo.date.desc() + else: + order = Tempo.date.asc() + return self.session.scalars(select(Tempo).order_by(order)).all() + + def get_range(self, begin, end, order="desc"): + """Retrieve Tempo data within a specified date range from the database. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + order (str, optional): The order in which to retrieve the data. Defaults to "desc". + + Returns: + list: List of Tempo data within the specified date range. + """ + if order == "desc": + order = Tempo.date.desc() + else: + order = Tempo.date.asc() + return self.session.scalars( + select(Tempo).where(Tempo.date >= begin).where(Tempo.date <= end).order_by(order) + ).all() + + def set(self, date, color): + """Set the color for a specific date in the Tempo data. + + Args: + date (datetime): The date for which to set the color. + color (str): The color to set. + + Returns: + bool: True if the operation is successful. + """ + date = datetime.combine(date, datetime.min.time()) + tempo = self.get_range(date, date) + if tempo: + for item in tempo: + item.color = color + else: + self.session.add(Tempo(date=date, color=color)) + self.session.flush() + return True + + # ----------------------------------------------------------------------------------------------------------------- + # TEMPO CONFIG + # ----------------------------------------------------------------------------------------------------------------- + def get_config(self, key): + """Retrieve the value of a configuration key from the database. + + Args: + key (str): The key of the configuration. + + Returns: + Any: The value associated with the key, or None if the key is not found. 
+ """ + query = select(TempoConfig).where(TempoConfig.key == key) + data = self.session.scalars(query).one_or_none() + if data is not None: + data = json.loads(data.value) + self.session.close() + return data + + def set_config(self, key, value): + """Set the value of a configuration key in the database. + + Args: + key (str): The key of the configuration. + value (Any): The value to set. + + Returns: + None + """ + query = select(TempoConfig).where(TempoConfig.key == key) + config = self.session.scalars(query).one_or_none() + if config: + config.value = json.dumps(value) + else: + self.session.add(TempoConfig(key=key, value=json.dumps(value))) + self.session.flush() + self.session.close() diff --git a/src/database/usage_points.py b/src/database/usage_points.py new file mode 100644 index 00000000..3f690634 --- /dev/null +++ b/src/database/usage_points.py @@ -0,0 +1,198 @@ +"""Manage UsagePoints table in database.""" + +from datetime import datetime, timedelta +from typing import List + +from sqlalchemy import delete, select, update +from sqlalchemy.orm import scoped_session + +from const import TIMEZONE_UTC +from db_schema import ( + Addresses, + ConsumptionDaily, + ConsumptionDailyMaxPower, + ConsumptionDetail, + Contracts, + ProductionDaily, + ProductionDetail, + Statistique, + UsagePoints, +) + +from . 
import DB + + +class UsagePointsConfig: # pylint: disable=R0902 + """Default configuration for UsagePoints.""" + + def __init__(self) -> None: + self.usage_point_id: str = "------ SET_YOUR_USAGE_POINT_ID ------" + self.enable: bool = True + self.name: str = "Maison" + self.token: str = "------- SET_YOUR_TOKEN --------" + self.cache: bool = True + self.consumption: bool = True + self.consumption_detail: bool = True + self.consumption_price_base: float = 0 + self.consumption_price_hc: float = 0 + self.consumption_price_hp: float = 0 + self.consumption_max_power: bool = True + self.production: bool = False + self.production_detail: bool = False + self.production_price: float = 0 + self.offpeak_hours_0: str = None + self.offpeak_hours_1: str = None + self.offpeak_hours_2: str = None + self.offpeak_hours_3: str = None + self.offpeak_hours_4: str = None + self.offpeak_hours_5: str = None + self.offpeak_hours_6: str = None + self.plan: str = "BASE" + self.refresh_addresse: bool = False + self.refresh_contract: bool = False + self.consumption_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.consumption_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.production_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.production_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.call_number: int = 0 + self.quota_reached: bool = False + self.quota_limit: bool = False + self.quota_reset_at: datetime = None + self.ban: bool = False + self.consentement_expiration: datetime = None + self.progress: int = 0 + self.progress_status: str = "" + + +class DatabaseUsagePoints: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id=None): + """Initialize DatabaseConfig.""" + self.usage_point_id = usage_point_id + self.session: scoped_session = DB.session() + self.usage_point_config = None + + def get_all(self) -> 
List[UsagePoints]: + """Get all data from usage point table.""" + query = select(UsagePoints) + data = self.session.scalars(query).all() + self.session.close() + return data + + def get(self): + """Get data from usage point table.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def get_plan( + self, + ): + """Get plan from usage point table.""" + data = self.get() + if data.plan in ["HP/HC"]: + return "HC/HP" + return data.plan.upper() + + def set_value(self, key, value): + """Set value in usage point table.""" + values = {key: value} + self.session.execute( + update(UsagePoints, values=values).where(UsagePoints.usage_point_id == self.usage_point_id) + ) + self.session.flush() + self.session.close() + + def set(self, data: dict) -> None: + """Set data from usage point table.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points: UsagePoints = self.session.execute(query).scalar_one_or_none() + if usage_points is not None: + self.session.execute( + update(UsagePoints, values=data).where(UsagePoints.usage_point_id == self.usage_point_id) + ) + else: + usage_points = UsagePoints(usage_point_id=self.usage_point_id) + for key, value in data.items(): + setattr(usage_points, key, value) + self.session.add(usage_points) + self.session.flush() + self.session.close() + + def progress(self, increment): + """Update progress in database.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.scalars(query).one_or_none() + usage_points.progress = usage_points.progress + increment + self.session.close() + + def last_call_update(self) -> None: + """Update last call in database.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.scalars(query).one_or_none() + 
usage_points.last_call = datetime.now(tz=TIMEZONE_UTC) + self.session.flush() + self.session.close() + + def update( # noqa: PLR0913 + self, + consentement_expiration=None, + call_number=None, + quota_reached=None, + quota_limit=None, + quota_reset_at=None, + last_call=None, + ban=None, + ) -> None: + """Update usage point in database.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.scalars(query).one_or_none() + if consentement_expiration is not None: + usage_points.consentement_expiration = consentement_expiration + if call_number is not None: + usage_points.call_number = call_number + if quota_reached is not None: + usage_points.quota_reached = quota_reached + if quota_limit is not None: + usage_points.quota_limit = quota_limit + if quota_reset_at is not None: + usage_points.quota_reset_at = quota_reset_at + if last_call is not None: + usage_points.last_call = last_call + if ban is not None: + usage_points.ban = ban + self.session.flush() + self.session.close() + + def delete(self) -> True: + """Delete usage point from database.""" + self.session.execute(delete(Addresses).where(Addresses.usage_point_id == self.usage_point_id)) + self.session.execute(delete(Contracts).where(Contracts.usage_point_id == self.usage_point_id)) + self.session.execute( + delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + ) + self.session.execute(delete(ConsumptionDetail).where(ConsumptionDetail.usage_point_id == self.usage_point_id)) + self.session.execute(delete(ConsumptionDaily).where(ConsumptionDaily.usage_point_id == self.usage_point_id)) + self.session.execute(delete(ProductionDetail).where(ProductionDetail.usage_point_id == self.usage_point_id)) + self.session.execute(delete(ProductionDaily).where(ProductionDaily.usage_point_id == self.usage_point_id)) + self.session.execute(delete(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id)) + 
self.session.execute(delete(Statistique).where(Statistique.usage_point_id == self.usage_point_id)) + self.session.flush() + self.session.close() + return True + + def get_error_log(self): + """Get error log in usage point table.""" + data = self.get() + return data.last_error + + def set_error_log(self, message): + """Set error log in usage point table.""" + values = {UsagePoints.last_error: message} + self.session.execute( + update(UsagePoints, values=values).where(UsagePoints.usage_point_id == self.usage_point_id) + ) + self.session.flush() + return True diff --git a/src/db_schema.py b/src/db_schema.py index 8bb02b69..43ea8391 100644 --- a/src/db_schema.py +++ b/src/db_schema.py @@ -1,5 +1,7 @@ """This module defines the database schema for the application.""" +import typing + from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String, Text from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship @@ -8,6 +10,8 @@ class Config(Base): + """This class represents the configuration table in the database.""" + __tablename__ = "config" key = Column(Text, primary_key=True, index=True, unique=True) @@ -18,14 +22,16 @@ def __init__(self, key, value): self.value = value def __str__(self): + """Return the string representation of the Config object.""" return self.value class UsagePoints(Base): + """This class represents the usage points table in the database.""" + __tablename__ = "usage_points" usage_point_id = Column(Text, primary_key=True, unique=True, nullable=False, index=True) - name = Column(Text, nullable=False) cache = Column(Boolean, nullable=False, default=False) consumption = Column(Boolean, nullable=False, default=True) @@ -58,10 +64,7 @@ class UsagePoints(Base): last_call = Column(DateTime, nullable=True) ban = Column(Boolean, nullable=True) consumption_max_date = Column(DateTime, nullable=True) - consumption_detail_max_date = Column( - DateTime, - nullable=True, - ) + consumption_detail_max_date = 
Column(DateTime, nullable=True) production_max_date = Column(DateTime, nullable=True) production_detail_max_date = Column(DateTime, nullable=True) consumption_max_power = Column(Boolean, nullable=False, default=True) @@ -77,6 +80,7 @@ class UsagePoints(Base): relation_consumption_daily_max_power = relationship("ConsumptionDailyMaxPower", back_populates="usage_point") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"UsagePoints(" f"usage_point_id={self.usage_point_id!r}, " @@ -122,8 +126,10 @@ def __repr__(self): class Addresses(Base): + """Represents the Addresses class.""" + __tablename__ = "addresses" - __table_args__ = {"sqlite_autoincrement": True} + __table_args__: typing.ClassVar[dict] = {"sqlite_autoincrement": True} id = Column(Integer, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -139,6 +145,7 @@ class Addresses(Base): usage_point = relationship("UsagePoints", back_populates="relation_addressess") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Addresses(" f"id={self.id!r}, " @@ -156,8 +163,10 @@ def __repr__(self): class Contracts(Base): + """Represents the Contracts class.""" + __tablename__ = "contracts" - __table_args__ = {"sqlite_autoincrement": True} + __table_args__: typing.ClassVar[dict] = {"sqlite_autoincrement": True} id = Column(Integer, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -181,6 +190,7 @@ class Contracts(Base): usage_point = relationship("UsagePoints", back_populates="relation_contract") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Contracts(" f"id={self.id!r}, " @@ -206,8 +216,9 @@ def __repr__(self): class ConsumptionDaily(Base): + """Represents the ConsumptionDaily class.""" + __tablename__ = 
"consumption_daily" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -219,6 +230,7 @@ class ConsumptionDaily(Base): usage_point = relationship("UsagePoints", back_populates="relation_consumption_daily") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ConsumptionDaily(" f"id={self.id!r}, " @@ -232,8 +244,9 @@ def __repr__(self): class ConsumptionDetail(Base): + """Represents the ConsumptionDetail class.""" + __tablename__ = "consumption_detail" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -247,6 +260,7 @@ class ConsumptionDetail(Base): usage_point = relationship("UsagePoints", back_populates="relation_consumption_detail") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ConsumptionDetail(" f"id={self.id!r}, " @@ -262,8 +276,9 @@ def __repr__(self): class ProductionDaily(Base): + """Represents the ProductionDaily class.""" + __tablename__ = "production_daily" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -275,6 +290,7 @@ class ProductionDaily(Base): usage_point = relationship("UsagePoints", back_populates="relation_production_daily") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ProductionDaily(" f"id={self.id!r}, " @@ -288,8 +304,9 @@ def __repr__(self): class ProductionDetail(Base): + """Represents the ProductionDetail class.""" + __tablename__ = "production_detail" - # __table_args__ = {'sqlite_autoincrement': True} id = 
Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -303,6 +320,7 @@ class ProductionDetail(Base): usage_point = relationship("UsagePoints", back_populates="relation_production_detail") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ProductionDetail(" f"id={self.id!r}, " @@ -318,8 +336,10 @@ def __repr__(self): class Statistique(Base): + """Represents the Statistique class.""" + __tablename__ = "statistique" - __table_args__ = {"sqlite_autoincrement": True} + __table_args__: typing.ClassVar[dict] = {"sqlite_autoincrement": True} id = Column(Integer, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -329,6 +349,7 @@ class Statistique(Base): usage_point = relationship("UsagePoints", back_populates="relation_stats") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Statistique(" f"id={self.id!r}, " @@ -340,8 +361,9 @@ def __repr__(self): class ConsumptionDailyMaxPower(Base): + """Represents the ConsumptionDailyMaxPower class.""" + __tablename__ = "consumption_daily_max_power" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -354,6 +376,7 @@ class ConsumptionDailyMaxPower(Base): usage_point = relationship("UsagePoints", back_populates="relation_consumption_daily_max_power") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ConsumptionDailyMaxPower(" f"id={self.id!r}, " @@ -368,16 +391,21 @@ def __repr__(self): class Tempo(Base): + """Represents the Tempo class.""" + __tablename__ = "tempo" date = Column(DateTime, primary_key=True, index=True, unique=True) color = 
Column(Text, nullable=False, index=True) def __repr__(self): + """Return the string representation of the Config object.""" return f"Tempo(" f"date={self.date!r}, " f"color={self.color!r}, " f")" class TempoConfig(Base): + """Represents the TempoConfig class.""" + __tablename__ = "tempo_config" key = Column(Text, primary_key=True, index=True, unique=True) @@ -388,10 +416,13 @@ def __init__(self, key, value): self.value = value def __str__(self): + """Return the string representation of the Config object.""" return self.value class Ecowatt(Base): + """Represents the Ecowatt class.""" + __tablename__ = "ecowatt" date = Column(DateTime, primary_key=True, index=True, unique=True) @@ -400,6 +431,7 @@ class Ecowatt(Base): detail = Column(Text, nullable=False, index=True) def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Ecowatt(" f"date={self.date!r}, " diff --git a/src/dependencies.py b/src/dependencies.py deleted file mode 100755 index db935730..00000000 --- a/src/dependencies.py +++ /dev/null @@ -1,230 +0,0 @@ -"""This module contains dependencies for the application.""" -import datetime -import logging -from math import floor -from os import environ, getenv - -from art import decor, text2art - -from __version__ import VERSION - -if environ.get("APPLICATION_PATH") is None: - APPLICATION_PATH = "/app" -else: - APPLICATION_PATH = environ.get("APPLICATION_PATH") - -if environ.get("APPLICATION_PATH_DATA") is None: - APPLICATION_PATH_DATA = "/data" -else: - APPLICATION_PATH_DATA = getenv("APPLICATION_PATH_DATA") - -if environ.get("APPLICATION_PATH_LOG") is None: - APPLICATION_PATH_LOG = "/log" -else: - APPLICATION_PATH_LOG = getenv("APPLICATION_PATH_LOG") - - -def daterange(start_date, end_date): - """Generate a range of dates between the start_date and end_date. - - Args: - start_date (datetime.date): The start date of the range. - end_date (datetime.date): The end date of the range. 
- - Yields: - datetime.date: The dates in the range. - - """ - for n in range(int((end_date - start_date).days)): - yield start_date + datetime.timedelta(n) - - -def is_bool(v): - """Check if a value is a boolean. - - Args: - v (any): The value to check. - - Returns: - bool: True if the value is a boolean, False otherwise. - - """ - if v in ["true", "false", "yes, no", "t, f", "y, n", 1, 0]: - return True - return False - - -def str2bool(v): - """Convert a string representation of a boolean value to a boolean. - - Args: - v (str): The string representation of the boolean value. - - Returns: - bool: The boolean value. - - """ - if type(v) != bool: - return v and v.lower() in ("yes", "true", "t", "1") - else: - return v - - -def is_float(element): - """Check if a value can be converted to a float. - - Args: - element (any): The value to check. - - Returns: - bool: True if the value can be converted to a float, False otherwise. - - """ - try: - float(element) - return True - except ValueError: - return False - - -def is_integer(element): - """Check if a value can be converted to an integer. - - Args: - element (any): The value to check. - - Returns: - bool: True if the value can be converted to an integer, False otherwise. - - """ - try: - return float(element).is_integer() - except ValueError: - return False - - -def reformat_json(yaml): - """Reformat a JSON object. - - Args: - yaml (dict): The JSON object to reformat. - - Returns: - dict: The reformatted JSON object. - - """ - result = {} - for key, value in yaml.items(): - if value in ["true", "false"]: - result[key] = str2bool(value) - elif type(value) == dict: - result[key] = value - elif not isinstance(value, bool) and is_float(value): - result[key] = float(value) - else: - result[key] = value - return result - - -def truncate(f, n=2): - """Truncate a float number to a specified number of decimal places. - - Args: - f (float): The float number to truncate. 
- n (int, optional): The number of decimal places to keep. Defaults to 2. - - Returns: - float: The truncated float number. - - """ - return floor(f * 10**n) / 10**n - - -def title(message): - """Print a title message. - - Args: - message (str or list): The message or list of messages to print as a title. - - """ - separator() - if type(message) is list: - for msg in message: - logging.info(f"{msg.upper()}") - else: - logging.info(f"{message.upper()}") - separator() - - -def title_warning(message): - """Print a warning message with a title format. - - Args: - message (str): The warning message to print. - - """ - separator_warning() - logging.warning(f" {message.upper()}") - separator_warning() - - -def separator(): - """Print a separator line.""" - logging.info( - "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ ❖ ◦ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - ) - - -def separator_warning(): - """Print a warning separator line.""" - logging.warning( - "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ▲ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - ) - - -def export_finish(): - """Finish the export process.""" - logging.info( - "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ TERMINE ◦ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" - ) - - -def log_usage_point_id(usage_point_id): - """Log the usage point ID. - - Args: - usage_point_id (str): The usage point ID to log. 
- """ - text = f"Point de livraison : {usage_point_id}" - separator() - logging.info(f'{decor("barcode1")}{text: ^93}{decor("barcode1", reverse=True)}') - separator() - - -def finish(): - """Finish the import process.""" - separator() - for line in text2art("Import Finish!!!").splitlines(): - logging.info(f'{decor("barcode1")}{line: ^93}{decor("barcode1", reverse=True)}') - separator() - - -def get_version(): - """Return the version of the module.""" - return VERSION - - -def logo(version): - Art = text2art("MyElectricalData") - separator() - for line in Art.splitlines(): - logging.info(f'{decor("barcode1")}{line: ^93}{decor("barcode1", reverse=True)}') - separator() - version = f"VERSION : {version}" - logging.info(f'{decor("barcode1")}{version: ^93}{decor("barcode1", reverse=True)}') - separator() - -def chunks_list(lst, n): - """Yield successive n-sized chunks from lst.""" - for i in range(0, len(lst), n): - yield lst[i:i + n] diff --git a/src/external_services/home_assistant/main.py b/src/external_services/home_assistant/main.py new file mode 100644 index 00000000..5452c169 --- /dev/null +++ b/src/external_services/home_assistant/main.py @@ -0,0 +1,802 @@ +"""This module contains the code for exporting data to Home Assistant.""" +import inspect +import json +import logging +import traceback +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from config.myelectricaldata import UsagePointId +from const import TIMEZONE +from database.contracts import Contracts, DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.ecowatt import DatabaseEcowatt +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from external_services.mqtt.client import Mqtt +from models.stat import Stat +from utils import convert_kw, convert_kw_to_euro, convert_price, get_version + + +class HomeAssistant: # 
pylint: disable=R0902 + """Represents a Home Assistant instance.""" + + def __init__(self, usage_point_id): + self.usage_point_id = usage_point_id + self.usage_point: UsagePointId = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id] + self.contract: Contracts = DatabaseContracts(self.usage_point_id).get() + self.mqtt = Mqtt() + self.date_format = "%Y-%m-%d" + self.date_format_detail = "%Y-%m-%d %H:%M:%S" + self.tempo_color = None + + def export(self): + """Export data to Home Assistant. + + This method exports consumption, production, tempo, and ecowatt data to Home Assistant. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + try: + if self.mqtt.valid: + if self.usage_point.consumption or self.usage_point.consumption_detail: + logging.info("Consommation :") + self.myelectricaldata_usage_point_id("consumption") + self.last_x_day(5, "consumption") + self.history_usage_point_id("consumption") + + if self.usage_point.production or self.usage_point.production_detail: + logging.info("Production :") + self.myelectricaldata_usage_point_id("production") + self.last_x_day(5, "production") + self.history_usage_point_id("production") + + self.tempo() + self.tempo_info() + self.tempo_days() + self.tempo_price() + self.ecowatt() + else: + logging.critical("=> Export MQTT Désactivée (Echec de connexion)") + except Exception: + traceback.print_exc() + + def sensor(self, **kwargs): + """Publish sensor data to Home Assistant. + + This method publishes sensor data to Home Assistant using MQTT. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info( + "- sensor.%s_%s", + kwargs["device_name"].lower().replace(" ", "_"), + kwargs["name"].lower().replace(" ", "_"), + ) + topic = f"{APP_CONFIG.home_assistant.discovery_prefix}/sensor/{kwargs['topic']}" + if "device_class" not in kwargs: + device_class = None + else: + device_class = kwargs["device_class"] + config = { + "name": f"{kwargs['name']}", + "uniq_id": kwargs["uniq_id"], + "stat_t": f"{topic}/state", + "json_attr_t": f"{topic}/attributes", + "device_class": device_class, + "device": { + "identifiers": kwargs["device_identifiers"], + "name": kwargs["device_name"], + "model": kwargs["device_model"], + "manufacturer": "MyElectricalData", + }, + } + if "unit_of_measurement" in kwargs: + config["unit_of_measurement"] = kwargs["unit_of_measurement"] + if "numPDL" in kwargs: + config["numPDL"] = kwargs["numPDL"] + attributes_params = {} + if "attributes" in kwargs: + attributes_params = kwargs["attributes"] + activation_date = getattr(self.contract, "last_activation_date", None) + if activation_date is not None: + activation_date = activation_date.strftime(self.date_format) + attributes = { + **attributes_params, + **{ + "version": get_version(), + "activationDate": activation_date, + "lastUpdate": datetime.now(tz=TIMEZONE).strftime(self.date_format_detail), + "timeLastCall": datetime.now(tz=TIMEZONE).strftime(self.date_format_detail), + }, + } + + data = { + "config": json.dumps(config), + "state": kwargs["state"], + "attributes": json.dumps(attributes), + } + return self.mqtt.publish_multiple(data, topic) + + def last_x_day(self, days, measurement_direction): + """Get data for the last x days and publish it to Home Assistant. + + Args: + days (int): The number of days to retrieve data for. + measurement_direction (str): The direction of the measurement (e.g., consumption or production). 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_last{days}day" + end = datetime.combine(datetime.now(tz=TIMEZONE) - timedelta(days=1), datetime.max.time()) + begin = datetime.combine(end - timedelta(days), datetime.min.time()) + range_detail = DatabaseDetail(self.usage_point_id, measurement_direction).get_range(begin, end) + attributes = {"time": [], measurement_direction: []} + for data in range_detail: + attributes["time"].append(data.date.strftime("%Y-%m-%d %H:%M:%S")) + attributes[measurement_direction].append(data.value) + self.sensor( + topic=f"myelectricaldata_{measurement_direction}_last_{days}_day/{self.usage_point_id}", + name=f"{measurement_direction}.last{days}day", + device_name=f"Linky {self.usage_point_id}", + device_model=f"linky {self.usage_point_id}", + device_identifiers=f"{self.usage_point_id}", + uniq_id=uniq_id, + unit_of_measurement="kWh", + attributes=attributes, + state=days, + device_class="energy", + numPDL=self.usage_point_id, + ) + + def history_usage_point_id(self, measurement_direction): + """Retrieve the historical usage point ID and publishes it to Home Assistant. + + Args: + measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_history" + stats = Stat(self.usage_point_id, measurement_direction) + state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() + if state: + state = state.value + else: + state = 0 + state = convert_kw(state) + attributes = {"yesterdayDate": stats.daily(0)["begin"]} + self.sensor( + topic=f"myelectricaldata_{measurement_direction}_history/{self.usage_point_id}", + name=f"{measurement_direction}.history", + device_name=f"Linky {self.usage_point_id}", + device_model=f"linky {self.usage_point_id}", + device_identifiers=f"{self.usage_point_id}", + uniq_id=uniq_id, + unit_of_measurement="kWh", + attributes=attributes, + state=state, + device_class="energy", + numPDL=self.usage_point_id, + ) + + def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR0915, C901 + """Retrieve the usage point ID and calculates various statistics related to energy consumption. + + Args: + measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). + + Returns: + dict: A dictionary containing various statistics related to energy consumption, such as daily, weekly, + monthly, and yearly values. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + stats = Stat(self.usage_point_id, measurement_direction) + state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() + if state: + state = state.value + else: + state = 0 + + offpeak_hours_enedis = ( + f"Lundi ({self.usage_point.offpeak_hours_0});" + f"Mardi ({self.usage_point.offpeak_hours_1});" + f"Mercredi ({self.usage_point.offpeak_hours_2});" + f"Jeudi ({self.usage_point.offpeak_hours_3});" + f"Vendredi ({self.usage_point.offpeak_hours_4});" + f"Samedi ({self.usage_point.offpeak_hours_5});" + f"Dimanche ({self.usage_point.offpeak_hours_6});" + ) + + offpeak_hours = [] + idx = 0 + max_day = 6 + while idx <= max_day: + _offpeak_hours = [] + offpeak_hour = getattr(self.usage_point, f"offpeak_hours_{idx}") + if not isinstance(offpeak_hour, str): + logging.error( + [ + f"offpeak_hours_{idx} n'est pas une chaine de caractères", + " Format si une seule période : 00H00-06H00", + " Format si plusieurs périodes : 00H00-06H00;12H00-14H00", + ] + ) + else: + for offpeak_hours_data in getattr(self.usage_point, f"offpeak_hours_{idx}").split(";"): + if isinstance(offpeak_hours_data, str): + _offpeak_hours.append(offpeak_hours_data.split("-")) + + offpeak_hours.append(_offpeak_hours) + idx = idx + 1 + + yesterday = datetime.combine(datetime.now(tz=TIMEZONE) - relativedelta(days=1), datetime.max.time()) + previous_week = datetime.combine(yesterday - relativedelta(days=7), datetime.min.time()) + yesterday_last_year = yesterday - relativedelta(years=1) + + info = { + "yesterday": yesterday.strftime(self.date_format), + "previous_week": previous_week.strftime(self.date_format), + "yesterday_last_year": yesterday_last_year.strftime(self.date_format), + } + + # current_week + current_week = stats.current_week() + current_week_value = current_week["value"] + info["current_week"] = { + "begin": current_week["begin"], + "end": current_week["end"], + } + # 
last_week + last_week = stats.last_week() + last_week_value = last_week["value"] + info["last_week"] = {"begin": last_week["begin"], "end": last_week["end"]} + # current_week_last_year + current_week_last_year = stats.current_week_last_year() + current_week_last_year_value = current_week_last_year["value"] + info["current_week_last_year"] = { + "begin": current_week_last_year["begin"], + "end": current_week_last_year["end"], + } + # last_month + last_month = stats.last_month() + last_month_value = last_month["value"] + info["last_month"] = {"begin": last_month["begin"], "end": last_month["end"]} + # current_month + current_month = stats.current_month() + current_month_value = current_month["value"] + info["current_month"] = { + "begin": current_month["begin"], + "end": current_month["end"], + } + # current_month_last_year + current_month_last_year = stats.current_month_last_year() + current_month_last_year_value = current_month_last_year["value"] + info["current_month_last_year"] = { + "begin": current_month_last_year["begin"], + "end": current_month_last_year["end"], + } + # last_month_last_year + last_month_last_year = stats.last_month_last_year() + last_month_last_year_value = last_month_last_year["value"] + info["last_month_last_year"] = { + "begin": last_month_last_year["begin"], + "end": last_month_last_year["end"], + } + # current_year + current_year = stats.current_year() + current_year_value = current_year["value"] + info["current_year"] = { + "begin": current_year["begin"], + "end": current_year["end"], + } + # current_year_last_year + current_year_last_year = stats.current_year_last_year() + current_year_last_year_value = current_year_last_year["value"] + info["current_year_last_year"] = { + "begin": current_year_last_year["begin"], + "end": current_year_last_year["end"], + } + # last_year + last_year = stats.last_year() + last_year_value = last_year["value"] + info["last_year"] = {"begin": last_year["begin"], "end": last_year["end"]} + # yesterday_hc_hp 
+ yesterday_hc_hp = stats.yesterday_hc_hp() + yesterday_hc_value = yesterday_hc_hp["value"]["hc"] + yesterday_hp_value = yesterday_hc_hp["value"]["hp"] + info["yesterday_hc_hp"] = { + "begin": yesterday_hc_hp["begin"], + "end": yesterday_hc_hp["end"], + } + + # evolution + peak_offpeak_percent = stats.peak_offpeak_percent() + current_week_evolution = stats.current_week_evolution() + current_month_evolution = stats.current_month_evolution() + yesterday_evolution = stats.yesterday_evolution() + monthly_evolution = stats.monthly_evolution() + yearly_evolution = stats.yearly_evolution() + yesterday_last_year = DatabaseDaily(self.usage_point_id).get_date( + datetime.combine(yesterday_last_year, datetime.min.time()), + ) + dailyweek_cost = [] + dailyweek_hp = [] + dailyweek_cost_hp = [] + dailyweek_hc = [] + dailyweek_cost_hc = [] + yesterday_hp_value_cost = 0 + if measurement_direction == "consumption": + daily_cost = 0 + plan = self.usage_point.plan + if plan == "HC/HP": + for i in range(7): + hp = stats.detail(i, "HP")["value"] + hc = stats.detail(i, "HC")["value"] + dailyweek_hp.append(convert_kw(hp)) + dailyweek_hc.append(convert_kw(hc)) + cost_hp = convert_kw_to_euro(hp, self.usage_point.consumption_price_hp) + cost_hc = convert_kw_to_euro(hc, self.usage_point.consumption_price_hc) + dailyweek_cost_hp.append(cost_hp) + dailyweek_cost_hc.append(cost_hc) + value = cost_hp + cost_hc + if i == 0: + daily_cost = value + elif i == 1: + yesterday_hp_value_cost = convert_kw_to_euro(hp, self.usage_point.consumption_price_hp) + dailyweek_cost.append(round(value, 1)) + elif plan == "TEMPO": + tempo_config = DatabaseTempo().get_config("price") + for i in range(7): + tempo_data = stats.tempo(i)["value"] + hp = tempo_data["blue_hp"] + tempo_data["white_hp"] + tempo_data["red_hp"] + hc = tempo_data["blue_hc"] + tempo_data["white_hc"] + tempo_data["red_hc"] + dailyweek_hp.append(convert_kw(hp)) + dailyweek_hc.append(convert_kw(hc)) + cost_hp = ( + convert_kw_to_euro( + 
tempo_data["blue_hp"], + convert_price(tempo_config["blue_hp"]), + ) + + convert_kw_to_euro( + tempo_data["white_hp"], + convert_price(tempo_config["white_hp"]), + ) + + convert_kw_to_euro(tempo_data["red_hp"], convert_price(tempo_config["red_hp"])) + ) + cost_hc = ( + convert_kw_to_euro( + tempo_data["blue_hc"], + convert_price(tempo_config["blue_hc"]), + ) + + convert_kw_to_euro( + tempo_data["white_hc"], + convert_price(tempo_config["white_hc"]), + ) + + convert_kw_to_euro(tempo_data["red_hc"], convert_price(tempo_config["red_hc"])) + ) + dailyweek_cost_hp.append(cost_hp) + dailyweek_cost_hc.append(cost_hc) + value = cost_hp + cost_hc + if i == 0: + daily_cost = value + elif i == 1: + yesterday_hp_value_cost = cost_hp + dailyweek_cost.append(round(value, 1)) + else: + for i in range(7): + hour_hp = stats.detail(i, "HP")["value"] + hour_hc = stats.detail(i, "HC")["value"] + dailyweek_hp.append(convert_kw(hour_hp)) + dailyweek_hc.append(convert_kw(hour_hc)) + dailyweek_cost_hp.append(convert_kw_to_euro(hour_hp, self.usage_point.consumption_price_base)) + dailyweek_cost_hc.append(convert_kw_to_euro(hour_hc, self.usage_point.consumption_price_base)) + dailyweek_cost.append( + convert_kw_to_euro(stats.daily(i)["value"], self.usage_point.consumption_price_base) + ) + if i == 0: + daily_cost = convert_kw_to_euro( + stats.daily(0)["value"], self.usage_point.consumption_price_base + ) + elif i == 1: + yesterday_hp_value_cost = convert_kw_to_euro( + hour_hp, self.usage_point.consumption_price_base + ) + else: + daily_cost = convert_kw_to_euro(stats.daily(0)["value"], self.usage_point.production_price) + for i in range(7): + dailyweek_cost.append( + convert_kw_to_euro(stats.daily(i)["value"], self.usage_point.production_price) + ) + + if not dailyweek_hp: + dailyweek_hp = [0, 0, 0, 0, 0, 0, 0, 0] + if not dailyweek_cost_hp: + dailyweek_cost_hp = [0, 0, 0, 0, 0, 0, 0, 0] + if not dailyweek_hc: + dailyweek_hc = [0, 0, 0, 0, 0, 0, 0, 0] + if not dailyweek_cost_hc: + 
dailyweek_cost_hc = [0, 0, 0, 0, 0, 0, 0, 0] + + yesterday_consumption_max_power = 0 + if self.usage_point.consumption_max_power: + yesterday_consumption_max_power = stats.max_power(0)["value"] + + error_last_call = DatabaseUsagePoints(self.usage_point_id).get_error_log() + if error_last_call is None: + error_last_call = "" + + attributes = { + "yesterdayDate": stats.daily(0)["begin"], + "yesterday": convert_kw(stats.daily(0)["value"]), + "serviceEnedis": "myElectricalData", + "yesterdayLastYearDate": (datetime.now(tz=TIMEZONE) - relativedelta(years=1)).strftime( + self.date_format + ), + "yesterdayLastYear": convert_kw(yesterday_last_year.value) + if hasattr(yesterday_last_year, "value") + else 0, + "daily": [ + convert_kw(stats.daily(0)["value"]), + convert_kw(stats.daily(1)["value"]), + convert_kw(stats.daily(2)["value"]), + convert_kw(stats.daily(3)["value"]), + convert_kw(stats.daily(4)["value"]), + convert_kw(stats.daily(5)["value"]), + convert_kw(stats.daily(6)["value"]), + ], + "current_week": convert_kw(current_week_value), + "last_week": convert_kw(last_week_value), + "day_1": convert_kw(stats.daily(0)["value"]), + "day_2": convert_kw(stats.daily(1)["value"]), + "day_3": convert_kw(stats.daily(2)["value"]), + "day_4": convert_kw(stats.daily(3)["value"]), + "day_5": convert_kw(stats.daily(4)["value"]), + "day_6": convert_kw(stats.daily(5)["value"]), + "day_7": convert_kw(stats.daily(6)["value"]), + "current_week_last_year": convert_kw(current_week_last_year_value), + "last_month": convert_kw(last_month_value), + "current_month": convert_kw(current_month_value), + "current_month_last_year": convert_kw(current_month_last_year_value), + "last_month_last_year": convert_kw(last_month_last_year_value), + "last_year": convert_kw(last_year_value), + "current_year": convert_kw(current_year_value), + "current_year_last_year": convert_kw(current_year_last_year_value), + "dailyweek": [ + stats.daily(0)["begin"], + stats.daily(1)["begin"], + stats.daily(2)["begin"], + 
stats.daily(3)["begin"], + stats.daily(4)["begin"], + stats.daily(5)["begin"], + stats.daily(6)["begin"], + ], + "dailyweek_cost": dailyweek_cost, + "dailyweek_costHP": dailyweek_cost_hp, + "dailyweek_HP": dailyweek_hp, + "dailyweek_costHC": dailyweek_cost_hc, + "dailyweek_HC": dailyweek_hc, + "daily_cost": daily_cost, + "yesterday_HP_cost": yesterday_hp_value_cost, + "yesterday_HP": convert_kw(yesterday_hp_value), + "day_1_HP": stats.detail(0, "HP")["value"], + "day_2_HP": stats.detail(1, "HP")["value"], + "day_3_HP": stats.detail(2, "HP")["value"], + "day_4_HP": stats.detail(3, "HP")["value"], + "day_5_HP": stats.detail(4, "HP")["value"], + "day_6_HP": stats.detail(5, "HP")["value"], + "day_7_HP": stats.detail(6, "HP")["value"], + "yesterday_HC_cost": convert_kw_to_euro(yesterday_hc_value, self.usage_point.consumption_price_hc), + "yesterday_HC": convert_kw(yesterday_hc_value), + "day_1_HC": stats.detail(0, "HC")["value"], + "day_2_HC": stats.detail(1, "HC")["value"], + "day_3_HC": stats.detail(2, "HC")["value"], + "day_4_HC": stats.detail(3, "HC")["value"], + "day_5_HC": stats.detail(4, "HC")["value"], + "day_6_HC": stats.detail(5, "HC")["value"], + "day_7_HC": stats.detail(6, "HC")["value"], + "peak_offpeak_percent": round(peak_offpeak_percent, 2), + "yesterdayConsumptionMaxPower": yesterday_consumption_max_power, + "dailyweek_MP": [ + convert_kw(stats.max_power(0)["value"]), + convert_kw(stats.max_power(1)["value"]), + convert_kw(stats.max_power(2)["value"]), + convert_kw(stats.max_power(3)["value"]), + convert_kw(stats.max_power(4)["value"]), + convert_kw(stats.max_power(5)["value"]), + convert_kw(stats.max_power(6)["value"]), + ], + "dailyweek_MP_time": [ + (stats.max_power_time(0)["value"]), + (stats.max_power_time(1)["value"]), + (stats.max_power_time(2)["value"]), + (stats.max_power_time(3)["value"]), + (stats.max_power_time(4)["value"]), + (stats.max_power_time(5)["value"]), + (stats.max_power_time(6)["value"]), + ], + "dailyweek_MP_over": [ + 
stats.max_power_over(0)["value"], + stats.max_power_over(1)["value"], + stats.max_power_over(2)["value"], + stats.max_power_over(3)["value"], + stats.max_power_over(4)["value"], + stats.max_power_over(5)["value"], + stats.max_power_over(6)["value"], + ], + "dailyweek_Tempo": [ + stats.tempo_color(0)["value"], + stats.tempo_color(1)["value"], + stats.tempo_color(2)["value"], + stats.tempo_color(3)["value"], + stats.tempo_color(4)["value"], + stats.tempo_color(5)["value"], + stats.tempo_color(6)["value"], + ], + "monthly_evolution": round(monthly_evolution, 2), + "current_week_evolution": round(current_week_evolution, 2), + "current_month_evolution": round(current_month_evolution, 2), + "yesterday_evolution": round(yesterday_evolution, 2), + "yearly_evolution": round(yearly_evolution, 2), + "friendly_name": f"myelectricaldata.{self.usage_point_id}", + "errorLastCall": error_last_call, + "errorLastCallInterne": "", + "current_week_number": yesterday.strftime("%V"), + "offpeak_hours_enedis": offpeak_hours_enedis, + "offpeak_hours": offpeak_hours, + "subscribed_power": getattr(self.contract, "subscribed_power", None) + # "info": info + } + + uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}" + self.sensor( + topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}", + name=f"{measurement_direction}", + device_name=f"Linky {self.usage_point_id}", + device_model=f"linky {self.usage_point_id}", + device_identifiers=f"{self.usage_point_id}", + uniq_id=uniq_id, + unit_of_measurement="kWh", + attributes=attributes, + state=convert_kw(state), + device_class="energy", + numPDL=self.usage_point_id, + ) + + def tempo(self): + """Add a sensor to Home Assistant with the tempo data for today and tomorrow. 
+ + Returns: + None + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = "myelectricaldata_tempo_today" + begin = datetime.combine(datetime.now(tz=TIMEZONE), datetime.min.time()) + end = datetime.combine(datetime.now(tz=TIMEZONE), datetime.max.time()) + tempo_data = DatabaseTempo().get_range(begin, end, "asc") + if tempo_data: + date = tempo_data[0].date.strftime(self.date_format_detail) + state = tempo_data[0].color + else: + date = begin.strftime(self.date_format_detail) + state = "Inconnu" + attributes = {"date": date} + self.tempo_color = state + self.sensor( + topic="myelectricaldata_rte/tempo_today", + name="Today", + device_name="RTE Tempo", + device_model="RTE", + device_identifiers="rte_tempo", + uniq_id=uniq_id, + attributes=attributes, + state=state, + ) + + uniq_id = "myelectricaldata_tempo_tomorrow" + begin = begin + timedelta(days=1) + end = end + timedelta(days=1) + if tempo_data: + date = tempo_data[0].date.strftime(self.date_format_detail) + state = tempo_data[0].color + else: + date = begin.strftime(self.date_format_detail) + state = "Inconnu" + attributes = {"date": date} + self.sensor( + topic="myelectricaldata_rte/tempo_tomorrow", + name="Tomorrow", + device_name="RTE Tempo", + device_model="RTE", + device_identifiers="rte_tempo", + uniq_id=uniq_id, + attributes=attributes, + state=state, + ) + + def tempo_days(self): + """Add tempo days sensors to Home Assistant. + + This method retrieves tempo days configuration from the database + and creates sensors for each color and corresponding number of days. + + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + tempo_days = DatabaseTempo().get_config("days") + for color, days in tempo_days.items(): + self.tempo_days_sensor(f"{color}", days) + + def tempo_days_sensor(self, color, days): + """Add a sensor to Home Assistant with the given name and state. 
+ + Args: + color (str): The color of the tempo (e.g. blue, white, red). + days (int): The number of days in the tempo. + + Returns: + None + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_tempo_days_{color}" + self.sensor( + topic=f"myelectricaldata_edf/tempo_days_{color}", + name=f"Days {color.capitalize()}", + device_name="EDF Tempo", + device_model="EDF", + device_identifiers="edf_tempo", + uniq_id=uniq_id, + state=days, + ) + + def tempo_info(self): + """Add tempo information sensor to Home Assistant. + + This method retrieves tempo configuration from the database + and creates a sensor with information about tempo days and prices. + + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + tempo_begin = 6 + tempo_end = 22 + uniq_id = "myelectricaldata_tempo_info" + tempo_days = DatabaseTempo().get_config("days") + tempo_price = DatabaseTempo().get_config("price") + if tempo_end > int(datetime.now(tz=TIMEZONE).strftime("%H")) < tempo_begin: + measure_type = "hc" + else: + measure_type = "hp" + current_price = None + if self.tempo_color.lower() in ["blue", "white", "red"]: + current_price = convert_price( + tempo_price[f"{self.tempo_color.lower()}_{measure_type}"].replace(",", ".") + ) + attributes = { + "days_blue": f'{tempo_days["blue"]} / 300', + "days_white": f'{tempo_days["white"]} / 43', + "days_red": f'{tempo_days["red"]} / 22', + "price_blue_hp": convert_price(tempo_price["blue_hp"]), + "price_blue_hc": convert_price(tempo_price["blue_hc"]), + "price_white_hp": convert_price(tempo_price["white_hp"]), + "price_white_hc": convert_price(tempo_price["white_hc"]), + "price_red_hp": convert_price(tempo_price["red_hp"]), + "price_red_hc": convert_price(tempo_price["red_hc"]), + } + self.sensor( + topic="myelectricaldata_edf/tempo_info", + name="Info", + device_name="EDF Tempo", + device_model="EDF", 
+ device_identifiers="edf_tempo", + uniq_id=uniq_id, + attributes=attributes, + state=current_price, + unit_of_measurement="EUR/kWh", + ) + + def tempo_price(self): + """Add tempo price sensors to Home Assistant. + + This method retrieves tempo price configuration from the database + and creates sensors for each color with corresponding price. + + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + tempo_price = DatabaseTempo().get_config("price") + for color, price in tempo_price.items(): + self.tempo_price_sensor( + f"{color}", + float(price.replace(",", ".")), + f"{color.split('_')[0].capitalize()}{color.split('_')[1].capitalize()}", + ) + + def tempo_price_sensor(self, color, price, name): + """Add tempo price sensor to Home Assistant. + + This method creates a sensor for a specific tempo color with the corresponding price. + + Args: + color (str): The color of the tempo. + price (float): The price of the tempo. + name (str): The name of the tempo. + + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_tempo_price_{color}" + name = f"{name[0:-2]} {name[-2:]}" + self.sensor( + topic=f"myelectricaldata_edf/tempo_price_{color}", + name=f"Price {name}", + device_name="EDF Tempo", + device_model="EDF", + device_identifiers="edf_tempo", + uniq_id=uniq_id, + state=convert_price(price), + unit_of_measurement="EUR/kWh", + ) + + def ecowatt(self): + """Calculate the ecowatt sensor values for different delta values. + + This method calculates the ecowatt sensor values for different delta values (0, 1, and 2). + It calls the `ecowatt_delta` method with the corresponding delta values. 
+ + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + try: + if self.mqtt.valid: + self.ecowatt_delta("J0", 0) + self.ecowatt_delta("J1", 1) + self.ecowatt_delta("J2", 2) + else: + logging.critical("=> Export MQTT Désactivée (Echec de connexion)") + except Exception: + traceback.print_exc() + + def ecowatt_delta(self, name, delta): + """Calculate the delta value for the ecowatt sensor. + + Args: + name (str): The name of the ecowatt sensor. + delta (int): The number of days to calculate the delta. + + Returns: + None + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + uniq_id = f"myelectricaldata_ecowatt_{name}" + current_date = datetime.combine(datetime.now(tz=TIMEZONE), datetime.min.time()) + timedelta(days=delta) + fetch_date = current_date - timedelta(days=1) + ecowatt_data = DatabaseEcowatt().get_range(fetch_date, fetch_date, "asc") + day_value = 0 + if ecowatt_data: + forecast = {} + for data in ecowatt_data: + day_value = data.value + for date, value in json.loads(data.detail.replace("'", '"')).items(): + date_datetime = datetime.strptime(date, self.date_format_detail).replace(tzinfo=TIMEZONE) + forecast[f'{date_datetime.strftime("%H")} h'] = value + attributes = { + "date": current_date.strftime(self.date_format), + "forecast": forecast, + } + self.sensor( + topic=f"myelectricaldata_rte/ecowatt_{name}", + name=f"{name}", + device_name="RTE EcoWatt", + device_model="RTE", + device_identifiers="rte_ecowatt", + uniq_id=uniq_id, + attributes=attributes, + state=day_value, + ) diff --git a/src/external_services/home_assistant_ws/main.py b/src/external_services/home_assistant_ws/main.py new file mode 100644 index 00000000..0d28cd51 --- /dev/null +++ b/src/external_services/home_assistant_ws/main.py @@ -0,0 +1,511 @@ +"""Import data in statistique recorder of Home Assistant.""" + +import inspect +import json +import logging 
+import ssl +import traceback +from datetime import datetime, timedelta + +import websocket + +from config.main import APP_CONFIG +from config.myelectricaldata import UsagePointId +from const import TEMPO_BEGIN, TIMEZONE, URL_CONFIG_FILE +from database.config import DatabaseConfig +from database.detail import DatabaseDetail +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from models.stat import Stat +from utils import chunks_list + + +class HomeAssistantWs: + """Class to interact with Home Assistant WebSocket API.""" + + def __init__(self, usage_point_id): + """Initialize the class with the usage point id. + + Args: + usage_point_id (str): The usage point id + """ + self.websocket: websocket.WebSocket = None + self.usage_point_id = usage_point_id + self.usage_point_id_config: UsagePointId = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id] + self.id = 1 + self.purge_force = False + self.current_stats = [] + if self.connect(): + self.import_data() + else: + logging.critical("La configuration Home Assistant WebSocket est erronée") + if self.websocket.connected: + self.websocket.close() + + def connect(self): + """Connect to the Home Assistant WebSocket server. 
+ + Returns: + bool: True if the connection is successful, False otherwise + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + try: + prefix = "ws" + sslopt = None + if APP_CONFIG.home_assistant_ws.ssl: + sslopt = {"cert_reqs": ssl.CERT_NONE} + prefix = "wss" + self.uri = f"{prefix}://{APP_CONFIG.home_assistant_ws.url}/api/websocket" + self.websocket = websocket.WebSocket(sslopt=sslopt) + logging.info("Connexion au WebSocket Home Assistant %s", self.uri) + self.websocket.connect(self.uri, timeout=5) + output = json.loads(self.websocket.recv()) + if "type" in output and output["type"] == "auth_required": + logging.info("Authentification requise") + return self.authentificate() + return True + except Exception as _e: + self.websocket.close() + logging.error( + f""" + Impossible de se connecter au WebSocket Home Assistant. + + Vous pouvez récupérer un exemple ici : +{URL_CONFIG_FILE} +""" + ) + + def authentificate(self): + """Authenticate with the Home Assistant WebSocket server. + + Returns: + bool: True if the authentication is successful, False otherwise + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + data = {"type": "auth", "access_token": APP_CONFIG.home_assistant_ws.token} + auth_output = self.send(data) + if auth_output["type"] == "auth_ok": + logging.info(" => OK") + return True + logging.error(" => Authentification impossible, merci de vérifier votre url & token.") + return False + + def send(self, data): + """Send data to the Home Assistant WebSocket server. 
+ + Args: + data (dict): The data to send + Returns: + dict: The output from the server + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + self.websocket.send(json.dumps(data)) + self.id = self.id + 1 + output = json.loads(self.websocket.recv()) + if "type" in output and output["type"] == "result": + if not output["success"]: + logging.error(f"Erreur d'envoi : {data}") + logging.error(output) + return output + + def list_data(self): + """List the data already cached in Home Assistant. + + Returns: + dict: The list of data + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Liste les données déjà en cache.") + import_statistics = { + "id": self.id, + "type": "recorder/list_statistic_ids", + "statistic_type": "sum", + } + current_stats = self.send(import_statistics) + for stats in current_stats["result"]: + if stats["statistic_id"].startswith("myelectricaldata:"): + self.current_stats.append(stats["statistic_id"]) + return current_stats + + def clear_data(self, statistic_ids): + """Clear the data imported into Energy. + + Args: + statistic_ids (list): The list of statistic ids + Returns: + dict: The output from clearing the data + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info("Effacement des données importées dans Energy.") + for key in statistic_ids: + logging.info(f" - {key}") + clear_statistics = { + "id": self.id, + "type": "recorder/clear_statistics", + "statistic_ids": statistic_ids, + } + for data in self.current_stats: + logging.info(f" - {data}") + clear_stat = self.send(clear_statistics) + return clear_stat + + def get_data(self, statistic_ids, begin: datetime, end: datetime): + """Get the data for a given period. 
+ + Args: + statistic_ids (list): The list of statistic ids + begin (datetime): The start of the period + end (datetime): The end of the period + Returns: + dict: The data for the period + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + statistics_during_period = { + "id": self.id, + "type": "recorder/statistics_during_period", + "start_time": begin.isoformat(), + "end_time": end.isoformat(), + "statistic_ids": [statistic_ids], + "period": "hour", + } + stat_period = self.send(statistics_during_period) + return stat_period + + def import_data(self): # noqa: C901, PLR0915 + """Import the data for the usage point into Home Assistant.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + logging.info(f"Point de livraison : {self.usage_point_id}") + try: + plan = DatabaseUsagePoints(self.usage_point_id).get_plan() + if self.usage_point_id_config.consumption_detail: + logging.info(" => Préparation des données de consommation...") + measurement_direction = "consumption" + max_date = APP_CONFIG.home_assistant_ws.max_date + if max_date is not None: + logging.warning("Max date détectée %s", max_date) + begin = datetime.strptime(max_date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + detail = DatabaseDetail(self.usage_point_id).get_all(begin=begin, order_dir="desc") + else: + detail = DatabaseDetail(self.usage_point_id).get_all(order_dir="desc") + + cost = 0 + last_year = None + last_month = None + + stats_kwh = {} + stats_euro = {} + + db_tempo_price = DatabaseTempo().get_config("price") + tempo_color_ref = {} + for tempo_data in DatabaseTempo().get(): + tempo_color_ref[tempo_data.date] = tempo_data.color + + stats = Stat(usage_point_id=self.usage_point_id, measurement_direction="consumption") + + for data in detail: + year = int(f"{data.date.strftime('%Y')}") + if last_year is None or year != last_year: + logging.info(f" - {year} :") + month = 
int(f"{data.date.strftime('%m')}") + if last_month is None or month != last_month: + logging.info(f" * {month}") + last_year = year + last_month = month + hour_minute = int(f"{data.date.strftime('%H')}{data.date.strftime('%M')}") + name = f"MyElectricalData - {self.usage_point_id}" + statistic_id = f"myelectricaldata:{self.usage_point_id}" + day_interval = data.interval if hasattr(data, "interval") and data.interval != 0 else 1 + value = data.value / (60 / day_interval) + tag = None + if plan == "BASE": + name = f"{name} {plan} {measurement_direction}" + statistic_id = f"{statistic_id}_{plan.lower()}_{measurement_direction}" + cost = value * self.usage_point_id_config.consumption_price_base / 1000 + tag = "base" + elif plan == "HC/HP": + measure_type = stats.get_mesure_type(data.date) + if measure_type == "HC": + name = f"{name} HC {measurement_direction}" + statistic_id = f"{statistic_id}_hc_{measurement_direction}" + cost = value * self.usage_point_id_config.consumption_price_hc / 1000 + tag = "hc" + else: + name = f"{name} HP {measurement_direction}" + statistic_id = f"{statistic_id}_hp_{measurement_direction}" + cost = value * self.usage_point_id_config.consumption_price_hp / 1000 + tag = "hp" + elif plan.upper() == "TEMPO": + hour_type = stats.get_mesure_type(data.date) + max_time = 2359 + if TEMPO_BEGIN <= hour_minute <= max_time: + date = datetime.combine(data.date, datetime.min.time()) + else: + date = datetime.combine(data.date - timedelta(days=1), datetime.min.time()) + + if date not in tempo_color_ref: + logging.error(f"Import impossible, pas de donnée tempo sur la date du {data.date}") + else: + day_color = tempo_color_ref[date] + tempo_color = f"{day_color}{hour_type}" + tempo_color_price_key = f"{day_color.lower()}_{hour_type.lower()}" + tempo_price = float(db_tempo_price[tempo_color_price_key]) + cost = value / 1000 * tempo_price + name = f"{name} {tempo_color} {measurement_direction}" + statistic_id = 
f"{statistic_id}_{tempo_color.lower()}_{measurement_direction}" + tag = tempo_color.lower() + else: + logging.error(f"Plan {plan} inconnu.") + + date = TIMEZONE.localize(data.date, "%Y-%m-%d %H:%M:%S") + date = date.replace(minute=0, second=0, microsecond=0) + key = date.strftime("%Y-%m-%d %H:%M:%S") + + # KWH + if statistic_id not in stats_kwh: + stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} + if key not in stats_kwh[statistic_id]["data"]: + stats_kwh[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + value = value / 1000 + stats_kwh[statistic_id]["data"][key]["state"] = ( + stats_kwh[statistic_id]["data"][key]["state"] + value + ) + stats_kwh[statistic_id]["tag"] = tag + stats_kwh[statistic_id]["sum"] += value + stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] + + # EURO + statistic_id = f"{statistic_id}_cost" + if statistic_id not in stats_euro: + stats_euro[statistic_id] = { + "name": f"{name} Cost", + "sum": 0, + "data": {}, + } + if key not in stats_euro[statistic_id]["data"]: + stats_euro[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + stats_euro[statistic_id]["tag"] = tag + stats_euro[statistic_id]["data"][key]["state"] += cost + stats_euro[statistic_id]["sum"] += cost + stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] + + # CLEAN OLD DATA + if APP_CONFIG.home_assistant_ws.purge or self.purge_force: + logging.info(f"Clean old data import In Home Assistant Recorder {self.usage_point_id}") + list_statistic_ids = [] + for statistic_id, _ in stats_kwh.items(): + list_statistic_ids.append(statistic_id) + self.clear_data(list_statistic_ids) + APP_CONFIG.home_assistant_ws.purge = False + DatabaseConfig().set("purge", False) + + logging.info(" => Envoie des données...") + logging.info(" - Consommation :") + for statistic_id, data in stats_kwh.items(): + metadata = { + "has_mean": False, + "has_sum": True, + 
"name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "kWh", + } + chunks = list( + chunks_list(list(data["data"].values()), APP_CONFIG.home_assistant_ws.batch_size) + ) + chunks_len = len(chunks) + for i, chunk in enumerate(chunks): + current_plan = data["tag"].upper() + logging.info( + " * %s : %s => %s (%s/%s) ", + current_plan, + chunk[-1]["start"], + chunk[0]["start"], + i + 1, + chunks_len, + ) + self.send( + { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": chunk, + } + ) + + logging.info(" - Coût :") + for statistic_id, data in stats_euro.items(): + metadata = { + "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "EURO", + } + chunks = list( + chunks_list(list(data["data"].values()), APP_CONFIG.home_assistant_ws.batch_size) + ) + chunks_len = len(chunks) + for i, chunk in enumerate(chunks): + current_plan = data["tag"].upper() + logging.info( + " * %s : %s => %s (%s/%s) ", + current_plan, + chunk[-1]["start"], + chunk[0]["start"], + i + 1, + chunks_len, + ) + self.send( + { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": list(chunk), + } + ) + + if self.usage_point_id_config.production_detail: + logging.info(" => Préparation des données de production...") + measurement_direction = "production" + max_date = APP_CONFIG.home_assistant_ws.max_date + if max_date is not None: + logging.warning("Max date détectée %s", max_date) + begin = datetime.strptime(max_date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + detail = DatabaseDetail(self.usage_point_id, "production") + detail = detail.get_all(begin=begin, order_dir="desc") + else: + detail = DatabaseDetail(self.usage_point_id, "production").get_all(order_dir="desc") + + cost = 0 + last_year = None + last_month = None + + stats_kwh = {} + stats_euro = {} + for data in detail: + 
year = int(f"{data.date.strftime('%Y')}") + if last_year is None or year != last_year: + logging.info(f"{year} :") + month = int(f"{data.date.strftime('%m')}") + if last_month is None or month != last_month: + logging.info(f"- {month}") + last_year = year + last_month = month + hour_minute = int(f"{data.date.strftime('%H')}{data.date.strftime('%M')}") + name = f"MyElectricalData - {self.usage_point_id} {measurement_direction}" + statistic_id = f"myelectricaldata:{self.usage_point_id}_{measurement_direction}" + day_interval = data.interval if hasattr(data, "interval") and data.interval != 0 else 1 + value = data.value / (60 / day_interval) + cost = value * self.usage_point_id_config.production_price / 1000 + date = TIMEZONE.localize(data.date, "%Y-%m-%d %H:%M:%S") + date = date.replace(minute=0, second=0, microsecond=0) + key = date.strftime("%Y-%m-%d %H:%M:%S") + + # KWH + if statistic_id not in stats_kwh: + stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} + if key not in stats_kwh[statistic_id]["data"]: + stats_kwh[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + value = value / 1000 + stats_kwh[statistic_id]["data"][key]["state"] = ( + stats_kwh[statistic_id]["data"][key]["state"] + value + ) + stats_kwh[statistic_id]["sum"] += value + stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] + + # EURO + statistic_id = f"{statistic_id}_revenue" + if statistic_id not in stats_euro: + stats_euro[statistic_id] = { + "name": f"{name} Revenue", + "sum": 0, + "data": {}, + } + if key not in stats_euro[statistic_id]["data"]: + stats_euro[statistic_id]["data"][key] = { + "start": date.isoformat(), + "state": 0, + "sum": 0, + } + stats_euro[statistic_id]["data"][key]["state"] += cost + stats_euro[statistic_id]["sum"] += cost + stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] + + if APP_CONFIG.home_assistant_ws.purge or self.purge_force: + list_statistic_ids = [] + 
for statistic_id, _ in stats_kwh.items(): + list_statistic_ids.append(statistic_id) + self.clear_data(list_statistic_ids) + APP_CONFIG.home_assistant_ws.purge = False + DatabaseConfig().set("purge", False) + + logging.info(" => Envoie des données de production...") + logging.info(" - Production :") + + for statistic_id, data in stats_kwh.items(): + metadata = { + "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "kWh", + } + chunks = list( + chunks_list(list(data["data"].values()), APP_CONFIG.home_assistant_ws.batch_size) + ) + chunks_len = len(chunks) + for i, chunk in enumerate(chunks): + current_plan = data["tag"].upper() + logging.info( + " * %s : %s => %s (%s/%s) ", + current_plan, + chunk[-1]["start"], + chunk[0]["start"], + i + 1, + chunks_len, + ) + self.send( + { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": chunk, + } + ) + + for statistic_id, data in stats_euro.items(): + metadata = { + "has_mean": False, + "has_sum": True, + "name": data["name"], + "source": "myelectricaldata", + "statistic_id": statistic_id, + "unit_of_measurement": "EURO", + } + import_statistics = { + "id": self.id, + "type": "recorder/import_statistics", + "metadata": metadata, + "stats": list(data["data"].values()), + } + self.send(import_statistics) + + except Exception as _e: + self.websocket.close() + traceback.print_exc() + logging.error(_e) + logging.critical("Erreur lors de l'export des données vers Home Assistant") diff --git a/src/external_services/influxdb/client.py b/src/external_services/influxdb/client.py new file mode 100644 index 00000000..2e286f97 --- /dev/null +++ b/src/external_services/influxdb/client.py @@ -0,0 +1,236 @@ +"""This module contains the InfluxDB class for connecting to and interacting with InfluxDB.""" +import datetime +import inspect +import logging + +import influxdb_client +from dateutil.tz import 
tzlocal +from influxdb_client.client.util import date_utils +from influxdb_client.client.util.date_utils import DateHelper +from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS + +from config.main import APP_CONFIG +from const import TIMEZONE_UTC, URL_CONFIG_FILE +from utils import separator, separator_warning, title + + +class InfluxDB: + """Class for connecting to and interacting with InfluxDB.""" + + def __init__(self): + self.influxdb = {} + self.query_api = {} + self.write_api = {} + self.delete_api = {} + self.buckets_api = {} + self.retention = 0 + self.max_retention = None + self.valid = False + if APP_CONFIG.influxdb.enable: + self.connect() + if self.valid: + if self.retention != 0: + day = int(self.retention / 60 / 60 / 24) + logging.warning( + f" ATTENTION, InfluxDB est configuré avec une durée de rétention de {day} jours." + ) + logging.warning( + f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." + ) + else: + logging.warning(" => Aucune durée de rétention de données détectée.") + + def connect(self): + """Connect to InfluxDB. + + This method establishes a connection to the InfluxDB database using the provided configuration. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + separator() + logging.info(f"Connect to InfluxDB {APP_CONFIG.influxdb.hostname}:{APP_CONFIG.influxdb.port}") + date_utils.date_helper = DateHelper(timezone=tzlocal()) + self.influxdb = influxdb_client.InfluxDBClient( + url=f"{APP_CONFIG.influxdb.scheme}://{APP_CONFIG.influxdb.hostname}:{APP_CONFIG.influxdb.port}", + token=APP_CONFIG.influxdb.token, + org=APP_CONFIG.influxdb.org, + timeout="600000", + ) + health = self.influxdb.health() + if health.status == "pass": + logging.info(" => Connection success") + self.valid = True + title(f"Méthode d'importation : {APP_CONFIG.influxdb.method.upper()}") + if APP_CONFIG.influxdb.method.upper() == "ASYNCHRONOUS": + logging.warning( + ' ATTENTION, le mode d\'importation "ASYNCHRONOUS"' + "est très consommateur de ressources système." + ) + self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) + elif APP_CONFIG.influxdb.method.upper() == "SYNCHRONOUS": + self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) + else: + self.write_api = self.influxdb.write_api( + write_options=influxdb_client.WriteOptions( + batch_size=APP_CONFIG.influxdb.batching_options.batch_size, + flush_interval=APP_CONFIG.influxdb.batching_options.flush_interval, + jitter_interval=APP_CONFIG.influxdb.batching_options.jitter_interval, + retry_interval=APP_CONFIG.influxdb.batching_options.retry_interval, + max_retries=APP_CONFIG.influxdb.batching_options.max_retries, + max_retry_delay=APP_CONFIG.influxdb.batching_options.max_retry_delay, + exponential_base=APP_CONFIG.influxdb.batching_options.exponential_base, + ) + ) + self.query_api = self.influxdb.query_api() + self.delete_api = self.influxdb.delete_api() + self.buckets_api = self.influxdb.buckets_api() + self.get_list_retention_policies() + else: + logging.error( + f""" + Impossible de se connecter à la base influxdb. 
+ + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + + def purge_influxdb(self): + """Purge the InfluxDB database. + + This method wipes the InfluxDB database by deleting all data within specified measurement types. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + separator_warning() + logging.warning(f"Wipe influxdb database {APP_CONFIG.influxdb.hostname}:{APP_CONFIG.influxdb.port}") + start = "1970-01-01T00:00:00Z" + stop = datetime.datetime.now(tz=TIMEZONE_UTC) + measurement = [ + "consumption", + "production", + "consumption_detail", + "production_detail", + ] + for mesure in measurement: + self.delete_api.delete( + start, stop, f'_measurement="{mesure}"', APP_CONFIG.influxdb.bucket, org=APP_CONFIG.influxdb.org + ) + logging.warning(" => Data reset") + + def get_list_retention_policies(self): + """Get the list of retention policies. + + This method retrieves the list of retention policies for the InfluxDB database. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if APP_CONFIG.influxdb.org == "-": # InfluxDB 1.8 + self.retention = 0 + self.max_retention = 0 + return + buckets = self.buckets_api.find_buckets().buckets + for bucket in buckets: + if bucket.name == APP_CONFIG.influxdb.bucket: + self.retention = bucket.retention_rules[0].every_seconds + self.max_retention = datetime.datetime.now(tz=TIMEZONE_UTC) - datetime.timedelta( + seconds=self.retention + ) + + def get(self, start, end, measurement): + """Retrieve data from the InfluxDB database. + + This method retrieves data from the specified measurement within the given time range. + + Args: + start (str): Start time of the data range. + end (str): End time of the data range. + measurement (str): Name of the measurement to retrieve data from. + + Returns: + list: List of data points retrieved from the database. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if APP_CONFIG.influxdb.org != "-": + query = f""" + from(bucket: "{APP_CONFIG.influxdb.bucket}") + |> range(start: {start}, stop: {end}) + |> filter(fn: (r) => r["_measurement"] == "{measurement}") + """ + logging.debug(query) + output = self.query_api.query(query) + else: + # Skip for InfluxDB 1.8 + output = [] + return output + + def count(self, start, end, measurement): + """Count the number of data points within a specified time range and measurement. + + Args: + start (str): Start time of the data range. + end (str): End time of the data range. + measurement (str): Name of the measurement to count data points from. + + Returns: + list: List of count values. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if APP_CONFIG.influxdb.org != "-": + query = f""" + from(bucket: "{APP_CONFIG.influxdb.bucket}") + |> range(start: {start}, stop: {end}) + |> filter(fn: (r) => r["_measurement"] == "{measurement}") + |> filter(fn: (r) => r["_field"] == "Wh") + |> count() + |> yield(name: "count") + """ + logging.debug(query) + output = self.query_api.query(query) + else: + # Skip for InfluxDB 1.8 + output = [] + return output + + def delete(self, date, measurement): + """Delete data from the InfluxDB database. + + This method deletes data from the specified measurement for a given date. + + Args: + date (str): Date of the data to be deleted. + measurement (str): Name of the measurement to delete data from. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + self.delete_api.delete( + date, date, f'_measurement="{measurement}"', APP_CONFIG.influxdb.bucket, org=APP_CONFIG.influxdb.org + ) + + def write(self, tags, date=None, fields=None, measurement="log"): + """Write data to the InfluxDB database. 
class ExportInfluxDB:
    """Push cached MyElectricalData measurements for one usage point into InfluxDB.

    The class connects on construction (via `InfluxDB()`), then synchronizes daily
    and per-interval ("detail") consumption/production series plus Tempo and
    Ecowatt data, skipping series whose point count already matches InfluxDB.
    """

    def __init__(self, usage_point_id, measurement_direction="consumption"):
        """Bind the exporter to a usage point and open the InfluxDB connection.

        Args:
            usage_point_id: Identifier of the usage point to export.
            measurement_direction (str, optional): "consumption" or "production".
                Defaults to "consumption".
        """
        self.usage_point_id = usage_point_id
        self.usage_point_config: UsagePointId = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]
        # Re-read the id from the config entry so the canonical form is used.
        self.usage_point_id = self.usage_point_config.usage_point_id
        self.measurement_direction = measurement_direction
        self.stat = Stat(self.usage_point_id, measurement_direction=measurement_direction)
        # RFC3339-like format used when querying InfluxDB time ranges.
        self.time_format = "%Y-%m-%dT%H:%M:%SZ"
        timezone = getattr(APP_CONFIG.influxdb, "timezone", "UTC")
        if timezone == "UTC":
            self.tz = TIMEZONE_UTC
        else:
            self.tz = pytz.timezone(timezone)
        self.influxdb_client = InfluxDB()
        # NOTE(review): "bootstap" is a typo for "bootstrap"; kept because external
        # code may already call it by this name — confirm before renaming.
        self.bootstap()

    def bootstap(self):
        """Bootstrap apps: run the export if the InfluxDB connection is valid."""
        try:
            if self.influxdb_client.valid:
                self.run()
            else:
                logging.critical("=> InfluxDB Désactivée (Echec de connexion)")
        except Exception:
            # Best-effort export: never let a failed export kill the caller.
            traceback.print_exc()

    def run(self):
        """Runner: export every series enabled in the usage-point config."""
        if self.usage_point_config.consumption:
            self.daily()
        if self.usage_point_config.production:
            self.daily(measurement_direction="production")
        if self.usage_point_config.consumption_detail:
            self.detail()
        if self.usage_point_config.production_detail:
            self.detail(measurement_direction="production")
        self.tempo()
        self.ecowatt()

    def daily(self, measurement_direction="consumption"):
        """Export daily data to InfluxDB.

        Compares the number of cached rows against the point count returned by
        InfluxDB for the same date range and rewrites the whole series when they
        differ.

        Args:
            measurement_direction (str, optional): The measurement direction. Defaults to "consumption".
        """
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            current_month = ""
            if measurement_direction == "consumption":
                price = self.usage_point_config.consumption_price_base
            else:
                price = self.usage_point_config.production_price
            logging.info(f'Envoi des données "{measurement_direction.upper()}" dans influxdb')
            # NOTE(review): get_all() is called WITHOUT measurement_direction while
            # get_last_date()/get_first_date() below pass it — the count compared
            # against InfluxDB may mix directions; confirm intended.
            get_daily_all = DatabaseDaily(self.usage_point_id).get_all()
            get_daily_all_count = len(get_daily_all)
            last_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_last_date()
            first_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_first_date()
            if last_data and first_data:
                start = datetime.strftime(last_data, self.time_format)
                end = datetime.strftime(first_data, self.time_format)
                influxdb_data = self.influxdb_client.count(start, end, measurement_direction)
                # Starts at 1, so an empty InfluxDB result still differs from an
                # empty cache — presumably intentional; verify.
                count = 1
                for data in influxdb_data:
                    for record in data.records:
                        count += record.get_value()
                if get_daily_all_count != count:
                    logging.info(f" Cache : {get_daily_all_count} / InfluxDb : {count}")
                    for daily in get_daily_all:
                        date = daily.date
                        if current_month != date.strftime("%m"):
                            logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}")
                        # if len(INFLUXDB.get(start, end, measurement_direction)) == 0:
                        watt = daily.value
                        kwatt = watt / 1000
                        euro = kwatt * price
                        self.influxdb_client.write(
                            measurement=measurement_direction,
                            date=self.tz.localize(date),
                            tags={
                                "usage_point_id": self.usage_point_id,
                                "year": daily.date.strftime("%Y"),
                                "month": daily.date.strftime("%m"),
                            },
                            fields={
                                "Wh": float(watt),
                                "kWh": float(force_round(kwatt, 5)),
                                "price": float(force_round(euro, 5)),
                            },
                        )
                        current_month = date.strftime("%m")
                    logging.info(" => OK")
                else:
                    logging.info(f" => Données synchronisées ({count} valeurs)")
            else:
                logging.info(" => Aucune donnée")

    def detail(self, measurement_direction="consumption"):
        """Export detailed data to InfluxDB.

        Same cache-vs-InfluxDB synchronization as daily(), but for per-interval
        points; converts interval power (W) to energy (Wh) using the row interval
        in minutes.

        Args:
            measurement_direction (str, optional): The measurement direction. Defaults to "consumption".
        """
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            current_month = ""
            measurement = f"{measurement_direction}_detail"
            logging.info(f'Envoi des données "{measurement.upper()}" dans influxdb')
            get_detail_all = DatabaseDetail(self.usage_point_id, measurement_direction).get_all()
            get_detail_all_count = len(get_detail_all)
            last_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_last_date()
            first_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_first_date()
            if last_data and first_data:
                start = datetime.strftime(last_data, self.time_format)
                end = datetime.strftime(first_data, self.time_format)
                influxdb_data = self.influxdb_client.count(start, end, measurement)
                count = 1
                for data in influxdb_data:
                    for record in data.records:
                        count += record.get_value()

                if get_detail_all_count != count:
                    logging.info(f" Cache : {get_detail_all_count} / InfluxDb : {count}")
                    for _, detail in enumerate(get_detail_all):
                        date = detail.date
                        if current_month != date.strftime("%m"):
                            logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}")
                        watt = detail.value
                        kwatt = watt / 1000
                        # Guard against a stored interval of 0 minutes.
                        interval = getattr(detail, "interval", 1)
                        interval = 1 if interval == 0 else interval
                        watth = watt / (60 / interval)
                        kwatth = watth / 1000
                        if measurement_direction == "consumption":
                            measure_type = self.stat.get_mesure_type(date)
                            if measure_type == "HP":
                                euro = kwatth * self.usage_point_config.consumption_price_hp
                            else:
                                euro = kwatth * self.usage_point_config.consumption_price_hc
                        else:
                            measure_type = "BASE"
                            euro = kwatth * self.usage_point_config.production_price
                        self.influxdb_client.write(
                            measurement=measurement,
                            date=self.tz.localize(date),
                            tags={
                                "usage_point_id": self.usage_point_id,
                                "year": detail.date.strftime("%Y"),
                                "month": detail.date.strftime("%m"),
                                # NOTE(review): tag key "internal" looks like a typo
                                # for "interval"; renaming would change the series
                                # schema in InfluxDB — confirm with dashboards first.
                                "internal": interval,
                                "measure_type": measure_type,
                            },
                            fields={
                                "W": float(watt),
                                "kW": float(force_round(kwatt, 5)),
                                "Wh": float(watth),
                                "kWh": float(force_round(kwatth, 5)),
                                "price": float(force_round(euro, 5)),
                            },
                        )
                        current_month = date.strftime("%m")
                    logging.info(" => OK")
                else:
                    logging.info(f" => Données synchronisées ({count} valeurs)")
            else:
                logging.info(" => Aucune donnée")

    def tempo(self):
        """Export tempo data to InfluxDB (one "color" point per day)."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            measurement = "tempo"
            logging.info('Envoi des données "TEMPO" dans influxdb')
            tempo_data = DatabaseTempo().get()
            if tempo_data:
                for data in tempo_data:
                    self.influxdb_client.write(
                        measurement=measurement,
                        date=self.tz.localize(data.date),
                        tags={
                            "usage_point_id": self.usage_point_id,
                        },
                        fields={"color": data.color},
                    )
                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def ecowatt(self):
        """Export ecowatt data to InfluxDB: one daily point plus hourly detail points."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            measurement = "ecowatt"
            logging.info('Envoi des données "ECOWATT" dans influxdb')
            ecowatt_data = DatabaseEcowatt().get()
            if ecowatt_data:
                for data in ecowatt_data:
                    self.influxdb_client.write(
                        measurement=f"{measurement}_daily",
                        date=self.tz.localize(data.date),
                        tags={
                            "usage_point_id": self.usage_point_id,
                        },
                        fields={"value": data.value, "message": data.message},
                    )
                    # `detail` is a stringified dict; literal_eval restores it.
                    data_detail = ast.literal_eval(data.detail)
                    for date, value in data_detail.items():
                        date_format = datetime.strptime(date, "%Y-%m-%d %H:%M:%S").replace(tzinfo=TIMEZONE_UTC)
                        self.influxdb_client.write(
                            measurement=f"{measurement}_detail",
                            date=date_format,
                            tags={
                                "usage_point_id": self.usage_point_id,
                            },
                            fields={"value": value},
                        )
                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")
=> Pas de donnée") diff --git a/src/external_services/mqtt/client.py b/src/external_services/mqtt/client.py new file mode 100644 index 00000000..3d88995f --- /dev/null +++ b/src/external_services/mqtt/client.py @@ -0,0 +1,97 @@ +"""MQTT Client.""" + +import inspect +import logging + +from paho.mqtt import client as mqtt +from paho.mqtt import publish + +from config.main import APP_CONFIG +from const import URL_CONFIG_FILE +from utils import separator + + +class Mqtt: + """MQTT Client.""" + + def __init__(self): + self.client: mqtt.Client = {} + self.valid: bool = False + self.connect() + + def connect(self) -> None: + """Connector.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + separator() + logging.info(f"Connect to MQTT broker {APP_CONFIG.mqtt.hostname}:{APP_CONFIG.mqtt.port}") + try: + self.client = mqtt.Client(APP_CONFIG.mqtt.client_id) + if APP_CONFIG.mqtt.username != "" and APP_CONFIG.mqtt.password != "": + self.client.username_pw_set(APP_CONFIG.mqtt.username, APP_CONFIG.mqtt.password) + if APP_CONFIG.mqtt.cert: + logging.info(f"Using ca_cert: {APP_CONFIG.mqtt.cert}") + self.client.tls_set(ca_certs=APP_CONFIG.mqtt.cert) + self.client.connect(APP_CONFIG.mqtt.hostname, APP_CONFIG.mqtt.port) + self.client.loop_start() + self.valid = True + logging.info(" => Connection success") + except Exception: + logging.error( + f""" + Impossible de se connecter au serveur MQTT. 
+ + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + + def publish(self, topic, msg, prefix=None): + """Publish one message.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if self.valid: + if prefix is None: + prefix = APP_CONFIG.mqtt.prefix + result = self.client.publish( + f"{APP_CONFIG.mqtt.prefix}/{prefix}/{topic}", + str(msg), + qos=APP_CONFIG.mqtt.qos, + retain=APP_CONFIG.mqtt.retain, + ) + status = result[0] + if status == 0: + logging.debug(f" MQTT Send : {prefix}/{topic} => {msg}") + else: + logging.info(f" - Failed to send message to topic {prefix}/{topic}") + + def publish_multiple(self, data, prefix=None): + """Public multiple message.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if self.valid: + if data: + payload = [] + if prefix is None: + prefix = APP_CONFIG.mqtt.prefix + else: + prefix = f"{prefix}" + for topics, value in data.items(): + payload.append( + { + "topic": f"{prefix}/{topics}", + "payload": value, + "qos": APP_CONFIG.mqtt.qos, + "retain": APP_CONFIG.mqtt.retain, + } + ) + username = None if not APP_CONFIG.mqtt.username else APP_CONFIG.mqtt.username + password = None if not APP_CONFIG.mqtt.password else APP_CONFIG.mqtt.password + if username is None and password is None: + auth = None + else: + auth = {"username": username, "password": password} + publish.multiple( + payload, + hostname=APP_CONFIG.mqtt.hostname, + port=APP_CONFIG.mqtt.port, + client_id=APP_CONFIG.mqtt.client_id, + auth=auth, + ) diff --git a/src/external_services/mqtt/main.py b/src/external_services/mqtt/main.py new file mode 100644 index 00000000..c5fe7d8d --- /dev/null +++ b/src/external_services/mqtt/main.py @@ -0,0 +1,580 @@ +"""Export des données vers MQTT.""" + +import ast +import inspect +import logging +import traceback +from datetime import datetime, timedelta + +from dateutil.relativedelta import 
class ExportMqtt:
    """Publish cached MyElectricalData series for one usage point over MQTT.

    On construction this connects an Mqtt client and, if the connection is
    valid, pushes account status, contract, address, Ecowatt, Tempo, plus
    annual/linear aggregates for consumption and production as configured.
    """

    def __init__(self, usage_point_id):
        """Bind the exporter to a usage point and start the export.

        Args:
            usage_point_id: Identifier of the usage point to export.
        """
        self.usage_point_id = usage_point_id
        self.usage_point_config = APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]
        self.date_format = "%Y-%m-%d"
        self.date_format_detail = "%Y-%m-%d %H:%M:%S"
        self.mqtt_client = Mqtt()
        self.bootstrap()

    def bootstrap(self):
        """Bootstrap apps: run the export only when the MQTT connection is valid."""
        try:
            if self.mqtt_client.valid:
                self.run()
            else:
                logging.critical("=> Export MQTT Désactivée (Echec de connexion)")
        except Exception:
            # Best-effort export: never propagate failures to the caller.
            traceback.print_exc()

    def run(self):
        """Run jobs enabled by the usage-point configuration."""
        self.status()
        self.contract()
        self.address()
        self.ecowatt()
        # Tempo is relevant as soon as any consumption series is enabled.
        if getattr(self.usage_point_config, "consumption", False) or getattr(
            self.usage_point_config, "consumption_detail", False
        ):
            self.tempo()
        if getattr(self.usage_point_config, "consumption", False):
            price_base = self.usage_point_config.consumption_price_base
            self.daily_annual(price_base, measurement_direction="consumption")
            self.daily_linear(price_base, measurement_direction="consumption")
        if getattr(self.usage_point_config, "production", False):
            self.daily_annual(self.usage_point_config.production_price, measurement_direction="production")
            self.daily_linear(self.usage_point_config.production_price, measurement_direction="production")
        if getattr(self.usage_point_config, "consumption_detail", False):
            price_hp = self.usage_point_config.consumption_price_hp
            price_hc = self.usage_point_config.consumption_price_hc
            self.detail_annual(price_hp, price_hc, measurement_direction="consumption")
            self.detail_linear(price_hp, price_hc, measurement_direction="consumption")
        if getattr(self.usage_point_config, "production_detail", False):
            self.detail_annual(self.usage_point_config.production_price, measurement_direction="production")
            self.detail_linear(self.usage_point_config.production_price, measurement_direction="production")
        if getattr(self.usage_point_config, "consumption_max_power", False):
            self.max_power()

    def status(self):
        """Publish account status fields (quota, ban, consent expiration, …)."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Statut du compte.")
            usage_point_config = DatabaseUsagePoints(self.usage_point_id).get()
            send_data = [
                "consentement_expiration",
                "call_number",
                "quota_reached",
                "quota_limit",
                "quota_reset_at",
                "last_call",
                "ban",
            ]
            consentement_expiration = {}
            for item in send_data:
                if hasattr(usage_point_config, item):
                    queue = f"{self.usage_point_id}/status/{item}"
                    value = getattr(usage_point_config, item)
                    if isinstance(value, datetime):
                        value = value.strftime("%Y-%m-%d %H:%M:%S")
                    # NOTE(review): the formatted `value` above is unused — the raw
                    # attribute is re-read and str()'d here. Likely intended to
                    # publish `value`; confirm before changing the payload format.
                    consentement_expiration[queue] = str(getattr(usage_point_config, item))
            self.mqtt_client.publish_multiple(consentement_expiration)
            logging.info(" => OK")

    def contract(self):
        """Publish every column of the contract row as one topic each."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des messages du contrat")
            contract_data = DatabaseContracts(self.usage_point_id).get()
            # `__table__` only exists on a real SQLAlchemy row (i.e. data found).
            if hasattr(contract_data, "__table__"):
                output = {}
                for column in contract_data.__table__.columns:
                    output[f"{self.usage_point_id}/contract/{column.name}"] = str(getattr(contract_data, column.name))
                self.mqtt_client.publish_multiple(output)
                logging.info(" => OK")
            else:
                logging.info(" => ERREUR")

    def address(self):
        """Publish every column of the address row as one topic each."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des messages d'addresse")
            address_data = DatabaseAddresses(self.usage_point_id).get()
            if hasattr(address_data, "__table__"):
                output = {}
                for column in address_data.__table__.columns:
                    output[f"{self.usage_point_id}/address/{column.name}"] = str(getattr(address_data, column.name))
                self.mqtt_client.publish_multiple(output)
                logging.info(" => OK")
            else:
                logging.info(" => ERREUR")

    def daily_annual(self, price, measurement_direction="consumption"):
        """Publish per-calendar-year aggregates, walking back one year per loop turn.

        Args:
            price: Price per kWh used for the euro conversions.
            measurement_direction (str, optional): "consumption" or "production".
        """
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des données annuelles")
            date_range = DatabaseDaily(self.usage_point_id).get_date_range()
            stat = Stat(self.usage_point_id, measurement_direction)
            if date_range["begin"] and date_range["end"]:
                date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC)
                date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC)
                # Start at Jan 1 of the most recent year with data.
                date_begin_current = datetime.combine(
                    date_end.replace(month=1).replace(day=1), datetime.min.time()
                ).astimezone(TIMEZONE_UTC)
                finish = False
                while not finish:
                    year = int(date_begin_current.strftime("%Y"))
                    get_daily_year = stat.get_year(year=year)
                    get_daily_month = stat.get_month(year=year)
                    get_daily_week = stat.get_week(year=year)
                    # Current year publishes under ".../annual/current".
                    if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")):
                        sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current"
                    else:
                        sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}"
                    mqtt_data = {
                        # thisYear
                        f"{sub_prefix}/thisYear/dateBegin": get_daily_year["begin"],
                        f"{sub_prefix}/thisYear/dateEnd": get_daily_year["end"],
                        f"{sub_prefix}/thisYear/base/Wh": get_daily_year["value"],
                        f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year["value"] / 1000, 2),
                        f"{sub_prefix}/thisYear/base/euro": round(get_daily_year["value"] / 1000 * price, 2),
                        # thisMonth
                        f"{sub_prefix}/thisMonth/dateBegin": get_daily_month["begin"],
                        f"{sub_prefix}/thisMonth/dateEnd": get_daily_month["end"],
                        f"{sub_prefix}/thisMonth/base/Wh": get_daily_month["value"],
                        f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month["value"] / 1000, 2),
                        f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month["value"] / 1000 * price, 2),
                        # thisWeek
                        f"{sub_prefix}/thisWeek/dateBegin": get_daily_week["begin"],
                        f"{sub_prefix}/thisWeek/dateEnd": get_daily_week["end"],
                        f"{sub_prefix}/thisWeek/base/Wh": get_daily_week["value"],
                        f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week["value"] / 1000, 2),
                        f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week["value"] / 1000 * price, 2),
                    }

                    # One topic set per weekday of the last 7 days.
                    for week in range(7):
                        begin = stat.daily(week)["begin"]
                        begin_day = (
                            datetime.strptime(stat.daily(week)["begin"], self.date_format)
                            .astimezone(TIMEZONE_UTC)
                            .strftime("%A")
                        )
                        end = stat.daily(week)["end"]
                        value = stat.daily(week)["value"]
                        mqtt_data[f"{sub_prefix}/week/{begin_day}/dateBegin"] = begin
                        mqtt_data[f"{sub_prefix}/week/{begin_day}/dateEnd"] = end
                        mqtt_data[f"{sub_prefix}/week/{begin_day}/base/Wh"] = value
                        mqtt_data[f"{sub_prefix}/week/{begin_day}/base/kWh"] = round(value / 1000, 2)
                        mqtt_data[f"{sub_prefix}/week/{begin_day}/base/euro"] = round(value / 1000 * price, 2)

                    for month in range(1, 13):
                        get_daily_month = stat.get_month(year=year, month=month)
                        mqtt_data[f"{sub_prefix}/month/{month}/dateBegin"] = get_daily_month["begin"]
                        mqtt_data[f"{sub_prefix}/month/{month}/dateEnd"] = get_daily_month["end"]
                        mqtt_data[f"{sub_prefix}/month/{month}/base/Wh"] = get_daily_month["value"]
                        mqtt_data[f"{sub_prefix}/month/{month}/base/kWh"] = round(get_daily_month["value"] / 1000, 2)
                        mqtt_data[f"{sub_prefix}/month/{month}/base/euro"] = round(
                            get_daily_month["value"] / 1000 * price, 2
                        )

                    # Step one year back; clamp at the first year with data, and
                    # stop after the iteration that processed it.
                    if date_begin_current == date_begin:
                        finish = True
                    date_begin_current = date_begin_current - relativedelta(years=1)
                    if date_begin_current < date_begin:
                        date_begin_current = date_begin
                    self.mqtt_client.publish_multiple(mqtt_data)

                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def daily_linear(self, price, measurement_direction="consumption"):
        """Publish rolling-window ("linear") aggregates: year, year-1, year-2, …

        Args:
            price: Price per kWh used for the euro conversions.
            measurement_direction (str, optional): "consumption" or "production".
        """
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des données linéaires journalières.")
            date_range = DatabaseDaily(self.usage_point_id).get_date_range()
            stat = Stat(self.usage_point_id, measurement_direction)
            if date_range["begin"] and date_range["end"]:
                date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC)
                date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC)
                date_begin_current = date_end - relativedelta(years=1)
                date_begin_current = date_begin_current.astimezone(TIMEZONE_UTC)
                idx = 0
                finish = False
                while not finish:
                    # idx 0 -> "year" (most recent rolling year), then "year-1", …
                    if idx == 0:
                        key = "year"
                    else:
                        key = f"year-{idx}"
                    sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}"
                    get_daily_year_linear = stat.get_year_linear(idx)
                    get_daily_month_linear = stat.get_month_linear(idx)
                    get_daily_week_linear = stat.get_week_linear(idx)
                    mqtt_data = {
                        # thisYear
                        f"{sub_prefix}/thisYear/dateBegin": get_daily_year_linear["begin"],
                        f"{sub_prefix}/thisYear/dateEnd": get_daily_year_linear["end"],
                        f"{sub_prefix}/thisYear/base/Wh": get_daily_year_linear["value"],
                        f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year_linear["value"] / 1000, 2),
                        f"{sub_prefix}/thisYear/base/euro": round(get_daily_year_linear["value"] / 1000 * price, 2),
                        # thisMonth
                        f"{sub_prefix}/thisMonth/dateBegin": get_daily_month_linear["begin"],
                        f"{sub_prefix}/thisMonth/dateEnd": get_daily_month_linear["end"],
                        f"{sub_prefix}/thisMonth/base/Wh": get_daily_month_linear["value"],
                        f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month_linear["value"] / 1000, 2),
                        f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month_linear["value"] / 1000 * price, 2),
                        # thisWeek
                        f"{sub_prefix}/thisWeek/dateBegin": get_daily_week_linear["begin"],
                        f"{sub_prefix}/thisWeek/dateEnd": get_daily_week_linear["end"],
                        f"{sub_prefix}/thisWeek/base/Wh": get_daily_week_linear["value"],
                        f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week_linear["value"] / 1000, 2),
                        f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week_linear["value"] / 1000 * price, 2),
                    }

                    # CALCUL NEW DATE
                    if date_begin_current <= date_begin:
                        finish = True
                    date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time())
                    date_begin_current = date_begin_current - relativedelta(years=1)
                    if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC):
                        date_begin_current = datetime.combine(date_begin, datetime.min.time()).astimezone(TIMEZONE_UTC)
                    idx = idx + 1

                    self.mqtt_client.publish_multiple(mqtt_data)

                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption"):  # noqa: PLR0915
        """Publish per-calendar-year HP/HC aggregates (detail data).

        Args:
            price_hp: Peak-hour (HP) price per kWh.
            price_hc: Off-peak (HC) price per kWh. Defaults to 0.
            measurement_direction (str, optional): "consumption" or "production".
        """
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des données annuelles détaillé.")
            date_range = DatabaseDetail(self.usage_point_id).get_date_range()
            stat = Stat(self.usage_point_id, measurement_direction)
            if date_range["begin"] and date_range["end"]:
                date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC)
                date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC)
                date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time())
                finish = False
                while not finish:
                    year = int(date_begin_current.strftime("%Y"))
                    month = int(datetime.now(tz=TIMEZONE_UTC).strftime("%m"))
                    get_detail_year_hp = stat.get_year(year=year, measure_type="HP")
                    get_detail_year_hc = stat.get_year(year=year, measure_type="HC")
                    get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP")
                    get_detail_month_hc = stat.get_month(year=year, month=month, measure_type="HC")
                    get_detail_week_hp = stat.get_week(
                        year=year,
                        month=month,
                        measure_type="HP",
                    )
                    get_detail_week_hc = stat.get_week(
                        year=year,
                        month=month,
                        measure_type="HC",
                    )

                    if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")):
                        sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current"
                    else:
                        sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}"
                    mqtt_data = {
                        # thisYear - HP
                        f"{sub_prefix}/thisYear/hp/Wh": get_detail_year_hp["value"],
                        f"{sub_prefix}/thisYear/hp/kWh": round(get_detail_year_hp["value"] / 1000, 2),
                        f"{sub_prefix}/thisYear/hp/euro": round(get_detail_year_hp["value"] / 1000 * price_hp, 2),
                        # thisYear - HC
                        f"{sub_prefix}/thisYear/hc/Wh": get_detail_year_hc["value"],
                        f"{sub_prefix}/thisYear/hc/kWh": round(get_detail_year_hc["value"] / 1000, 2),
                        f"{sub_prefix}/thisYear/hc/euro": round(get_detail_year_hc["value"] / 1000 * price_hc, 2),
                        # thisMonth - HP
                        f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_hp["value"],
                        f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_hp["value"] / 1000, 2),
                        f"{sub_prefix}/thisMonth/hp/euro": round(get_detail_month_hp["value"] / 1000 * price_hp, 2),
                        # thisMonth - HC
                        f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_hc["value"],
                        f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_hc["value"] / 1000, 2),
                        f"{sub_prefix}/thisMonth/hc/euro": round(get_detail_month_hc["value"] / 1000 * price_hc, 2),
                        # thisWeek - HP
                        f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_hp["value"],
                        f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_hp["value"] / 1000, 2),
                        f"{sub_prefix}/thisWeek/hp/euro": round(get_detail_week_hp["value"] / 1000 * price_hp, 2),
                        # thisWeek - HC
                        f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_hc["value"],
                        f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_hc["value"] / 1000, 2),
                        f"{sub_prefix}/thisWeek/hc/euro": round(get_detail_week_hc["value"] / 1000 * price_hc, 2),
                    }

                    for week in range(7):
                        # HP
                        begin_hp_day = (
                            datetime.strptime(stat.detail(week, "HP")["begin"], self.date_format)
                            .astimezone(TIMEZONE_UTC)
                            .strftime("%A")
                        )
                        value_hp = stat.detail(week, "HP")["value"]
                        prefix = f"{sub_prefix}/week/{begin_hp_day}/hp"
                        mqtt_data[f"{prefix}/Wh"] = value_hp
                        mqtt_data[f"{prefix}/kWh"] = round(value_hp / 1000, 2)
                        mqtt_data[f"{prefix}/euro"] = round(value_hp / 1000 * price_hp, 2)
                        # HC
                        begin_hc_day = (
                            datetime.strptime(stat.detail(week, "HC")["begin"], self.date_format)
                            .astimezone(TIMEZONE_UTC)
                            .strftime("%A")
                        )
                        value_hc = stat.detail(week, "HC")["value"]
                        prefix = f"{sub_prefix}/week/{begin_hc_day}/hc"
                        mqtt_data[f"{prefix}/Wh"] = value_hc
                        mqtt_data[f"{prefix}/kWh"] = round(value_hc / 1000, 2)
                        mqtt_data[f"{prefix}/euro"] = round(value_hc / 1000 * price_hc, 2)

                    for month in range(12):
                        current_month = month + 1
                        # HP
                        get_detail_month_hp = stat.get_month(year=year, month=current_month, measure_type="HP")
                        prefix = f"{sub_prefix}/month/{current_month}/hp"
                        mqtt_data[f"{prefix}/Wh"] = get_detail_month_hp["value"]
                        mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hp["value"] / 1000, 2)
                        mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hp["value"] / 1000 * price_hp, 2)
                        # HC
                        get_detail_month_hc = stat.get_month(year=year, month=current_month, measure_type="HC")
                        prefix = f"{sub_prefix}/month/{current_month}/hc"
                        mqtt_data[f"{prefix}/Wh"] = get_detail_month_hc["value"]
                        mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hc["value"] / 1000, 2)
                        mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hc["value"] / 1000 * price_hc, 2)
                    # Step one calendar year back, clamping at the first year with data.
                    if date_begin_current == date_begin:
                        finish = True
                    date_end = datetime.combine(
                        (date_end - relativedelta(years=1)).replace(month=12, day=31),
                        datetime.max.time(),
                    )
                    date_begin_current = date_begin_current - relativedelta(years=1)
                    if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC):
                        date_begin_current = date_begin

                    self.mqtt_client.publish_multiple(mqtt_data)

                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def detail_linear(self, price_hp, price_hc=0, measurement_direction="consumption"):
        """Publish rolling-window HP/HC aggregates (detail data): year, year-1, …

        Args:
            price_hp: Peak-hour (HP) price per kWh.
            price_hc: Off-peak (HC) price per kWh. Defaults to 0.
            measurement_direction (str, optional): "consumption" or "production".
        """
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des données linéaires détaillées")
            date_range = DatabaseDetail(self.usage_point_id).get_date_range()
            stat = Stat(self.usage_point_id, measurement_direction)
            if date_range["begin"] and date_range["end"]:
                date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC)
                date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC)
                date_begin_current = date_end - relativedelta(years=1)
                idx = 0
                finish = False
                while not finish:
                    if idx == 0:
                        key = "year"
                    else:
                        key = f"year-{idx}"
                    sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}"
                    get_daily_year_linear_hp = stat.get_year_linear(idx, "HP")
                    get_daily_year_linear_hc = stat.get_year_linear(idx, "HC")
                    get_detail_month_linear_hp = stat.get_month_linear(idx, "HP")
                    get_detail_month_linear_hc = stat.get_month_linear(idx, "HC")
                    get_detail_week_linear_hp = stat.get_week_linear(idx, "HP")
                    get_detail_week_linear_hc = stat.get_week_linear(
                        idx,
                        "HC",
                    )
                    mqtt_data = {
                        # thisYear
                        f"{sub_prefix}/thisYear/hp/Wh": get_daily_year_linear_hp["value"],
                        f"{sub_prefix}/thisYear/hp/kWh": round(get_daily_year_linear_hp["value"] / 1000, 2),
                        f"{sub_prefix}/thisYear/hp/euro": round(
                            get_daily_year_linear_hp["value"] / 1000 * price_hp, 2
                        ),
                        f"{sub_prefix}/thisYear/hc/Wh": get_daily_year_linear_hc["value"],
                        f"{sub_prefix}/thisYear/hc/kWh": round(get_daily_year_linear_hc["value"] / 1000, 2),
                        f"{sub_prefix}/thisYear/hc/euro": round(
                            get_daily_year_linear_hc["value"] / 1000 * price_hc, 2
                        ),
                        # thisMonth
                        f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_linear_hp["value"],
                        f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_linear_hp["value"] / 1000, 2),
                        f"{sub_prefix}/thisMonth/hp/euro": round(
                            get_detail_month_linear_hp["value"] / 1000 * price_hp, 2
                        ),
                        f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_linear_hc["value"],
                        f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_linear_hc["value"] / 1000, 2),
                        f"{sub_prefix}/thisMonth/hc/euro": round(
                            get_detail_month_linear_hc["value"] / 1000 * price_hc, 2
                        ),
                        # thisWeek
                        f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_linear_hp["value"],
                        f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_linear_hp["value"] / 1000, 2),
                        f"{sub_prefix}/thisWeek/hp/euro": round(
                            get_detail_week_linear_hp["value"] / 1000 * price_hp, 2
                        ),
                        f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_linear_hc["value"],
                        f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_linear_hc["value"] / 1000, 2),
                        f"{sub_prefix}/thisWeek/hc/euro": round(
                            get_detail_week_linear_hc["value"] / 1000 * price_hc, 2
                        ),
                    }

                    # CALCUL NEW DATE
                    if date_begin_current.astimezone(TIMEZONE_UTC) <= date_begin.astimezone(TIMEZONE_UTC):
                        finish = True
                    date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time())
                    date_begin_current = date_begin_current - relativedelta(years=1)
                    if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC):
                        date_begin_current = datetime.combine(date_begin, datetime.min.time())
                    idx = idx + 1

                    self.mqtt_client.publish_multiple(mqtt_data)
                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def max_power(self):
        """Publish daily max-power events and usage relative to the subscribed power."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des données de puissance max journalières.")
            max_power_data = DatabaseMaxPower(self.usage_point_id).get_all(order="asc")
            mqtt_data = {}
            contract = DatabaseContracts(self.usage_point_id).get()
            if max_power_data:
                max_value = 0
                if hasattr(contract, "subscribed_power"):
                    # subscribed_power looks like "<kVA> ..." — first token, in kVA,
                    # scaled to the same unit as data.value (W/VA).
                    max_value = int(contract.subscribed_power.split(" ")[0]) * 1000
                for data in max_power_data:
                    if data.event_date is not None:
                        date = data.event_date.strftime("%A")
                        sub_prefix = f"{self.usage_point_id}/power_max/{date}"
                        mqtt_data[f"{sub_prefix}/date"] = data.event_date.strftime("%Y-%m-%d")
                        mqtt_data[f"{sub_prefix}/event_hour"] = data.event_date.strftime("%H:%M:%S")
                        mqtt_data[f"{sub_prefix}/value"] = data.value
                        value_w = data.value
                        # NOTE(review): when max_value is 0 (no subscribed_power),
                        # threshold_exceeded is reported as 1 even without an actual
                        # exceedance — confirm this is the intended semantics.
                        if max_value != 0 and max_value >= value_w:
                            mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 0
                            threshold_usage = int(100 * value_w / max_value)
                        else:
                            mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 1
                            threshold_usage = int(0)
                        mqtt_data[f"{sub_prefix}/percentage_usage"] = threshold_usage
                self.mqtt_client.publish_multiple(mqtt_data)
                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def ecowatt(self):
        """Publish Ecowatt signals for today (j0), tomorrow (j1) and beyond (j2)."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Génération des données Ecowatt")
            begin = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - relativedelta(days=1), datetime.min.time())
            end = begin + timedelta(days=7)
            ecowatt = DatabaseEcowatt().get_range(begin, end)
            today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time())
            mqtt_data = {}
            if ecowatt:
                for data in ecowatt:
                    if data.date == today:
                        queue = "j0"
                    elif data.date == today + timedelta(days=1):
                        queue = "j1"
                    else:
                        queue = "j2"
                    mqtt_data[f"ecowatt/{queue}/date"] = data.date.strftime(self.date_format_detail)
                    mqtt_data[f"ecowatt/{queue}/value"] = data.value
                    mqtt_data[f"ecowatt/{queue}/message"] = data.message
                    # `detail` is a stringified {datetime-str: value} dict.
                    for date, value in ast.literal_eval(data.detail).items():
                        date_tmp = (
                            datetime.strptime(date, self.date_format_detail).astimezone(TIMEZONE_UTC).strftime("%H")
                        )
                        mqtt_data[f"ecowatt/{queue}/detail/{date_tmp}"] = value
                self.mqtt_client.publish_multiple(mqtt_data)
                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")

    def tempo(self):  # noqa: C901
        """Publish Tempo prices, day quotas, today/tomorrow colors and yearly costs."""
        with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"):
            logging.info("Envoie des données Tempo")
            mqtt_data = {}
            tempo_data = DatabaseStatistique(self.usage_point_id).get("price_consumption")
            tempo_price = DatabaseTempo().get_config("price")
            if tempo_price:
                for color, price in tempo_price.items():
                    mqtt_data[f"tempo/price/{color}"] = price
            tempo_days = DatabaseTempo().get_config("days")
            if tempo_days:
                for color, days in tempo_days.items():
                    mqtt_data[f"tempo/days/{color}"] = days
            today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time())
            tempo_color = DatabaseTempo().get_range(today, today)
            if tempo_color:
                mqtt_data["tempo/color/today"] = tempo_color[0].color
            tomorrow = today + timedelta(days=1)
            tempo_color = DatabaseTempo().get_range(tomorrow, tomorrow)
            if tempo_color:
                mqtt_data["tempo/color/tomorrow"] = tempo_color[0].color
            # NOTE(review): price/days/color topics gathered above are only
            # published when tempo_data exists — confirm they should not be
            # published on their own.
            if tempo_data:
                for year, data in ast.literal_eval(tempo_data[0].value).items():
                    select_year = year
                    if year == datetime.now(tz=TIMEZONE_UTC).strftime("%Y"):
                        select_year = "current"
                    for color, tempo in data["TEMPO"].items():
                        mqtt_data[
                            f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/Wh"
                        ] = round(tempo["Wh"], 2)
                        mqtt_data[
                            f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/kWh"
                        ] = round(tempo["kWh"], 2)
                        mqtt_data[
                            f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/euro"
                        ] = round(tempo["euro"], 2)
                    for month, month_data in data["month"].items():
                        for month_color, month_tempo in month_data["TEMPO"].items():
                            # The current month is additionally mirrored under
                            # ".../thisMonth/...".
                            if month == datetime.strftime(datetime.now(tz=TIMEZONE_UTC), "%m"):
                                if month_tempo:
                                    mqtt_data[
                                        f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/Wh"
                                    ] = round(month_tempo["Wh"], 2)
                                    mqtt_data[
                                        f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/kWh"
                                    ] = round(month_tempo["kWh"], 2)
                                    mqtt_data[
                                        f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/euro"
                                    ] = round(month_tempo["euro"], 2)
                            if month_tempo:
                                mqtt_data[
                                    f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/Wh"
                                ] = round(month_tempo["Wh"], 2)
                                mqtt_data[
                                    f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/kWh"
                                ] = round(month_tempo["kWh"], 2)
                                mqtt_data[
                                    f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/euro"
                                ] = round(month_tempo["euro"], 2)
                self.mqtt_client.publish_multiple(mqtt_data)
                logging.info(" => OK")
            else:
                logging.info(" => Pas de donnée")
address data from the API and store it in the database.""" + +import inspect +import json +import logging +import traceback + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from database.addresses import DatabaseAddresses +from database.usage_points import DatabaseUsagePoints +from models.query import Query + + +class Address: + """Fetch address data from the API and store it in the database.""" + + def __init__(self, headers, usage_point_id): + self.url = URL + + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = APP_CONFIG.usage_point_id_config(self.usage_point_id) + + def run(self): + """Run the address query process.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + name = "addresses" + endpoint = f"{name}/{self.usage_point_id}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + target = f"{self.url}/{endpoint}" + + response = Query(endpoint=target, headers=self.headers).get() + if response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(response.text) + response = response_json["customer"]["usage_points"][0] + usage_point = response["usage_point"] + usage_point_addresses = usage_point["usage_point_addresses"] + response = usage_point_addresses + response.update(usage_point) + DatabaseAddresses(self.usage_point_id).set( + { + "usage_points": str(usage_point["usage_point_id"]) + if usage_point["usage_point_id"] is not None + else "", + "street": str(usage_point_addresses["street"]) + if usage_point_addresses["street"] is not None + else "", + "locality": str(usage_point_addresses["locality"]) + if usage_point_addresses["locality"] is not None + else "", + "postal_code": str(usage_point_addresses["postal_code"]) + if usage_point_addresses["postal_code"] is not None + else "", + "insee_code": str(usage_point_addresses["insee_code"]) + if 
usage_point_addresses["insee_code"] is not None + else "", + "city": str(usage_point_addresses["city"]) + if usage_point_addresses["city"] is not None + else "", + "country": str(usage_point_addresses["country"]) + if usage_point_addresses["country"] is not None + else "", + "geo_points": str(usage_point_addresses["geo_points"]) + if usage_point_addresses["geo_points"] is not None + else "", + } + ) + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération du contrat.", + } + return response + else: + return {"error": True, "description": json.loads(response.text)["detail"]} + + def get(self): + """Retrieve address data from the database and format it as a dictionary.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = DatabaseAddresses(self.usage_point_id).get() + if not current_cache: + # No cache + logging.info(" => Pas de cache") + result = self.run() + elif hasattr(self.usage_point_config, "refresh_addresse") and self.usage_point_config.refresh_addresse: + logging.info(" => Mise à jour du cache") + result = self.run() + self.usage_point_config.refresh_addresse = False + DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) + else: + # Get data in cache + logging.info(" => Récupération du cache") + result = {} + for column in current_cache.__table__.columns: + result[column.name] = str(getattr(current_cache, column.name)) + logging.debug(f" => {result}") + if "error" not in result: + for key, value in result.items(): + if key != "usage_point_addresses": + logging.info(f"{key}: {value}") + else: + logging.error(result) + return result diff --git a/src/external_services/myelectricaldata/cache.py b/src/external_services/myelectricaldata/cache.py new file mode 100644 index 00000000..f5435322 --- /dev/null +++ b/src/external_services/myelectricaldata/cache.py @@ -0,0 +1,36 @@ 
+"""Manage local cache.""" + +import inspect +import json +import logging + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from models.query import Query +from utils import get_version + + +class Cache: + """Manage local cache.""" + + def __init__(self, usage_point_id, headers=None): + self.url = URL + self.headers = headers + self.usage_point_id = usage_point_id + + def reset(self): + """Reset local cache.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + target = f"{self.url}/cache/{self.usage_point_id}" + response = Query(endpoint=target, headers=self.headers).delete() + if response.status_code == CODE_200_SUCCESS: + try: + status = json.loads(response.text) + for key, value in status.items(): + logging.info(f"{key}: {value}") + status["version"] = get_version() + return status + except LookupError: + return {"error": True, "description": "Erreur lors du reset du cache."} + else: + return {"error": True, "description": "Erreur lors du reset du cache."} diff --git a/src/external_services/myelectricaldata/contract.py b/src/external_services/myelectricaldata/contract.py new file mode 100755 index 00000000..c4143260 --- /dev/null +++ b/src/external_services/myelectricaldata/contract.py @@ -0,0 +1,126 @@ +"""Query contract from gateway.""" + +import datetime +import inspect +import json +import logging +import re +import traceback + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from database.contracts import DatabaseContracts +from database.usage_points import DatabaseUsagePoints +from models.query import Query + + +class Contract: + """Query contract from gateway.""" + + def __init__(self, headers, usage_point_id, config): + self.url = URL + + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = config + + def run(self): + """Run the contract query process.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + name = "contracts" + endpoint = f"{name}/{self.usage_point_id}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + target = f"{self.url}/{endpoint}" + + query_response = Query(endpoint=target, headers=self.headers).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + response = response_json["customer"]["usage_points"][0] + usage_point = response["usage_point"] + contracts = response["contracts"] + response = contracts + response.update(usage_point) + + if contracts["offpeak_hours"] is not None: + offpeak_hours = re.search(r"HC \((.*)\)", contracts["offpeak_hours"]).group(1) + else: + offpeak_hours = "" + if "last_activation_date" in contracts and contracts["last_activation_date"] is not None: + last_activation_date = ( + datetime.datetime.strptime(contracts["last_activation_date"], "%Y-%m-%d%z") + ).replace(tzinfo=None) + else: + last_activation_date = contracts["last_activation_date"] + if ( + "last_distribution_tariff_change_date" in contracts + and contracts["last_distribution_tariff_change_date"] is not None + ): + last_distribution_tariff_change_date = ( + datetime.datetime.strptime( + contracts["last_distribution_tariff_change_date"], + "%Y-%m-%d%z", + ) + ).replace(tzinfo=None) + else: + last_distribution_tariff_change_date = contracts["last_distribution_tariff_change_date"] + DatabaseContracts(self.usage_point_id).set( + { + "usage_point_status": usage_point["usage_point_status"], + "meter_type": usage_point["meter_type"], + "segment": contracts["segment"], + "subscribed_power": contracts["subscribed_power"], + "last_activation_date": last_activation_date, + "distribution_tariff": contracts["distribution_tariff"], + "offpeak_hours_0": offpeak_hours, + "offpeak_hours_1": offpeak_hours, + "offpeak_hours_2": offpeak_hours, + "offpeak_hours_3": 
offpeak_hours, + "offpeak_hours_4": offpeak_hours, + "offpeak_hours_5": offpeak_hours, + "offpeak_hours_6": offpeak_hours, + "contract_status": contracts["contract_status"], + "last_distribution_tariff_change_date": last_distribution_tariff_change_date, + } + ) + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération du contrat.", + } + return response + else: + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def get(self): + """Get Contract information.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = DatabaseContracts(self.usage_point_id).get() + if not current_cache: + # No cache + logging.info(" => Pas de cache") + result = self.run() + elif hasattr(self.usage_point_config, "refresh_contract") and self.usage_point_config.refresh_contract: + logging.info(" => Mise à jour du cache") + result = self.run() + self.usage_point_config.refresh_contract = False + DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) + else: + # Get data in cache + logging.info(" => Récupération du cache") + result = {} + for column in current_cache.__table__.columns: + result[column.name] = str(getattr(current_cache, column.name)) + logging.debug(f" => {result}") + if "error" not in result: + for key, value in result.items(): + logging.info(f"{key}: {value}") + else: + logging.error(result) + return result diff --git a/src/external_services/myelectricaldata/daily.py b/src/external_services/myelectricaldata/daily.py new file mode 100644 index 00000000..ef3f6fbd --- /dev/null +++ b/src/external_services/myelectricaldata/daily.py @@ -0,0 +1,309 @@ +"""The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point.""" + +import inspect +import json +import logging +from datetime import datetime, timedelta + +from 
dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import ( + CODE_200_SUCCESS, + CODE_403_FORBIDDEN, + CODE_404_NOT_FOUND, + CODE_500_INTERNAL_SERVER_ERROR, + DAILY_MAX_DAYS, + TIMEZONE, + URL, +) +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.usage_points import DatabaseUsagePoints +from models.query import Query +from models.stat import Stat +from utils import daterange, is_json + + +class Daily: + """The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point. + + It provides methods for fetching, resetting, deleting, and blacklisting daily data. + + Attributes: + config (dict): The configuration settings. + db (object): The database object. + url (str): The base URL for API requests. + max_daily (int): The maximum number of days to retrieve data for. + date_format (str): The format of dates. + date_detail_format (str): The format of detailed dates. + headers (dict): The headers for API requests. + usage_point_id (str): The ID of the usage point. + usage_point_config (object): The configuration settings for the usage point. + contract (object): The contract associated with the usage point. + daily_max_days (int): The maximum number of days for daily data. + max_days_date (datetime): The maximum date for retrieving data. + activation_date (datetime): The activation date for retrieving data. + measure_type (str): The type of measurement (consumption or production). + base_price (float): The base price for the measurement type. + + Methods: + run(begin, end): + Retrieves and stores daily data for a specified date range. + + get(): + Retrieves and returns all available daily data for the usage point. + + reset(date=None): + Resets the daily data for the usage point, optionally for a specific date. + + delete(date=None): + Deletes the daily data for the usage point, optionally for a specific date. 
+ + fetch(date): + Fetches and returns the daily data for a specific date. + + blacklist(date, action): + Adds or removes a date from the blacklist for the usage point. + + Note: + The 'Daily' class relies on the 'Query' class for making API requests and the 'Stat' class + for retrieving additional statistics. + + Example usage: + headers = {"Authorization": "Bearer token"} + usage_point_id = "1234567890" + daily = Daily(headers, usage_point_id) + data = daily.get() + for item in data: + print(item) + """ + + def __init__(self, headers, usage_point_id, measure_type="consumption"): + self.url = URL + self.max_daily = 1095 + self.date_format = "%Y-%m-%d" + self.date_detail_format = "%Y-%m-%d %H:%M:%S" + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() + self.daily_max_days = int(DAILY_MAX_DAYS) + self.max_days_date = datetime.now(tz=TIMEZONE) - timedelta(days=self.daily_max_days) + if ( + measure_type == "consumption" + and hasattr(self.usage_point_config, "consumption_max_date") + and self.usage_point_config.consumption_max_date != "" + and self.usage_point_config.consumption_max_date is not None + ): + self.activation_date = self.usage_point_config.consumption_max_date + elif ( + measure_type == "production" + and hasattr(self.usage_point_config, "production_max_date") + and self.usage_point_config.production_max_date != "" + and self.usage_point_config.production_max_date is not None + ): + self.activation_date = self.usage_point_config.production_max_date + elif ( + hasattr(self.contract, "last_activation_date") + and self.contract.last_activation_date != "" + and self.contract.last_activation_date is not None + ): + self.activation_date = self.contract.last_activation_date + else: + self.activation_date = self.max_days_date + self.measure_type = measure_type + self.daily = DatabaseDaily(self.usage_point_id, 
self.measure_type) + self.base_price = 0 + if measure_type == "consumption": + if hasattr(self.usage_point_config, "consumption_price_base"): + self.base_price = self.usage_point_config.consumption_price_base + elif hasattr(self.usage_point_config, "production_price"): + self.base_price = self.usage_point_config.production_price + + def run(self, begin, end): # noqa: C901, PLR0915 + """Retrieves and stores daily data for a specified date range.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin_str = begin.strftime(self.date_format) + end_str = end.strftime(self.date_format) + logging.info(f"Récupération des données : {begin_str} => {end_str}") + endpoint = f"daily_{self.measure_type}/{self.usage_point_id}/start/{begin_str}/end/{end_str}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + try: + current_data = DatabaseDaily(self.usage_point_id, self.measure_type).get(begin, end) + if not current_data["missing_data"]: + logging.info(" => Toutes les données sont déjà en cache.") + output = [] + for date, data in current_data["date"].items(): + output.append({"date": date, "value": data["value"]}) + return output + else: + logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") + data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() + if data.status_code == CODE_403_FORBIDDEN: + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + "exit": True, + } + blacklist = 0 + max_histo = datetime.combine(datetime.now(tz=TIMEZONE), datetime.max.time()) - timedelta(days=1) + if hasattr(data, "status_code"): + if data.status_code == CODE_200_SUCCESS: 
+ meter_reading = json.loads(data.text)["meter_reading"] + if meter_reading is not None and "interval_reading" in meter_reading: + interval_reading = meter_reading["interval_reading"] + interval_reading_tmp = {} + for interval_reading_data in interval_reading: + interval_reading_tmp[interval_reading_data["date"]] = interval_reading_data[ + "value" + ] + single_date: datetime + for single_date in daterange(begin, end): + single_date_tz: datetime = single_date.replace(tzinfo=TIMEZONE) + max_histo = max_histo.replace(tzinfo=TIMEZONE) + if single_date_tz < max_histo: + if single_date_tz.strftime(self.date_format) in interval_reading_tmp: + # FOUND + self.daily.insert( + date=datetime.combine(single_date_tz, datetime.min.time()), + value=interval_reading_tmp[single_date_tz.strftime(self.date_format)], + blacklist=blacklist, + ) + else: + # NOT FOUND + self.daily.fail_increment( + date=datetime.combine(single_date_tz, datetime.min.time()), + ) + return interval_reading + return { + "error": True, + "description": "Données non disponibles.", + "status_code": CODE_404_NOT_FOUND, + } + if is_json(data.text): + description = json.loads(data.text)["detail"] + else: + description = data.text + return { + "error": True, + "description": description, + "status_code": data.status_code, + } + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + } + except Exception as e: + logging.exception(e) + logging.error(e) + + def get(self): + """Generate a range of dates between a start date and an end date. + + Parameters: + start_date (datetime.date): The start date of the range. + end_date (datetime.date): The end date of the range. + + Yields: + datetime.date: The next date in the range. 
+ + Example: + >>> start_date = datetime.date(2021, 1, 1) + >>> end_date = datetime.date(2021, 1, 5) + >>> for date in daterange(start_date, end_date): + ... print(date) + ... + 2021-01-01 + 2021-01-02 + 2021-01-03 + 2021-01-04 + + Note: + The end date is exclusive, meaning it is not included in the range. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + end = datetime.combine((datetime.now(tz=TIMEZONE) + timedelta(days=2)), datetime.max.time()).astimezone( + TIMEZONE + ) + begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()).astimezone( + TIMEZONE + ) + result = [] + self.activation_date = self.activation_date.astimezone(TIMEZONE) + response = self.run(begin, end) + if response is None or ("error" in response and response.get("error", False)): + logging.error("Echec de la récupération des données") + if "description" in response: + logging.error(f'=> {response["description"]}') + logging.error(f"=> {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") + return result + + def reset(self, date=None): + """Resets the daily data for the usage point, optionally for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDaily(self.usage_point_id, self.measure_type).reset(date) + return True + + def delete(self, date=None): + """Deletes the daily data for the usage point, optionally for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDaily(self.usage_point_id, self.measure_type).delete(date) + return True + + def fetch(self, date): + """Fetches and returns the daily data for a specific date.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + result = self.run( + datetime.combine(date - timedelta(days=2), datetime.min.time()), + datetime.combine(date + timedelta(days=2), datetime.min.time()), + ) + if "error" in result: + return { + "error": True, + "notif": result["description"], + "fail_count": DatabaseDaily(self.usage_point_id, self.measure_type).get_fail_count(date=date), + } + for item in result: + if date.strftime(self.date_format) in item["date"]: + item["hc"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hc") + item["hp"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hp") + return item + return { + "error": True, + "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", + "fail_count": DatabaseDaily(self.usage_point_id, self.measure_type).get_fail_count(date=date), + } + + def blacklist(self, date, action): + """Adds or removes a date from the blacklist for the usage point.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDaily(self.usage_point_id, self.measure_type).blacklist(date, action) + return True diff --git a/src/external_services/myelectricaldata/detail.py b/src/external_services/myelectricaldata/detail.py new file mode 100644 index 00000000..fe19eb02 --- /dev/null +++ b/src/external_services/myelectricaldata/detail.py @@ -0,0 +1,301 @@ +"""Get myelectricaldata detail data.""" + +import inspect +import json +import logging +import re +from datetime import datetime, timedelta + +from config.main import APP_CONFIG +from const import ( + CODE_200_SUCCESS, + CODE_400_BAD_REQUEST, + CODE_403_FORBIDDEN, + CODE_404_NOT_FOUND, + CODE_409_CONFLICT, + CODE_500_INTERNAL_SERVER_ERROR, + 
DETAIL_MAX_DAYS, + TIMEZONE, + URL, +) +from database.config import DatabaseConfig +from database.contracts import DatabaseContracts +from database.detail import DatabaseDetail +from database.usage_points import DatabaseUsagePoints +from db_schema import ConsumptionDetail, ProductionDetail +from models.query import Query +from utils import is_json + + +class Detail: + """Manage detail data.""" + + def __init__(self, headers, usage_point_id, measure_type="consumption"): + self.url = URL + self.max_detail = 7 + self.date_format = "%Y-%m-%d" + self.date_detail_format = "%Y-%m-%d %H:%M:%S" + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() + self.daily_max_days = int(DETAIL_MAX_DAYS) + self.max_days_date = datetime.now(tz=TIMEZONE) - timedelta(days=self.daily_max_days) + if ( + measure_type == "consumption" + and hasattr(self.usage_point_config, "consumption_detail_max_date") + and self.usage_point_config.consumption_detail_max_date != "" + and self.usage_point_config.consumption_detail_max_date is not None + ): + self.activation_date = self.usage_point_config.consumption_detail_max_date + elif ( + measure_type == "production" + and hasattr(self.usage_point_config, "production_detail_max_date") + and self.usage_point_config.production_detail_max_date != "" + and self.usage_point_config.production_detail_max_date is not None + ): + self.activation_date = self.usage_point_config.production_detail_max_date + elif ( + hasattr(self.contract, "last_activation_date") + and self.contract.last_activation_date != "" + and self.contract.last_activation_date is not None + ): + self.activation_date = self.contract.last_activation_date + else: + self.activation_date = self.max_days_date + self.offpeak_hours = { + 0: self.usage_point_config.offpeak_hours_0, + 1: self.usage_point_config.offpeak_hours_1, + 2: 
self.usage_point_config.offpeak_hours_2, + 3: self.usage_point_config.offpeak_hours_3, + 4: self.usage_point_config.offpeak_hours_4, + 5: self.usage_point_config.offpeak_hours_5, + 6: self.usage_point_config.offpeak_hours_6, + } + self.activation_date = self.activation_date.replace(tzinfo=TIMEZONE) + self.measure_type = measure_type + self.base_price = 0 + if measure_type == "consumption": + self.detail_table = ConsumptionDetail + if hasattr(self.usage_point_config, "consumption_price_base"): + self.base_price = self.usage_point_config.consumption_price_base + else: + self.detail_table = ProductionDetail + if hasattr(self.usage_point_config, "production_price"): + self.base_price = self.usage_point_config.production_price + + def run(self, begin, end): # noqa: C901 + """Run the detail query.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if begin.strftime(self.date_format) == end.strftime(self.date_format): + end = end + timedelta(days=1) + begin_str = begin.strftime(self.date_format) + end_str = end.strftime(self.date_format) + logging.info(f"Récupération des données : {begin_str} => {end_str}") + endpoint = f"{self.measure_type}_load_curve/{self.usage_point_id}/start/{begin_str}/end/{end_str}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + try: + if datetime.now(tz=TIMEZONE) >= end.astimezone(TIMEZONE): + current_data = DatabaseDetail(self.usage_point_id, self.measure_type).get(begin, end) + if not current_data["missing_data"]: + logging.info(" => Toutes les données sont déjà en cache.") + output = [] + for date, data in current_data["date"].items(): + output.append({"date": date, "value": data["value"]}) + return output + + logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") + data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() + if hasattr(data, "status_code"): + if 
data.status_code == CODE_403_FORBIDDEN: + description = data + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + return { + "error": True, + "description": description, + "status_code": getattr(data, "status_code", CODE_403_FORBIDDEN), + "exit": True, + } + if data.status_code == CODE_200_SUCCESS: + meter_reading = json.loads(data.text)["meter_reading"] + if meter_reading is not None and "interval_reading" in meter_reading: + interval_reading = meter_reading["interval_reading"] + for interval_reading_data in interval_reading: + value = interval_reading_data["value"] + interval = re.findall(r"\d+", interval_reading_data["interval_length"])[0] + date = interval_reading_data["date"] + date_object = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE) + # CHANGE DATE TO BEGIN RANGE + date = date_object - timedelta(minutes=int(interval)) + if int(value) == 0: + logging.warning(f" => {date} blacklint incrementation.") + DatabaseDetail(self.usage_point_id, self.measure_type).fail_increment(date) + else: + DatabaseDetail(self.usage_point_id, self.measure_type).insert( + date=date, + value=value, + interval=interval, + blacklist=0, + ) + return interval_reading + return { + "error": True, + "description": "Données non disponibles.", + "status_code": CODE_404_NOT_FOUND, + } + if is_json(data.text) and "detail" in data.text: + description = json.loads(data.text)["detail"] + else: + description = data.text + return { + "error": True, + "description": description, + "status_code": data.status_code, + } + description = data + if hasattr(data, "text") and "detail" in data.text: + description = json.loads(data.text)["detail"] + return { + "error": True, + "description": description, + "status_code": getattr(data, "status_code", CODE_500_INTERNAL_SERVER_ERROR), + } + except Exception as e: + logging.exception(e) + logging.error(e) + + def get(self): + """Get the detail data.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + end = datetime.combine((datetime.now(tz=TIMEZONE) + timedelta(days=2)), datetime.max.time()).replace( + tzinfo=TIMEZONE + ) + begin = datetime.combine(end - timedelta(days=self.max_detail), datetime.min.time()).replace( + tzinfo=TIMEZONE + ) + finish = True + result = [] + while finish: + if self.max_days_date > begin: + # Max day reached + begin = self.max_days_date + finish = False + response = self.run(begin, end) + elif self.activation_date and self.activation_date > begin: + # Activation date reached + begin = self.activation_date + finish = False + response = self.run(begin, end) + else: + response = self.run(begin, end) + begin = begin - timedelta(days=self.max_detail) + end = end - timedelta(days=self.max_detail) + if "exit" in response: + finish = False + response = { + "error": True, + "description": response["description"], + "status_code": response["status_code"], + } + if response is not None: + result = [*result, *response] + else: + response = { + "error": True, + "description": "MyElectricalData est indisponible.", + } + if response is None or ("error" in response and response.get("error", False)): + logging.error("Echec de la récupération des données.") + if "description" in response: + logging.error(f'=> {response["description"]}') + logging.error(" => %s -> %s", begin.strftime(self.date_format), end.strftime(self.date_format)) + if "status_code" in response and ( + response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST + ): + finish = False + logging.error("Arrêt de la récupération des données suite à une erreur.") + logging.error( + "Prochain lancement à %s", + datetime.now(tz=TIMEZONE) + timedelta(seconds=DatabaseConfig().get("cycle")), + ) + return result + + def reset_daily(self, date): + """Reset the detail for a specific date.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.min.time() + ).astimezone(TIMEZONE) + end = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.max.time() + ).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).reset_range(begin, end) + return True + + def delete_daily(self, date): + """Delete the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.min.time() + ).astimezone(TIMEZONE) + end = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE), datetime.max.time() + ).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).reset_range(begin, end) + return True + + def reset(self, date=None): + """Reset the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).reset(date) + return True + + def delete(self, date=None): + """Delete the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).delete(date) + return True + + def fetch(self, date): + """Fetch the detail for a specific date.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = 
datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + result = self.run( + datetime.combine(date - timedelta(days=2), datetime.min.time()), + datetime.combine(date + timedelta(days=2), datetime.min.time()), + ) + if isinstance(result, dict) and result.get("error", False): + return { + "error": True, + "notif": result["description"], + "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), + } + + for item in result: + if isinstance(item["date"], str): + item["date"] = datetime.strptime(item["date"], self.date_detail_format).astimezone(TIMEZONE) + result_date = item["date"].strftime(self.date_format) + if date.strftime(self.date_format) in result_date: + item["date"] = result_date + return item + + return { + "error": True, + "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", + "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), + } + + def blacklist(self, date, action): + """Adds or removes a date from the blacklist for the usage point.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE) + DatabaseDetail(self.usage_point_id, self.measure_type).blacklist(date, action) + return True diff --git a/src/external_services/myelectricaldata/ecowatt.py b/src/external_services/myelectricaldata/ecowatt.py new file mode 100644 index 00000000..ce8c3da6 --- /dev/null +++ b/src/external_services/myelectricaldata/ecowatt.py @@ -0,0 +1,87 @@ +"""Fetch and store Ecowatt data.""" + +import ast +import inspect +import json +import logging +import traceback +from datetime import datetime + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, TIMEZONE, URL +from database.ecowatt import DatabaseEcowatt +from models.query import Query +from utils import title 
+ + +class Ecowatt: + """Class for fetching and storing Ecowatt data.""" + + def __init__(self): + self.url = URL + self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=2), datetime.min.time()) + + def run(self): + """Fetches Ecowatt data from the API and stores it in the database.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") + end = (datetime.now(tz=TIMEZONE) + relativedelta(days=3)).strftime("%Y-%m-%d") + target = f"{self.url}/rte/ecowatt/{start}/{end}" + query_response = Query(endpoint=target).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + for date, data in response_json.items(): + date_obj = datetime.strptime(date, "%Y-%m-%d").astimezone(TIMEZONE) + DatabaseEcowatt().set(date_obj, data["value"], data["message"], str(data["detail"])) + response = response_json + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération des données Ecowatt.", + } + return response + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def get(self): + """Retrieve Ecowatt data from the database and format it as a dictionary.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + data = DatabaseEcowatt().get() + output = {} + for d in data: + if hasattr(d, "date") and hasattr(d, "value") and hasattr(d, "message") and hasattr(d, "detail"): + output[d.date] = { + "value": d.value, + "message": d.message, + "detail": ast.literal_eval(d.detail), + } + return output + + def fetch(self): + """Fetches Ecowatt data and returns the result.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = 
DatabaseEcowatt().get() + result = {} + if not current_cache: + title("No cache") + result = self.run() + else: + last_item = current_cache[0] + if last_item.date < self.valid_date: + result = self.run() + else: + logging.info(" => Toutes les données sont déjà en cache.") + if "error" not in result: + for key, value in result.items(): + logging.info(f"{key}: {value['message']}") + else: + logging.error(result) + return "OK" + return result diff --git a/src/external_services/myelectricaldata/power.py b/src/external_services/myelectricaldata/power.py new file mode 100644 index 00000000..70fa24d4 --- /dev/null +++ b/src/external_services/myelectricaldata/power.py @@ -0,0 +1,232 @@ +"""Model to manage the power consumption data.""" + +import inspect +import json +import logging +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import ( + CODE_200_SUCCESS, + CODE_404_NOT_FOUND, + CODE_500_INTERNAL_SERVER_ERROR, + DAILY_MAX_DAYS, + TIMEZONE, + TIMEZONE_UTC, + URL, +) +from database.contracts import DatabaseContracts +from database.max_power import DatabaseMaxPower +from database.usage_points import DatabaseUsagePoints +from models.query import Query +from utils import daterange + + +class Power: + """Class to manage the power consumption data.""" + + def __init__(self, headers, usage_point_id): + self.url = URL + self.max_daily = 1095 + self.date_format = "%Y-%m-%d" + self.date_format_detail = "%Y-%m-%d %H:%M:%S" + self.headers = headers + self.usage_point_id = usage_point_id + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() + self.daily_max_days = DAILY_MAX_DAYS + self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) + if ( + hasattr(self.usage_point_config, "consumption_max_date") + and self.usage_point_config.consumption_max_date != "" + and 
self.usage_point_config.consumption_max_date is not None + ): + self.activation_date = self.usage_point_config.consumption_max_date + elif ( + hasattr(self.contract, "last_activation_date") + and self.contract.last_activation_date != "" + and self.contract.last_activation_date is not None + ): + self.activation_date = self.contract.last_activation_date + else: + self.activation_date = self.max_days_date + self.activation_date = self.activation_date.astimezone(TIMEZONE_UTC) + self.power = DatabaseMaxPower(self.usage_point_id) + + def run(self, begin, end): # noqa: C901, PLR0915 + """Run the query to get the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + begin_str = begin.strftime(self.date_format) + end_str = end.strftime(self.date_format) + logging.info(f"Récupération des données : {begin_str} => {end_str}") + endpoint = f"daily_consumption_max_power/{self.usage_point_id}/start/{begin_str}/end/{end_str}" + if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: + endpoint += "/cache" + try: + current_data = DatabaseMaxPower(self.usage_point_id).get_power(begin, end) + if not current_data["missing_data"]: + logging.info(" => Toutes les données sont déjà en cache.") + output = [] + for date, data in current_data["date"].items(): + output.append({"date": date, "value": data["value"]}) + return output + else: + logging.info(" Chargement des données depuis MyElectricalData %s => %s", begin_str, end_str) + data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() + blacklist = 0 + max_histo = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - timedelta( + days=1 + ) + if hasattr(data, "status_code"): + if data.status_code == CODE_200_SUCCESS: + meter_reading = json.loads(data.text)["meter_reading"] + if meter_reading is not None and "interval_reading" in meter_reading: + interval_reading = 
meter_reading["interval_reading"] + interval_reading_tmp = {} + for interval_reading_data in interval_reading: + date_1 = datetime.strptime( + interval_reading_data["date"], self.date_format_detail + ).astimezone(TIMEZONE_UTC) + date = datetime.combine(date_1, datetime.min.time()) + interval_reading_tmp[date.strftime(self.date_format)] = { + "date": date_1, + "value": interval_reading_data["value"], + } + for single_date in daterange(begin, end): + single_date_tz: datetime = single_date.replace(tzinfo=TIMEZONE_UTC) + max_histo = max_histo.replace(tzinfo=TIMEZONE_UTC) + if single_date_tz < max_histo: + if single_date_tz.strftime(self.date_format) in interval_reading_tmp: + # FOUND + single_date_value = interval_reading_tmp[ + single_date_tz.strftime(self.date_format) + ] + self.power.insert( + date=datetime.combine(single_date_tz, datetime.min.time()), + event_date=single_date_value["date"], + value=single_date_value["value"], + blacklist=blacklist, + ) + else: + # NOT FOUND + self.power.daily_fail_increment( + date=datetime.combine(single_date, datetime.min.time()), + ) + return interval_reading + return { + "error": True, + "description": "Données non disponibles.", + "status_code": CODE_404_NOT_FOUND, + } + else: + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + } + else: + if hasattr(data, "text"): + description = json.loads(data.text)["detail"] + else: + description = data + if hasattr(data, "status_code"): + status_code = data.status_code + else: + status_code = CODE_500_INTERNAL_SERVER_ERROR + return { + "error": True, + "description": description, + "status_code": status_code, + } + except Exception as e: + logging.exception(e) + logging.error(e) + + def get(self): + """Get the daily power 
consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + end = datetime.combine((datetime.now(tz=TIMEZONE) + timedelta(days=2)), datetime.max.time()).astimezone( + TIMEZONE + ) + begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()).astimezone( + TIMEZONE + ) + result = [] + self.activation_date = self.activation_date.astimezone(TIMEZONE) + response = self.run(begin, end) + if response is None or ("error" in response and response.get("error", False)): + logging.error("Echec de la récupération des données") + if "description" in response: + logging.error(f'=> {response["description"]}') + logging.error(f"=> {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") + return result + + def reset(self, date=None): + """Reset the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).reset_daily(date) + return True + + def delete(self, date=None): + """Delete the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).delete_daily(date) + return True + + def blacklist(self, date, action): + """Blacklist the daily power consumption data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).blacklist_daily(date, action) + return True + + def fetch(self, date): + """Fetch the daily power consumption data.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if date is not None: + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + result = self.run( + date - timedelta(days=1), + date + timedelta(days=1), + ) + if "error" in result and result.get("error"): + return { + "error": True, + "notif": result["description"], + "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), + } + for item in result: + target_date = ( + datetime.strptime(item["date"], self.date_format_detail) + .astimezone(TIMEZONE_UTC) + .strftime(self.date_format) + ) + event_date = ( + datetime.strptime(item["date"], self.date_format_detail) + .astimezone(TIMEZONE_UTC) + .strftime("%H:%M:%S") + ) + if date.strftime(self.date_format) == target_date: + item["date"] = target_date + item["event_date"] = event_date + return item + return { + "error": True, + "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", + "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), + } diff --git a/src/external_services/myelectricaldata/status.py b/src/external_services/myelectricaldata/status.py new file mode 100755 index 00000000..04cdcf0b --- /dev/null +++ b/src/external_services/myelectricaldata/status.py @@ -0,0 +1,97 @@ +"""Class representing the status of MyElectricalData.""" + +import datetime +import inspect +import json +import logging +import traceback + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, URL +from database.usage_points import DatabaseUsagePoints +from models.query import Query +from utils import get_version + + +class Status: + """Class representing the status of MyElectricalData.""" + + def __init__(self, headers=None): + self.url = URL + self.headers = headers + + def ping(self): + """Ping the MyElectricalData endpoint to check its availability.""" + with 
APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + target = f"{self.url}/ping" + status = { + "version": get_version(), + "status": False, + "information": "MyElectricalData injoignable.", + } + try: + response = Query(endpoint=target, headers=self.headers).get() + if hasattr(response, "status_code") and response.status_code == CODE_200_SUCCESS: + status = json.loads(response.text) + for key, value in status.items(): + logging.debug(f"{key}: {value}") + status["version"] = get_version() + return status + except LookupError: + return status + + def status(self, usage_point_id): + """Retrieve the status of a usage point. + + Args: + usage_point_id (str): The ID of the usage point. + + Returns: + dict: The status of the usage point. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + usage_point_id_config = DatabaseUsagePoints(usage_point_id).get() + target = f"{self.url}/valid_access/{usage_point_id}" + if hasattr(usage_point_id_config, "cache") and usage_point_id_config.cache: + target += "/cache" + response = Query(endpoint=target, headers=self.headers).get() + if response: + status = json.loads(response.text) + if response.status_code == CODE_200_SUCCESS: + try: + for key, value in status.items(): + logging.info(f"{key}: {value}") + DatabaseUsagePoints(usage_point_id).update( + consentement_expiration=datetime.datetime.strptime( + status["consent_expiration_date"], "%Y-%m-%dT%H:%M:%S" + ).replace(tzinfo=datetime.timezone.utc), + call_number=status["call_number"], + quota_limit=status["quota_limit"], + quota_reached=status["quota_reached"], + quota_reset_at=datetime.datetime.strptime( + status["quota_reset_at"], "%Y-%m-%dT%H:%M:%S.%f" + ).replace(tzinfo=datetime.timezone.utc), + ban=status["ban"], + ) + return status + except Exception as e: + if APP_CONFIG.debug: + traceback.print_exc() + logging.error(e) + return { + "error": True, + "description": "Erreur 
lors de la récupération du statut du compte.", + } + else: + if APP_CONFIG.debug: + traceback.print_exc() + logging.error(status["detail"]) + return {"error": True, "description": status["detail"]} + else: + if APP_CONFIG.debug: + traceback.print_exc() + return { + "error": True, + "status_code": response.status_code, + "description": json.loads(response.text), + } diff --git a/src/external_services/myelectricaldata/tempo.py b/src/external_services/myelectricaldata/tempo.py new file mode 100644 index 00000000..0cc2c4b8 --- /dev/null +++ b/src/external_services/myelectricaldata/tempo.py @@ -0,0 +1,213 @@ +"""Fetch tempo data from gateway and store it in the database.""" +import inspect +import json +import logging +import traceback +from datetime import datetime, timedelta + +from dateutil.relativedelta import relativedelta + +from config.main import APP_CONFIG +from const import CODE_200_SUCCESS, TIMEZONE, URL +from database.tempo import DatabaseTempo +from models.query import Query +from utils import title + + +class Tempo: + """Fetches tempo data from gateway and stores it in the database.""" + + def __init__(self): + self.url = URL + self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=1), datetime.min.time()) + self.display_nb_day = 10 + self.nb_check_day = 31 + self.total_tempo_days = { + "red": 22, + "white": 43, + "blue": 300, + } + + def run(self): + """Runs the tempo data retrieval process. + + Args: + None + + Returns: + A dictionary containing the retrieved tempo data. 
+ + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") + end = (datetime.now(tz=TIMEZONE) + relativedelta(days=2)).strftime("%Y-%m-%d") + target = f"{self.url}/rte/tempo/{start}/{end}" + query_response = Query(endpoint=target).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json: dict = json.loads(query_response.text) + for date, color in response_json.items(): + date_obj = datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + DatabaseTempo().set(date_obj, color) + response = response_json + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération de données Tempo.", + } + return response + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def get(self): + """Retrieves tempo data from the database. + + Args: + None + + Returns: + A dictionary containing the tempo data. + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + data = DatabaseTempo().get() + output = {} + for d in data: + if hasattr(d, "date") and hasattr(d, "color"): + output[d.date] = d.color + return output + + def fetch(self): + """Fetches tempo data from the database or retrieves it from the cache if available. + + Args: + None + + Returns: + A dictionary containing the tempo data. 
+ + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + current_cache = DatabaseTempo().get() + result = {} + if not current_cache: + # No cache + title("No cache") + result = self.run() + else: + valid_date = self.valid_date + missing_date = False + for i in range(self.nb_check_day): + if current_cache[i].date != valid_date: + missing_date = True + valid_date = valid_date - relativedelta(days=1) + if missing_date: + result = self.run() + else: + logging.info(" => Toutes les données sont déjà en cache.") + if "error" not in result: + if len(result) > 0: + i = 0 + for key, value in result.items(): + if i < self.display_nb_day: + logging.info(f"{key}: {value}") + i += 1 + logging.info("...") + else: + logging.error(result) + return "OK" + return result + + def calc_day(self): + """Calculates the number of days left for each color based on the current date. + + Args: + None + + Returns: + A dictionary containing the number of days left for each color. + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + now = datetime.now(tz=TIMEZONE) + begin = datetime.combine(now.replace(month=9, day=1), datetime.min.time()).astimezone(TIMEZONE) + if now < begin: + begin = begin.replace(year=int(now.strftime("%Y")) - 1) + end = datetime.combine(begin - timedelta(hours=5), datetime.max.time()).replace( + year=int(begin.strftime("%Y")) + 1 + ) + current_tempo_day = DatabaseTempo().get_range(begin=begin, end=end) + result = self.total_tempo_days + for day in current_tempo_day: + result[day.color.lower()] -= 1 + DatabaseTempo().set_config("days", result) + logging.info(" => OK") + return result + + def fetch_day(self): + """Fetches tempo days data from the API and updates the database. + + Args: + None + + Returns: + A dictionary containing the tempo days data. 
+ + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + target = f"{self.url}/edf/tempo/days" + query_response = Query(endpoint=target).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + DatabaseTempo().set_config("days", response_json) + response = {"error": False, "description": "", "items": response_json} + logging.info(" => Toutes les valeurs sont mises à jour.") + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération de jours Tempo.", + } + return response + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } + + def fetch_price(self): + """Fetches tempo price data from the API and updates the database. + + Args: + None + + Returns: + A dictionary containing the tempo price data. + + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + target = f"{self.url}/edf/tempo/price" + query_response = Query(endpoint=target).get() + if query_response.status_code == CODE_200_SUCCESS: + try: + response_json = json.loads(query_response.text) + DatabaseTempo().set_config("price", response_json) + response = {"error": False, "description": "", "items": response_json} + logging.info(" => Toutes les valeurs sont misent à jours.") + except Exception as e: + logging.error(e) + traceback.print_exc() + response = { + "error": True, + "description": "Erreur lors de la récupération de jours Tempo.", + } + return response + return { + "error": True, + "description": json.loads(query_response.text)["detail"], + } diff --git a/src/init.py b/src/init.py deleted file mode 100644 index ffea92eb..00000000 --- a/src/init.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Initialisation of the application.""" - -import locale -import logging -import sys -import time -import typing as t -from os 
import environ, getenv -from pathlib import Path - -import yaml - -from config import LOG_FORMAT, LOG_FORMAT_DATE, cycle_minimun -from dependencies import APPLICATION_PATH_DATA, APPLICATION_PATH_LOG, str2bool -from models.config import Config -from models.database import Database -from models.influxdb import InfluxDB -from models.mqtt import Mqtt - -# LOGGING CONFIGURATION -config = {} -CONFIG_PATH = Path(APPLICATION_PATH_DATA) / "config.yaml" -if Path(CONFIG_PATH).exists(): - with Path(CONFIG_PATH).open() as file: - config = yaml.safe_load(file) - -if "DEBUG" in environ and str2bool(getenv("DEBUG")): - logging_level = logging.DEBUG -else: - logging_level = logging.INFO - -if config.get("log2file"): - logging.basicConfig( - filename=f"{APPLICATION_PATH_LOG}/myelectricaldata.log", - format=LOG_FORMAT, - datefmt=LOG_FORMAT_DATE, - level=logging_level, - ) - console = logging.StreamHandler() - console.setLevel(logging_level) - formatter = logging.Formatter(LOG_FORMAT, datefmt=LOG_FORMAT_DATE) - console.setFormatter(formatter) - logging.getLogger("").addHandler(console) -else: - logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_FORMAT_DATE, level=logging_level) - -if not Path(CONFIG_PATH).exists(): - logging.critical(f"Config file is not found ({CONFIG_PATH})") - sys.exit() - - -class EndpointFilter(logging.Filter): - """Filter class for filtering log records based on the path.""" - - def __init__( - self, - path: str, - *args: t.Any, - **kwargs: t.Any, - ): - super().__init__(*args, **kwargs) - self._path = path - - def filter(self, record: logging.LogRecord) -> bool: - """Filter log records based on the path.""" - return record.getMessage().find(self._path) == -1 - - -uvicorn_logger = logging.getLogger("uvicorn.access") -uvicorn_logger.addFilter(EndpointFilter(path="/import_status")) - -locale.setlocale(locale.LC_ALL, "fr_FR.UTF-8") - -MINIMUN_CYCLE = cycle_minimun - -CONFIG = Config(path=APPLICATION_PATH_DATA) -CONFIG.load() -CONFIG.display() -CONFIG.check() - -DB 
= Database(CONFIG) -DB.init_database() -DB.unlock() - -CONFIG.set_db(DB) - -INFLUXB_ENABLE = False -INFLUXDB = None -INFLUXDB_CONFIG = CONFIG.influxdb_config() -if INFLUXDB_CONFIG and "enable" in INFLUXDB_CONFIG and str2bool(INFLUXDB_CONFIG["enable"]): - INFLUXB_ENABLE = True - if "method" in INFLUXDB_CONFIG: - method = INFLUXDB_CONFIG["method"] - else: - method = "SYNCHRONOUS" - - if "scheme" not in INFLUXDB_CONFIG: - INFLUXDB_CONFIG["scheme"] = "http" - - write_options = [] - if "batching_options" in INFLUXDB_CONFIG: - write_options = INFLUXDB_CONFIG["batching_options"] - INFLUXDB = InfluxDB( - scheme=INFLUXDB_CONFIG["scheme"], - hostname=INFLUXDB_CONFIG["hostname"], - port=INFLUXDB_CONFIG["port"], - token=INFLUXDB_CONFIG["token"], - org=INFLUXDB_CONFIG["org"], - bucket=INFLUXDB_CONFIG["bucket"], - method=method, - write_options=write_options, - ) - if CONFIG.get("wipe_influxdb"): - INFLUXDB.purge_influxdb() - CONFIG.set("wipe_influxdb", False) - time.sleep(1) - -MQTT_ENABLE = False -MQTT = None -MQTT_CONFIG = CONFIG.mqtt_config() -if MQTT_CONFIG and "enable" in MQTT_CONFIG and str2bool(MQTT_CONFIG["enable"]): - MQTT_ENABLE = True - MQTT = Mqtt( - hostname=MQTT_CONFIG["hostname"], - port=MQTT_CONFIG["port"], - username=MQTT_CONFIG["username"], - password=MQTT_CONFIG["password"], - client_id=MQTT_CONFIG["client_id"], - prefix=MQTT_CONFIG["prefix"], - retain=MQTT_CONFIG["retain"], - qos=MQTT_CONFIG["qos"], - ca_cert=MQTT_CONFIG.get("ca_cert"), - ) diff --git a/src/main.py b/src/main.py index d8e2e441..417be499 100755 --- a/src/main.py +++ b/src/main.py @@ -1,143 +1,115 @@ -import logging -from os import environ, getenv +"""Main module of the application.""" + +from contextlib import asynccontextmanager +from os import listdir +from pathlib import Path import uvicorn from fastapi import APIRouter, FastAPI -from fastapi.openapi.utils import get_openapi from fastapi.staticfiles import StaticFiles from fastapi_utils.tasks import repeat_every +from uvicorn.config import 
LOGGING_CONFIG -from config import LOG_FORMAT, LOG_FORMAT_DATE, cycle_minimun -from dependencies import APPLICATION_PATH, get_version, logo, str2bool, title, title_warning -from init import CONFIG, DB +from config.main import APP_CONFIG from models.jobs import Job from routers import account, action, data, html, info +from utils import get_version -if "DEV" in environ or "DEBUG" in environ: - title_warning("Run in Development mode") -else: - title("Run in production mode") - -title("Chargement du config.yaml...") -usage_point_list = [] -if CONFIG.list_usage_point() is not None: - for upi, upi_data in CONFIG.list_usage_point().items(): - logging.info(f"{upi}") - DB.set_usage_point(upi, upi_data) - usage_point_list.append(upi) - logging.info(" => Success") -else: - logging.warning("Aucun point de livraison détecté.") - -title("Nettoyage de la base de données...") -DB.clean_database(usage_point_list) - -swagger_configuration = { - "operationsSorter": "method", - # "defaultModelRendering": "model", - "tagsSorter": "alpha", - # "docExpansion": "none", - "deepLinking": True, -} -APP = FastAPI(title="MyElectricalData", swagger_ui_parameters=swagger_configuration) -APP.mount("/static", StaticFiles(directory=f"{APPLICATION_PATH}/static"), name="static") -ROUTER = APIRouter() -APP.include_router(info.ROUTER) -APP.include_router(html.ROUTER) -APP.include_router(data.ROUTER) -APP.include_router(action.ROUTER) -APP.include_router(account.ROUTER) +####################################################################################################################### +# JOBS +@repeat_every(seconds=APP_CONFIG.server.cycle, wait_first=False) +def job_boot(): + """Bootstap jobs.""" + Job().boot() + + +@repeat_every(seconds=3600, wait_first=True) +def job_home_assistant(): + """Home Assistant Ecowatt.""" + Job().export_home_assistant(target="ecowatt") + + +@repeat_every(seconds=600, wait_first=False) +def job_gateway_status(): + """Gateway status check.""" + Job().get_gateway_status() 
+ + +@asynccontextmanager +async def bootstrap(app: FastAPI): # pylint: disable=unused-argument + """Bootstap jobs.""" + await job_boot() + await job_home_assistant() + await job_gateway_status() + yield + -INFO = { - "title": "MyElectricalData", - "version": get_version(), - "description": "", - "contact": { +APP = FastAPI( + title="MyElectricalData", + version=get_version(), + description="MyElectricalData", + contact={ "name": "m4dm4rtig4n", "url": "https://github.com/MyElectricalData/myelectricaldata_import/issues", }, - "license_info": { + license_info={ "name": "Apache 2.0", "url": "https://www.apache.org/licenses/LICENSE-2.0.html", }, - "routes": APP.routes, - "servers": [], -} - -OPENAPI_SCHEMA = get_openapi( - title=INFO["title"], - version=INFO["version"], - description=INFO["description"], - contact=INFO["contact"], - license_info=INFO["license_info"], - routes=INFO["routes"], - servers=INFO["servers"], + swagger_configuration={ + "operationsSorter": "method", + "tagsSorter": "alpha", + "deepLinking": True, + }, + lifespan=bootstrap, ) -OPENAPI_SCHEMA["info"]["x-logo"] = { - "url": "https://pbs.twimg.com/profile_images/1415338422143754242/axomHXR0_400x400.png" -} - -APP.openapi_schema = OPENAPI_SCHEMA - -CYCLE = CONFIG.get("cycle") -if not CYCLE: - CYCLE = 14400 -else: - if CYCLE < cycle_minimun: - logging.warning("Le cycle minimun est de 3600s") - CYCLE = cycle_minimun - CONFIG.set("cycle", cycle_minimun) - -@APP.on_event("startup") -@repeat_every(seconds=CYCLE, wait_first=False) -def import_job(): - Job().boot() - - -@APP.on_event("startup") -@repeat_every(seconds=3600, wait_first=True) -def home_assistant_export(): - Job().export_home_assistant(target="ecowatt") +####################################################################################################################### +# Static files +STATIC_FOLDER = f"{APP_CONFIG.application_path}/static" +if Path(STATIC_FOLDER).is_dir() and listdir(STATIC_FOLDER): + APP.mount("/static", 
StaticFiles(directory=STATIC_FOLDER), name="static") -@APP.on_event("startup") -@repeat_every(seconds=600, wait_first=False) -def gateway_status(): - Job().get_gateway_status() +####################################################################################################################### +# ROUTER +ROUTER = APIRouter() +APP.include_router(info.ROUTER) +APP.include_router(html.ROUTER) +APP.include_router(data.ROUTER) +APP.include_router(action.ROUTER) +APP.include_router(account.ROUTER) +####################################################################################################################### +# FastAPI opentelemetry configuration +APP_CONFIG.tracing_fastapi(APP) +####################################################################################################################### +# BOOTSTRAP if __name__ == "__main__": - # from pypdf import PdfReader - # import requests - # url = "https://particulier.edf.fr/content/dam/2-Actifs/Documents/Offres/Grille_prix_Tarif_Bleu.pdf" - # file = "/tmp/Grille_prix_Tarif_Bleu.pdf" - # r = requests.get(url, allow_redirects=True, verify=False) - # reader = PdfReader(file) - # text = reader.pages[0].extract_text() + "\n" - # for line in text.splitlines(): - # if line.startswith("6 "): - # print(line) - # exit() - - logo(get_version()) - log_config = uvicorn.config.LOGGING_CONFIG - log_config["formatters"]["access"]["fmt"] = LOG_FORMAT - log_config["formatters"]["access"]["datefmt"] = LOG_FORMAT_DATE - log_config["formatters"]["default"]["fmt"] = LOG_FORMAT - log_config["formatters"]["default"]["datefmt"] = LOG_FORMAT_DATE + log_config = LOGGING_CONFIG + log_config["formatters"]["access"]["fmt"] = APP_CONFIG.logging.log_format + log_config["formatters"]["access"]["datefmt"] = APP_CONFIG.logging.log_format_date + log_config["formatters"]["default"]["fmt"] = APP_CONFIG.logging.log_format + log_config["formatters"]["default"]["datefmt"] = APP_CONFIG.logging.log_format_date uvicorn_params = { - "host": "0.0.0.0", - 
"port": CONFIG.port(), + "reload": False, "log_config": log_config, + "host": APP_CONFIG.server.cidr, + "port": APP_CONFIG.server.port, + "log_level": "error", + "reload_dirs": None, + "reload_includes": None, + "reload_excludes": None, } - if ("DEV" in environ and str2bool(getenv("DEV"))) or ("DEBUG" in environ and str2bool(getenv("DEBUG"))): + if APP_CONFIG.logging.log_http: + uvicorn_params["log_level"] = "info" + if APP_CONFIG.dev: uvicorn_params["reload"] = True - uvicorn_params["reload_dirs"] = [APPLICATION_PATH] - - ssl_config = CONFIG.ssl_config() - if ssl_config: - uvicorn_params = {**uvicorn_params, **ssl_config} + uvicorn_params["reload_dirs"] = [APP_CONFIG.application_path] + uvicorn_params["reload_includes"] = [APP_CONFIG.application_path] + uvicorn_params["reload_excludes"] = [".venv", ".git/*", ".idea/*", ".vscode/*", ".py[cod]"] + uvicorn_params = {**uvicorn_params, **APP_CONFIG.ssl_config.__dict__} uvicorn.run("main:APP", **uvicorn_params) diff --git a/src/models/__init__.py b/src/models/__init__.py index e69de29b..a7413b77 100644 --- a/src/models/__init__.py +++ b/src/models/__init__.py @@ -0,0 +1 @@ +"""This module contains the models for the MyElectricalData application.""" \ No newline at end of file diff --git a/src/models/ajax.py b/src/models/ajax.py index 9600f9b8..82c1e236 100755 --- a/src/models/ajax.py +++ b/src/models/ajax.py @@ -1,33 +1,73 @@ -import logging +"""This module represents an Ajax object.""" +import inspect from datetime import datetime import pytz - -from dependencies import APPLICATION_PATH, get_version, title -from init import CONFIG, DB +from fastapi import Request + +from config.main import APP_CONFIG +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.max_power import DatabaseMaxPower +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from 
external_services.myelectricaldata.cache import Cache +from external_services.myelectricaldata.daily import Daily +from external_services.myelectricaldata.detail import Detail +from external_services.myelectricaldata.ecowatt import Ecowatt +from external_services.myelectricaldata.power import Power +from external_services.myelectricaldata.status import Status +from external_services.myelectricaldata.tempo import Tempo from models.jobs import Job -from models.query_cache import Cache -from models.query_daily import Daily -from models.query_detail import Detail -from models.query_ecowatt import Ecowatt -from models.query_power import Power -from models.query_status import Status -from models.query_tempo import Tempo from models.stat import Stat +from utils import check_format, get_version, title utc = pytz.UTC +class UsagePoint: + """Usage point configurateur config.""" + + name: str + enable: str + token: str + cache: str + plan: str + refresh_addresse: str + refresh_contract: str + consumption: str + consumption_max_power: str + consumption_max_date: str + consumption_detail: str + consumption_detail_max_date: str + consumption_price_hc: str + consumption_price_hp: str + consumption_price_base: str + offpeak_hours_0: str + offpeak_hours_1: str + offpeak_hours_2: str + offpeak_hours_3: str + offpeak_hours_4: str + offpeak_hours_5: str + offpeak_hours_6: str + production: str + production_max_date: str + production_detail: str + production_detail_max_date: str + production_price: str + + class Ajax: + """This class represents an Ajax object.""" + def __init__(self, usage_point_id=None): - self.config = CONFIG - self.db = DB - self.application_path = APPLICATION_PATH + """Initialize Ajax.""" self.usage_point_id = usage_point_id self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" if self.usage_point_id is not None: - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) + self.usage_point_config = 
DatabaseUsagePoints(self.usage_point_id).get() if hasattr(self.usage_point_config, "token"): self.headers = { "Content-Type": "application/json", @@ -44,227 +84,275 @@ def __init__(self, usage_point_id=None): self.usage_points_id_list = "" def gateway_status(self): - if self.usage_point_id is not None: - msg = f"[{self.usage_point_id}] Check de l'état de la passerelle." - else: - msg = "Check de l'état de la passerelle." - title(msg) - return Status().ping() + """Check the status of the gateway.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if self.usage_point_id is not None: + msg = f"[{self.usage_point_id}] Check de l'état de la passerelle." + else: + msg = "Check de l'état de la passerelle." + title(msg) + return Status().ping() def account_status(self): - title(f"[{self.usage_point_id}] Check du statut du compte.") - data = Status(headers=self.headers).status(self.usage_point_id) - if isinstance(self.usage_point_config.last_call, datetime): - data["last_call"] = self.usage_point_config.last_call.strftime("%Y-%m-%d %H:%M") - else: - data["last_call"] = self.usage_point_config.last_call - return data + """Check the status of the account.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Check du statut du compte.") + data = Status(headers=self.headers).status(self.usage_point_id) + if isinstance(self.usage_point_config.last_call, datetime): + data["last_call"] = self.usage_point_config.last_call.strftime("%H:%M") + else: + data["last_call"] = self.usage_point_config.last_call + return data def fetch_tempo(self): - title(f"Récupération des jours Tempo.") - return Tempo().fetch() + """Fetch tempo day.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Récupération des jours Tempo.") + return Tempo().fetch() def get_tempo(self): - title(f"Affichage des 
jours Tempo.") - return Tempo().get() + """Fetch tempo day number.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Affichage des jours Tempo.") + return Tempo().get() def fetch_ecowatt(self): - title(f"Récupération des jours Ecowatt.") - return Ecowatt().fetch() + """Fetch the days of Ecowatt.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Récupération des jours Ecowatt.") + return Ecowatt().fetch() def get_ecowatt(self): - title(f"Affichage des jours Ecowatt.") - return Ecowatt().get() + """Get the days of Ecowatt.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title("Affichage des jours Ecowatt.") + return Ecowatt().get() def generate_price(self): - title(f"[{self.usage_point_id}] Calcul des coûts par type d'abonnements.") - return Stat(self.usage_point_id, "consumption").generate_price() + """Generate the costs by subscription type.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Calcul des coûts par type d'abonnements.") + return Stat(self.usage_point_id, "consumption").generate_price() def get_price(self): - title(f"[{self.usage_point_id}] Retourne le résultat du comparateur d'abonnements.") - return Stat(self.usage_point_id, "consumption").get_price() + """Get the result of the subscription comparator.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Retourne le résultat du comparateur d'abonnements.") + return Stat(self.usage_point_id, "consumption").get_price() def reset_all_data(self): - title(f"[{self.usage_point_id}] Reset de la consommation journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).reset() - title(f"[{self.usage_point_id}] Reset de la 
puissance maximum journalière.") - Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).reset() - title(f"[{self.usage_point_id}] Reset de la consommation détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).reset() - title(f"[{self.usage_point_id}] Reset de la production journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).reset() - title(f"[{self.usage_point_id}] Reset de la production détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).reset() - return { - "error": "false", - "notif": "Toutes les données ont été supprimées.", - } + """Reset all the data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Reset de la consommation journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).reset() + title(f"[{self.usage_point_id}] Reset de la puissance maximum journalière.") + Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).reset() + title(f"[{self.usage_point_id}] Reset de la consommation détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).reset() + title(f"[{self.usage_point_id}] Reset de la production journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset() + title(f"[{self.usage_point_id}] Reset de la production détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset() + return { + "error": "false", + "notif": "Toutes les données ont été supprimées.", + } def delete_all_data(self): - title(f"[{self.usage_point_id}] Suppression de la consommation journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).delete() - 
title(f"[{self.usage_point_id}] Suppression de la puissance maximum journalière.") - Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).delete() - title(f"[{self.usage_point_id}] Suppression de la consommation détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).delete() - title(f"[{self.usage_point_id}] Suppression de la production journalière.") - Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).delete() - title(f"[{self.usage_point_id}] Suppression de la production détaillée.") - Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).delete() - title(f"[{self.usage_point_id}] Suppression des statistiques.") - Stat(usage_point_id=self.usage_point_id).delete() - return { - "error": "false", - "notif": "Toutes les données ont été supprimées.", - } - - def reset_gateway(self): - title(f"[{self.usage_point_id}] Reset du cache de la passerelle.") - return Cache(headers=self.headers, usage_point_id=self.usage_point_id).reset() - - def reset_data(self, target, date): - result = {} - if target == "consumption": - title(f"[{self.usage_point_id}] Reset de la consommation journalière du {date}:") - result["consumption"] = Daily(headers=self.headers, usage_point_id=self.usage_point_id).reset(date) - elif target == "consumption_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") - title(f"[{self.usage_point_id}] Reset de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, usage_point_id=self.usage_point_id - ).reset_daily(date) - elif target == "consumption_max_power": - title(f"[{self.usage_point_id}] Reset de la puissance maximum du {date}:") - result["consumption_max_power"] = Power(headers=self.headers, usage_point_id=self.usage_point_id).reset( - date - ) - elif target == "production": - title(f"[{self.usage_point_id}] Reset de la 
production journalière du {date}:") - result["production"] = Daily( + """Delete all the data.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Suppression de la consommation journalière.") + Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).delete() + title(f"[{self.usage_point_id}] Suppression de la puissance maximum journalière.") + Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).delete() + title(f"[{self.usage_point_id}] Suppression de la consommation détaillée.") + Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).delete() + title(f"[{self.usage_point_id}] Suppression de la production journalière.") + Daily( headers=self.headers, usage_point_id=self.usage_point_id, measure_type="production", - ).reset(date) - elif target == "production_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") - title(f"[{self.usage_point_id}] Reset de la production détaillée du {date}:") - result["production_detail"] = Detail( + ).delete() + title(f"[{self.usage_point_id}] Suppression de la production détaillée.") + Detail( headers=self.headers, usage_point_id=self.usage_point_id, measure_type="production", - ).reset_daily(date) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if result[target]: + ).delete() + title(f"[{self.usage_point_id}] Suppression des statistiques.") + Stat(usage_point_id=self.usage_point_id).delete() return { "error": "false", - "notif": f'Reset de la "{target}" du {date}', - "result": result[target], + "notif": "Toutes les données ont été supprimées.", } - else: + + def reset_gateway(self): + """Reset the gateway cache.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Reset du cache de la passerelle.") + return Cache(headers=self.headers, 
usage_point_id=self.usage_point_id).reset() + + def reset_data(self, target, date): + """Reset the specified data for the given target and date. + + Args: + target (str): The target to reset. + date (str): The date to reset. + + Returns: + dict: The result of the reset. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if target == "consumption": + title(f"[{self.usage_point_id}] Reset de la consommation journalière du {date}:") + result["consumption"] = Daily(headers=self.headers, usage_point_id=self.usage_point_id).reset(date) + elif target == "consumption_detail": + title(f"[{self.usage_point_id}] Reset de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, usage_point_id=self.usage_point_id + ).reset_daily(date) + elif target == "consumption_max_power": + title(f"[{self.usage_point_id}] Reset de la puissance maximum du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, usage_point_id=self.usage_point_id + ).reset(date) + elif target == "production": + title(f"[{self.usage_point_id}] Reset de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset(date) + elif target == "production_detail": + title(f"[{self.usage_point_id}] Reset de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).reset_daily(date) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + if result[target]: + return { + "error": "false", + "notif": f'Reset de la "{target}" du {date}', + "result": result[target], + } return { "error": "true", "notif": "Erreur lors du traitement.", "result": result[target], } - def fetch(self, target, date): - result = {} - if target == "consumption": - if 
hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: + def fetch(self, target, date): # noqa: C901 + """Fetch the specified data for the given target and date. + + Args: + target (str): The target to fetch. + date (str): The date to fetch. + + Returns: + dict: The fetched data. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if ( + target == "consumption" + and hasattr(self.usage_point_config, "consumption") + and self.usage_point_config.consumption + ): title(f"[{self.usage_point_id}] Importation de la consommation journalière du {date}:") result["consumption"] = Daily( headers=self.headers, usage_point_id=self.usage_point_id, ).fetch(date) - elif target == "consumption_max_power": - if hasattr(self.usage_point_config, "consumption_max_power") and self.usage_point_config.consumption: + elif ( + target == "consumption_max_power" + and hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): title(f"[{self.usage_point_id}] Importation de la puissance maximum journalière du {date}:") result["consumption_max_power"] = Power( headers=self.headers, usage_point_id=self.usage_point_id, ).fetch(date) - elif target == "consumption_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") - if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: + elif ( + target == "consumption_detail" + and hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): title(f"[{self.usage_point_id}] Importation de la consommation détaillée du {date}:") result["consumption_detail"] = Detail( headers=self.headers, usage_point_id=self.usage_point_id, ).fetch(date) - elif target == "production": - if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: + elif ( + target == 
"production" + and hasattr(self.usage_point_config, "production") + and self.usage_point_config.production + ): title(f"[{self.usage_point_id}] Importation de la production journalière du {date}:") result["production"] = Daily( headers=self.headers, usage_point_id=self.usage_point_id, measure_type="production", ).fetch(date) - elif target == "production_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") - if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: + elif ( + target == "production_detail" + and hasattr(self.usage_point_config, "production_detail") + and self.usage_point_config.production_detail + ): title(f"[{self.usage_point_id}] Importation de la production détaillée du {date}:") result["production_detail"] = Detail( headers=self.headers, usage_point_id=self.usage_point_id, measure_type="production", ).fetch(date) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if "error" in result[target] and result[target]["error"]: - data = { - "error": "true", - "notif": result[target]["notif"], - "result": { - "value": 0, - "date": date, - "hc": "-", - "hp": "-", - "fail_count": result[target]["fail_count"], - }, - } - if "event_date" in result[target]: - data["result"]["event_date"] = result[target]["event_date"] - return data - else: + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + + if "error" in result[target] and result[target]["error"]: + data = { + "error": "true", + "notif": result[target]["notif"], + "result": { + "value": 0, + "date": date, + "hc": "-", + "hp": "-", + "fail_count": result[target]["fail_count"], + }, + } + if "event_date" in result[target]: + data["result"]["event_date"] = result[target]["event_date"] + return data if target in result and "value" in result[target]: data = { "error": "false", @@ -289,114 +377,138 @@ def fetch(self, target, date): } return data - def blacklist(self, target, date): - 
result = {} - if target == "consumption": - if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: - title(f"[{self.usage_point_id}] Blacklist de la consommation journalière du {date}:") - result["consumption"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 1) - elif target == "consumption_max_power": - if ( - hasattr(self.usage_point_config, "consumption_max_power") - and self.usage_point_config.consumption_max_power - ): - title(f"[{self.usage_point_id}] Blacklist de la puissance maximum du {date}:") - result["consumption_max_power"] = Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 1) - elif target == "consumption_detail": - if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: - title(f"[{self.usage_point_id}] Blacklist de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 1) - elif target == "production": - if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: - title(f"[{self.usage_point_id}] Blacklist de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 1) - elif target == "production_detail": - if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: - title(f"[{self.usage_point_id}] Blacklist de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 1) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if not result[target]: - return { - "error": "true", - "notif": "Erreur lors du traitement.", - "result": 
result[target], - } - else: + def blacklist(self, target, date): # noqa: C901 + """Blacklist the specified target for the given date. + + Args: + target (str): The target to blacklist. + date (str): The date to blacklist. + + Returns: + dict: A dictionary containing the result of the blacklist operation. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if target == "consumption": + if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: + title(f"[{self.usage_point_id}] Blacklist de la consommation journalière du {date}:") + result["consumption"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 1) + elif target == "consumption_max_power": + if ( + hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): + title(f"[{self.usage_point_id}] Blacklist de la puissance maximum du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 1) + elif target == "consumption_detail": + if ( + hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): + title(f"[{self.usage_point_id}] Blacklist de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 1) + elif target == "production": + if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: + title(f"[{self.usage_point_id}] Blacklist de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 1) + elif target == "production_detail": + if hasattr(self.usage_point_config, "production_detail") and 
self.usage_point_config.production_detail: + title(f"[{self.usage_point_id}] Blacklist de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 1) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + if not result[target]: + return { + "error": "true", + "notif": "Erreur lors du traitement.", + "result": result[target], + } return { "error": "false", "notif": f"Blacklist de la {target} journalière du {date}", "result": result[target], } - def whitelist(self, target, date): - result = {} - if target == "consumption": - if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: - title(f"[{self.usage_point_id}] Whitelist de la consommation journalière du {date}:") - result["consumption"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 0) - elif target == "consumption_max_power": - if ( - hasattr(self.usage_point_config, "consumption_max_power") - and self.usage_point_config.consumption_max_power - ): - title(f"[{self.usage_point_id}] Whitelist de la puissance maximale journalière du {date}:") - result["consumption_max_power"] = Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 0) - elif target == "consumption_detail": - if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: - title(f"[{self.usage_point_id}] Whitelist de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).blacklist(date, 0) - elif target == "production": - if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: - title(f"[{self.usage_point_id}] Whitelist de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - 
usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 0) - elif target == "production_detail": - if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: - title(f"[{self.usage_point_id}] Whitelist de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).blacklist(date, 0) - else: - return {"error": "true", "notif": "Target inconnue.", "result": ""} - if not result[target]: - return { - "error": "true", - "notif": "Erreur lors du traitement.", - "result": result[target], - } - else: + def whitelist(self, target, date): # noqa: C901 + """Whitelist the specified target for the given date. + + Args: + target (str): The target to whitelist. + date (str): The date to whitelist. + + Returns: + dict: A dictionary containing the result of the whitelist operation. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = {} + if target == "consumption": + if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: + title(f"[{self.usage_point_id}] Whitelist de la consommation journalière du {date}:") + result["consumption"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 0) + elif target == "consumption_max_power": + if ( + hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): + title(f"[{self.usage_point_id}] Whitelist de la puissance maximale journalière du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 0) + elif target == "consumption_detail": + if ( + hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): + title(f"[{self.usage_point_id}] 
Whitelist de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).blacklist(date, 0) + elif target == "production": + if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: + title(f"[{self.usage_point_id}] Whitelist de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 0) + elif target == "production_detail": + if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: + title(f"[{self.usage_point_id}] Whitelist de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).blacklist(date, 0) + else: + return {"error": "true", "notif": "Target inconnue.", "result": ""} + if not result[target]: + return { + "error": "true", + "notif": "Erreur lors du traitement.", + "result": result[target], + } return { "error": "false", "notif": f"Whitelist de la {target} journalière du {date}", @@ -404,387 +516,458 @@ def whitelist(self, target, date): } def import_data(self, target=None): - result = Job(self.usage_point_id).job_import_data(wait=False, target=target) - if not result: - return { - "error": "true", - "notif": "Erreur lors du traitement.", - "result": result, - } - else: - return { - "error": "false", - "notif": "Récupération de la consommation/production.", - "result": result, - } + """Import data for the specified target. + + Args: + target (str, optional): The target to import data for. Defaults to None. + + Returns: + dict: A dictionary containing the result of the import data operation. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + result = Job(self.usage_point_id).job_import_data(wait=False, target=target) + if not result: + return { + "error": "true", + "notif": "Erreur lors du traitement.", + "result": result, + } + else: + return { + "error": "false", + "notif": "Récupération de la consommation/production.", + "result": result, + } def new_account(self, configs): - self.usage_point_id = configs["usage_point_id"] - title(f"[{self.usage_point_id}] Ajout d'un nouveau point de livraison:") - output = {} - for key, value in configs.items(): - if key != "usage_point_id": - if value is None or value == "None": - value = "" - logging.info(f"{str(key)} => {str(value)}") - output[key] = value - self.config.set_usage_point_config(self.usage_point_id, key, value) - self.db.set_usage_point(self.usage_point_id, output) - return output + """Add a new account. + + Args: + configs (dict): A dictionary containing the configuration for the new account. + + Returns: + dict: A dictionary containing the output of the new account operation. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + output = UsagePoint() + self.usage_point_id = configs["usage_point_id"] + title(f"[{self.usage_point_id}] Ajout d'un nouveau point de livraison:") + if not hasattr(APP_CONFIG.myelectricaldata.usage_point_config, self.usage_point_id): + APP_CONFIG.myelectricaldata.new(self.usage_point_id) + print(APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]) + for key, value in configs.items(): + if key != "usage_point_id": + setattr( + APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id], key, check_format(value) + ) + return output def configuration(self, configs): - title(f"[{self.usage_point_id}] Changement de configuration:") - output = {} - for key, value in configs.items(): - if value is None or value == "None": - value = "" - logging.info(f"{str(key)} => {str(value)}") - output[key] = value - self.config.set_usage_point_config(self.usage_point_id, key, value) - self.db.set_usage_point(self.usage_point_id, output) - return output - - def datatable(self, measurement_direction, args): - recordsTotal = 0 - args = args._query_params - draw = int(args.get("draw")) - length = int(args.get("length")) - search = args.get("search[value]") - start_index = int(args.get("start")) - end_index = start_index + length - order_column = int(args.get("order[0][column]")) - order_dir = args.get("order[0][dir]") - all_data = [] - data = [] - if measurement_direction == "consumption": - recordsTotal = self.db.get_daily_count( - usage_point_id=self.usage_point_id, measurement_direction="consumption" - ) - col_spec = { - 0: "date", - 1: "value", - 2: "value", - 3: "value", - 4: "value", - 5: "fail_count", - 6: "cache", - 7: "import_clean", - 8: "blacklist", - } - all_data = self.db.get_daily_datatable( - usage_point_id=self.usage_point_id, - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - 
measurement_direction="consumption", - ) - data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) - - elif measurement_direction == "consumption_detail": - recordsTotal = self.db.get_detail_count( - usage_point_id=self.usage_point_id, measurement_direction="consumption" - ) - col_spec = { - 0: "date", - 1: "date", - 2: "value", - 3: "value", - 4: "fail_count", - 5: "cache", - 6: "import_clean", - 7: "blacklist", - } - all_data = self.db.get_detail_datatable( - usage_point_id=self.usage_point_id, - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - measurement_direction="consumption", - ) - data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) - - elif measurement_direction == "production": - recordsTotal = self.db.get_daily_count( - usage_point_id=self.usage_point_id, measurement_direction="production" - ) - col_spec = { - 0: "date", - 1: "value", - 2: "value", - 3: "fail_count", - 4: "cache", - 5: "import_clean", - 6: "blacklist", - } - all_data = self.db.get_daily_datatable( - usage_point_id=self.usage_point_id, - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - measurement_direction="production", - ) - data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) - elif measurement_direction == "production_detail": - recordsTotal = self.db.get_detail_count( - usage_point_id=self.usage_point_id, measurement_direction="production" - ) - col_spec = { - 0: "date", - 1: "date", - 2: "value", - 3: "value", - 4: "fail_count", - 5: "cache", - 6: "import_clean", - 7: "blacklist", - } - all_data = self.db.get_detail_datatable( - usage_point_id=self.usage_point_id, - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - measurement_direction="production", - ) - data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) - elif measurement_direction == "consumption_max_power": - 
recordsTotal = self.db.get_daily_max_power_count(usage_point_id=self.usage_point_id) - col_spec = { - 0: "date", - 1: "date", - 2: "value", - 3: "value", - 4: "value", - 5: "fail_count", - 6: "cache", - 7: "import_clean", - 8: "blacklist", - } - all_data = self.db.get_daily_max_power_datatable( - usage_point_id=self.usage_point_id, - order_column=col_spec[order_column], - order_dir=order_dir, - search=search, - ) - data = self.datatable_max_power(all_data, start_index, end_index) - result = { - "draw": draw + 1, - "recordsTotal": recordsTotal, - "recordsFiltered": len(all_data), - "data": data, - } - return result + """Change the configuration for the specified usage point. - def datatable_button(self, measurement_direction, db_data): - date_text = db_data.date.strftime(self.date_format) - value = db_data.value - blacklist = db_data.blacklist - fail_count = db_data.fail_count - - btn_import = "" - btn_reset = "" - btn_blacklist = "" - btn_whitelist = "" - btn_import_disable = "" - btn_blacklist_disable = "" - - if fail_count == 0 and value > 0: - btn_import = "display:none" - btn_whitelist = "display:none" - btn_blacklist_disable = "datatable_button_disable" - elif blacklist == 1: - btn_blacklist = "display:none" - btn_reset = "display:none" - btn_import_disable = "datatable_button_disable" - else: - btn_reset = "display:none" - btn_whitelist = "display:none" - - cache_html = f""" -
-
-
-
+ Args: + configs (dict): A dictionary containing the new configuration values. + + Returns: + dict: A dictionary containing the updated configuration values. """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + title(f"[{self.usage_point_id}] Changement de configuration:") + for key, value in configs.items(): + setattr(APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id], key, check_format(value)) - blacklist_html = f""" -
-
-
-
+ def datatable(self, measurement_direction, args: Request): + """Retrieve datatable for the specified measurement direction. + + Args: + measurement_direction (str): The measurement direction. + args (object): The arguments. + + Returns: + dict: A dictionary containing the datatable result. """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + records_total = 0 + args = args._query_params # noqa: SLF001 # pylint: disable=W0212 + draw = int(args.get("draw")) + length = int(args.get("length")) + search = args.get("search[value]") + start_index = int(args.get("start")) + end_index = start_index + length + order_column = int(args.get("order[0][column]")) + order_dir = args.get("order[0][dir]") + all_data = [] + data = [] + if measurement_direction == "consumption": + records_total = DatabaseDaily(self.usage_point_id, "consumption").get_count() + col_spec = { + 0: "date", + 1: "value", + 2: "value", + 3: "value", + 4: "value", + 5: "fail_count", + 6: "cache", + 7: "import_clean", + 8: "blacklist", + } + all_data = DatabaseDaily(self.usage_point_id, "consumption").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) + + elif measurement_direction == "consumption_detail": + records_total = DatabaseDetail(self.usage_point_id, "consumption").get_count() + col_spec = { + 0: "date", + 1: "date", + 2: "value", + 3: "value", + 4: "fail_count", + 5: "cache", + 6: "import_clean", + 7: "blacklist", + } + all_data = DatabaseDetail(self.usage_point_id, "consumption").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) + + elif measurement_direction == "production": + records_total = DatabaseDaily(self.usage_point_id, "production").get_count() + col_spec = { + 0: 
"date", + 1: "value", + 2: "value", + 3: "fail_count", + 4: "cache", + 5: "import_clean", + 6: "blacklist", + } + all_data = DatabaseDaily(self.usage_point_id, "consumption").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) + elif measurement_direction == "production_detail": + records_total = DatabaseDetail(self.usage_point_id, "production").get_count() + col_spec = { + 0: "date", + 1: "date", + 2: "value", + 3: "value", + 4: "fail_count", + 5: "cache", + 6: "import_clean", + 7: "blacklist", + } + all_data = DatabaseDetail(self.usage_point_id, "production").get_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) + elif measurement_direction == "consumption_max_power": + records_total = DatabaseMaxPower(self.usage_point_id).get_daily_count() + col_spec = { + 0: "date", + 1: "date", + 2: "value", + 3: "value", + 4: "value", + 5: "fail_count", + 6: "cache", + 7: "import_clean", + 8: "blacklist", + } + all_data = DatabaseMaxPower(self.usage_point_id).get_daily_datatable( + order_column=col_spec[order_column], + order_dir=order_dir, + search=search, + ) + data = self.datatable_max_power(all_data, start_index, end_index) + result = { + "draw": draw + 1, + "recordsTotal": records_total, + "recordsFiltered": len(all_data), + "data": data, + } + return result - btn = {"cache": cache_html, "blacklist": blacklist_html} - return btn + def datatable_button(self, measurement_direction, db_data): + """Generate HTML code for datatable buttons based on measurement direction and database data. + + Args: + measurement_direction (str): The measurement direction. + db_data (object): The database data. + + Returns: + dict: The generated HTML code for the buttons. 
+ """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + date_text = db_data.date.strftime(self.date_format) + value = db_data.value + blacklist = db_data.blacklist + fail_count = db_data.fail_count + + btn_import = "" + btn_reset = "" + btn_blacklist = "" + btn_whitelist = "" + btn_import_disable = "" + btn_blacklist_disable = "" + + if fail_count == 0 and value > 0: + btn_import = "display:none" + btn_whitelist = "display:none" + btn_blacklist_disable = "datatable_button_disable" + elif blacklist == 1: + btn_blacklist = "display:none" + btn_reset = "display:none" + btn_import_disable = "datatable_button_disable" + else: + btn_reset = "display:none" + btn_whitelist = "display:none" + + cache_html = f""" +
+ +
+
+ +
+ """ + + blacklist_html = f""" +
+ +
+
+ +
+ """ + + btn = {"cache": cache_html, "blacklist": blacklist_html} + return btn def datatable_daily(self, all_data, start_index, end_index, measurement_direction): - index = 0 - result = [] - for db_data in all_data: - if start_index <= index <= end_index: - date_text = db_data.date.strftime(self.date_format) - target = "daily" - # VALUE - conso_w = f"""
{db_data.value}
""" - conso_kw = f"""
{db_data.value / 1000}
""" - fail_count = f"""
{db_data.fail_count}
""" - # CACHE STATE - if db_data.fail_count == 0: - cache_state = ( - f'
1
' + """Generate the HTML code for the daily datatable based on the provided data. + + Args: + all_data (list): The list of database data. + start_index (int): The start index of the datatable. + end_index (int): The end index of the datatable. + measurement_direction (str): The measurement direction. + + Returns: + list: The generated HTML code for the daily datatable. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + index = 0 + result = [] + for db_data in all_data: + if start_index <= index <= end_index: + date_text = db_data.date.strftime(self.date_format) + target = "daily" + # VALUE + conso_w = f"""
{db_data.value}
""" + conso_kw = ( + f"""
{db_data.value / 1000}
""" ) - else: - cache_state = ( - f'
0
' + fail_count = ( + f"""
{db_data.fail_count}
""" ) - tempo = self.db.get_tempo_range( - db_data.date.strftime(self.date_format), db_data.date.strftime(self.date_format) - ) - if tempo and tempo[0]: - if tempo[0].color == "RED": - temp_color = ( - f'
2
' + # CACHE STATE + if db_data.fail_count == 0: + cache_state = ( + f'
1
' ) - elif tempo[0].color == "WHITE": - temp_color = ( - f'
1
' + else: + cache_state = ( + f'
0
' ) + tempo = DatabaseTempo().get_range(db_data.date, db_data.date) + if tempo and tempo[0]: + if tempo[0].color == "RED": + temp_color = f""" +
2
""" + elif tempo[0].color == "WHITE": + temp_color = f""" +
1
""" + else: + temp_color = f""" +
0
""" + else: + temp_color = f'
-
' + hc = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hc") + if hc == 0: + hc = "-" else: - temp_color = ( - f'
0
' + hc = hc / 1000 + hp = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hp") + if hp == 0: + hp = "-" + else: + hp = hp / 1000 + hc_kw = f'
{hc}
' + hp_kw = f'
{hp}
' + if measurement_direction == "consumption": + day_data = [ + date_text, + conso_w, + conso_kw, + hc_kw, + hp_kw, + temp_color, + fail_count, + cache_state, + self.datatable_button(measurement_direction, db_data)["cache"], + self.datatable_button(measurement_direction, db_data)["blacklist"], + ] + else: + day_data = [ + date_text, + conso_w, + conso_kw, + fail_count, + cache_state, + self.datatable_button(measurement_direction, db_data)["cache"], + self.datatable_button(measurement_direction, db_data)["blacklist"], + ] + result.append(day_data) + index = index + 1 + return result + + def datatable_detail(self, all_data, start_index, end_index, measurement_direction): + """Generate the datatable for the detailed view of the electrical data. + + Args: + all_data (list): List of all data. + start_index (int): Start index of the data. + end_index (int): End index of the data. + measurement_direction (str): Measurement direction. + + Returns: + list: Resulting datatable. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + index = 0 + result = [] + for db_data in all_data: + if start_index <= index <= end_index: + date_text = db_data.date.strftime(self.date_format) + date_hour = db_data.date.strftime("%H:%M:%S") + target = "detail" + # VALUE + conso_w = f"""
{db_data.value}
""" + conso_kw = ( + f"""
{db_data.value / 1000}
""" + ) + fail_count = ( + f"""
{db_data.fail_count}
""" + ) + # CACHE STATE + if db_data.fail_count == 0: + cache_state = ( + f'
1
' + ) + else: + cache_state = ( + f'
0
' ) - else: - temp_color = f'
-
' - hc = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hc") - if hc == 0: - hc = "-" - else: - hc = hc / 1000 - hp = Stat(self.usage_point_id, "consumption").get_daily(db_data.date, "hp") - if hp == 0: - hp = "-" - else: - hp = hp / 1000 - hc_kw = f'
{hc}
' - hp_kw = f'
{hp}
' - if measurement_direction == "consumption": day_data = [ date_text, + date_hour, conso_w, conso_kw, - hc_kw, - hp_kw, - temp_color, fail_count, cache_state, self.datatable_button(measurement_direction, db_data)["cache"], self.datatable_button(measurement_direction, db_data)["blacklist"], ] - else: + result.append(day_data) + index = index + 1 + return result + + def datatable_max_power(self, all_data, start_index, end_index): + """Generate the datatable for the maximum power data. + + Args: + all_data (list): List of all data. + start_index (int): Start index of the data. + end_index (int): End index of the data. + + Returns: + list: Resulting datatable. + """ + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + index = 0 + result = [] + measurement_direction = "consumption_max_power" + event_date = "" + target = "daily" + contract = DatabaseContracts(self.usage_point_id).get() + if hasattr(contract, "subscribed_power") and contract.subscribed_power is not None: + max_power = int(contract.subscribed_power.split(" ")[0]) * 1000 + else: + max_power = 999000 + for db_data in all_data: + if start_index <= index <= end_index: + date_text = db_data.date.strftime(self.date_format) + ampere = f"{round(int(db_data.value) / 230, 2)}" + if isinstance(db_data.event_date, datetime): + event_date = db_data.event_date.strftime("%H:%M:%S") + # VALUE + if max_power <= int(db_data.value): + style = 'style="color:#FF0000; font-weight:bolder"' + elif (max_power * 90 / 100) <= db_data.value: + style = 'style="color:#FFB600; font-weight:bolder"' + else: + style = "" + data_text_event_date = f"""
{event_date}
""" + conso_w = ( + f"""
{db_data.value}
""" + ) + conso_kw = f"""
{db_data.value / 1000}
""" + conso_a = f"""
{ampere}
""" + fail_count = f"""
{db_data.fail_count}
""" + + # CACHE STATE + if db_data.fail_count == 0: + cache_state = ( + f'
1
' + ) + else: + cache_state = ( + f'
0
' + ) day_data = [ date_text, + data_text_event_date, conso_w, conso_kw, + conso_a, fail_count, cache_state, self.datatable_button(measurement_direction, db_data)["cache"], self.datatable_button(measurement_direction, db_data)["blacklist"], ] - result.append(day_data) - index = index + 1 - return result - - def datatable_detail(self, all_data, start_index, end_index, measurement_direction): - index = 0 - result = [] - for db_data in all_data: - if start_index <= index <= end_index: - # print(db_data) - date_text = db_data.date.strftime(self.date_format) - date_hour = db_data.date.strftime("%H:%M:%S") - target = "detail" - # VALUE - conso_w = f"""
{db_data.value}
""" - conso_kw = f"""
{db_data.value / 1000}
""" - fail_count = f"""
{db_data.fail_count}
""" - # CACHE STATE - if db_data.fail_count == 0: - cache_state = ( - f'
1
' - ) - else: - cache_state = ( - f'
0
' - ) - day_data = [ - date_text, - date_hour, - conso_w, - conso_kw, - fail_count, - cache_state, - self.datatable_button(measurement_direction, db_data)["cache"], - self.datatable_button(measurement_direction, db_data)["blacklist"], - ] - result.append(day_data) - index = index + 1 - return result - - def datatable_max_power(self, all_data, start_index, end_index): - index = 0 - result = [] - measurement_direction = "consumption_max_power" - event_date = "" - target = "daily" - contract = self.db.get_contract(self.usage_point_id) - if hasattr(contract, "subscribed_power") and contract.subscribed_power is not None: - max_power = int(contract.subscribed_power.split(" ")[0]) * 1000 - else: - max_power = 999000 - for db_data in all_data: - if start_index <= index <= end_index: - date_text = db_data.date.strftime(self.date_format) - ampere = f"{round(int(db_data.value) / 230, 2)}" - if isinstance(db_data.event_date, datetime): - event_date = db_data.event_date.strftime("%H:%M:%S") - # VALUE - if max_power <= int(db_data.value): - style = 'style="color:#FF0000; font-weight:bolder"' - elif (max_power * 90 / 100) <= db_data.value: - style = 'style="color:#FFB600; font-weight:bolder"' - else: - style = "" - data_text_event_date = ( - f"""
{event_date}
""" - ) - conso_w = f"""
{db_data.value}
""" - conso_kw = ( - f"""
{db_data.value / 1000}
""" - ) - conso_a = f"""
{ampere}
""" - fail_count = ( - f"""
{db_data.fail_count}
""" - ) - - # CACHE STATE - if db_data.fail_count == 0: - cache_state = ( - f'
1
' - ) - else: - cache_state = ( - f'
0
' - ) - day_data = [ - date_text, - data_text_event_date, - conso_w, - conso_kw, - conso_a, - fail_count, - cache_state, - self.datatable_button(measurement_direction, db_data)["cache"], - self.datatable_button(measurement_direction, db_data)["blacklist"], - ] - result.append(day_data) - index = index + 1 - return result + result.append(day_data) + index = index + 1 + return result diff --git a/src/models/config.py b/src/models/config.py deleted file mode 100755 index 4a155d03..00000000 --- a/src/models/config.py +++ /dev/null @@ -1,371 +0,0 @@ -"""Configuration class loader and checker.""" - -import logging -import re -from pathlib import Path - -import yaml - -from dependencies import APPLICATION_PATH_DATA, is_bool, is_float, separator, str2bool, title - - -class Config: - """Represent the configuration settings for the application. - - Attributes: - path (str): The path to the configuration file. - db: The database connection object. - file (str): The name of the configuration file. - path_file (str): The full path to the configuration file. - config (dict): The loaded configuration settings. - default_port (int): The default port number. - mandatory_parameters (dict): The mandatory parameters for the configuration. - default (dict): The default configuration settings. 
- """ - - def __init__(self, path=APPLICATION_PATH_DATA): - self.path = path - self.db = None - self.file = "config.yaml" - self.path_file = f"{self.path}/{self.file}" - self.config = {} - self.default_port = 5000 - self.mandatory_parameters = {} - self.default = { - "cycle": 14400, - "debug": False, - "log2file": False, - "tempo": { - "enable": False, - }, - "myelectricaldata": { - "pdl": { - "enable": True, - "name": "", - "token": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX", - "cache": True, - "plan": "BASE", - "consumption": True, - "consumption_detail": True, - "consumption_price_hc": 0, - "consumption_price_hp": 0, - "consumption_price_base": 0, - "consumption_max_date": "", - "consumption_detail_max_date": "", - "production": False, - "production_detail": False, - "production_max_date": "", - "production_detail_max_date": "", - "production_price": 0, - "offpeak_hours_0": "", - "offpeak_hours_1": "", - "offpeak_hours_2": "", - "offpeak_hours_3": "", - "offpeak_hours_4": "", - "offpeak_hours_5": "", - "offpeak_hours_6": "", - "activation_date_daily": "", - "activation_date_detail": "", - "refresh_addresse": False, - "refresh_contract": False, - } - }, - "mqtt": { - "enable": False, - "hostname": "X.X.X.X", - "port": 1883, - "username": "", - "password": "", - "prefix": "myelectricaldata", - "client_id": "myelectricaldata", - "retain": True, - "qos": 0, - }, - "home_assistant": { - "enable": False, - "discovery_prefix": "homeassistant", - }, - "home_assistant_ws": {"enable": False, "ssl": True, "token": "", "url": ""}, - "influxdb": { - "enable": False, - "hostname": "influxdb", - "port": 8086, - "token": "XXXXXXXXXXX", - "org": "myelectricaldata", - "bucket": "myelectricaldata", - "method": "synchronous", - }, - "ssl": { - "gateway": True, - "certfile": None, - "keyfile": None, - }, - } - - def set_db(self, db): - """Set the database.""" - self.db = db - - def load(self): - """Load the configuration.""" - config_file = f"{self.path_file}" - if Path(config_file).exists(): 
- with Path(config_file).open(encoding="utf-8") as file: - self.config = yaml.safe_load(file) - - else: - with Path(config_file).open(mode="a", encoding="utf-8") as file: - file.write(yaml.dump(self.default)) - with Path(config_file).open(encoding="utf-8") as file: - self.config = yaml.safe_load(file) - - if self.config is None: - return { - "error": True, - "message": [ - "Impossible de charger le fichier de configuration.", - "", - "Vous pouvez récupérer un exemple ici :", - "https://github.com/MyElectricalData/myelectricaldata_import/wiki/03.-Configuration", - ], - } - - def check(self): - """Check the configuration for missing mandatory parameters.""" - separator() - logging.info(f"Check {self.file} :") - lost_params = [] - # CHECK HOME ASSISTANT CONFIGURATION - config_name = "home_assistant" - for key, data in self.default[config_name].items(): - mandatory = False - if key in self.mandatory_parameters: - mandatory = True - if mandatory and key not in self.config[config_name]: - lost_params.append(f"{config_name}.{key.upper()}") - elif key not in self.config[config_name]: - self.config[config_name][key] = data - - if lost_params: - msg = [ - "Some mandatory parameters are missing:", - ] - for param in lost_params: - msg.append(f"- {param}") - msg.append("") - msg.append("You can get list of parameters here :") - msg.append(" => https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file") - logging.critical(msg) - else: - title("Config valid") - - return lost_params - - def display(self): - """Display the configuration settings. - - This method logs the configuration settings to the console, hiding sensitive information such as passwords - and tokens. 
- - Args: - None - - Returns: - None - """ - logging.info("Display configuration :") - for key, value in self.config.items(): - if isinstance(value, dict): - logging.info(f" {key}:") - for dic_key, dic_value in value.items(): - if isinstance(dic_value, dict): - logging.info(f" {dic_key}:") - for dic1_key, dic1_value in dic_value.items(): - if dic1_key in {"password", "token"}: - hidden_value = "** hidden **" - else: - hidden_value = dic1_value - if hidden_value is None or hidden_value == "None": - hidden_value = "''" - logging.info(f" {dic1_key}: {hidden_value}") - else: - if dic_key in {"password", "token"}: - hidden_value = "** hidden **" - else: - hidden_value = dic_value - if hidden_value is None or hidden_value == "None": - hidden_value = "''" - logging.info(f" {dic_key}: {hidden_value}") - else: - if key in {"password", "token"}: - hidden_value = "** hidden **" - else: - hidden_value = value - logging.info(f" {key}: {hidden_value}") - - def get(self, path=None): - """Get the value of a configuration parameter. - - Args: - path (str, optional): The path of the configuration parameter. Defaults to None. - - Returns: - Union[bool, Any]: The value of the configuration parameter if found, False otherwise. - """ - if path: - if path in self.config: - return self.config[path] - return False - return self.config - - def set(self, path, value): - """Set the value of a configuration parameter. - - Args: - path (str): The path of the configuration parameter. - value: The value to set. - - Returns: - None - """ - title(f"Switch {path} to {value}") - with Path(self.path_file).open(mode="r+", encoding="utf-8") as file: - text = file.read() - text = re.sub(rf"(?<={path}: ).*", str(value).lower(), text) - file.seek(0) - file.write(text) - file.truncate() - self.config = yaml.safe_load(text) - self.db.set_config(path, value) - - def tempo_config(self): - """Return the configuration for tempo. - - Returns: - dict: A dictionary containing the tempo configuration. 
- """ - if "tempo" in self.config: - return self.config["tempo"] - return False - - def storage_config(self): - """Return the configuration for storage. - - Returns: - str: The storage URI. - """ - if "storage_uri" in self.config: - return self.config["storage_uri"] - return False - - def mqtt_config(self): - """Return the configuration for MQTT. - - Returns: - dict: A dictionary containing the MQTT configuration. - """ - if "mqtt" in self.config: - return self.config["mqtt"] - return False - - def home_assistant_config(self): - """Return the configuration for Home Assistant. - - Returns: - dict: A dictionary containing the Home Assistant configuration. - """ - if "home_assistant" in self.config: - return self.config["home_assistant"] - return False - - def home_assistant_ws_config(self): - """Return the configuration for Home Assistant WebSocket. - - Returns: - dict: A dictionary containing the Home Assistant WebSocket configuration. - """ - if "home_assistant_ws" in self.config: - return self.config["home_assistant_ws"] - return False - - def influxdb_config(self): - """Return the configuration for InfluxDB. - - Returns: - dict: A dictionary containing the InfluxDB configuration. - """ - if "influxdb" in self.config: - return self.config["influxdb"] - return False - - def usage_point_id_config(self, usage_point_id): - """Return the configuration for a specific usage point. - - Args: - usage_point_id (str): The ID of the usage point. - - Returns: - dict: A dictionary containing the configuration for the specified usage point. - """ - if "myelectricaldata" in self.config and usage_point_id in self.config["myelectricaldata"]: - return self.config["myelectricaldata"][usage_point_id] - return False - - def list_usage_point(self): - """Return the list of usage points in the configuration. - - Returns: - dict: A dictionary containing the usage points. 
- """ - return self.config["myelectricaldata"] - - def set_usage_point_config(self, usage_point_id, key, value): - """Set the configuration for a specific usage point. - - Args: - usage_point_id (str): The ID of the usage point. - key (str): The configuration key. - value (str): The configuration value. - """ - if "myelectricaldata" in self.config: - if usage_point_id not in self.config["myelectricaldata"]: - self.config["myelectricaldata"][usage_point_id] = {} - if is_bool(value): - value = str2bool(value) - elif value is None or value == "None": - value = "" - elif is_float(value): - value = float(value) - else: - value = str(value) - self.config["myelectricaldata"][usage_point_id][key] = value - with Path(self.path_file).open(mode="w", encoding="utf-8") as outfile: - yaml.dump(self.config, outfile, default_flow_style=False) - else: - return False - - def port(self): - """Return the port configuration if it exists, otherwise returns the default port.""" - if "port" in self.config: - return self.config["port"] - return self.default_port - - def ssl_config(self): - """Return the SSL configuration if it exists, otherwise returns an empty dictionary.""" - if "ssl" in self.config: - if "keyfile" in self.config["ssl"] and "certfile" in self.config["ssl"]: - if ( - self.config["ssl"]["keyfile"] != "" - and self.config["ssl"]["keyfile"] is not None - and self.config["ssl"]["certfile"] != "" - and self.config["ssl"]["certfile"] is not None - ): - return { - "ssl_keyfile": self.config["ssl"]["keyfile"], - "ssl_certfile": self.config["ssl"]["certfile"], - } - logging.error("La configuration SSL est erronée.") - return {} - logging.error("La configuration SSL est erronée.") - return {} - return {} diff --git a/src/models/database.py b/src/models/database.py deleted file mode 100644 index d6cf2fa0..00000000 --- a/src/models/database.py +++ /dev/null @@ -1,1871 +0,0 @@ -"""Manage all database operations.""" -import hashlib -import json -import logging -import os -import 
traceback -from datetime import datetime, timedelta -from os.path import exists - -from sqlalchemy import asc, create_engine, delete, desc, func, inspect, select, update -from sqlalchemy.orm import scoped_session, sessionmaker -from sqlalchemy.pool import NullPool - -from config import MAX_IMPORT_TRY -from db_schema import ( - Addresses, - Config, - ConsumptionDaily, - ConsumptionDailyMaxPower, - ConsumptionDetail, - Contracts, - Ecowatt, - ProductionDaily, - ProductionDetail, - Statistique, - Tempo, - TempoConfig, - UsagePoints, -) -from dependencies import APPLICATION_PATH, APPLICATION_PATH_DATA, get_version, str2bool, title, title_warning - -# available_database = ["sqlite", "postgresql", "mysql+pymysql"] -available_database = ["sqlite", "postgresql"] - - -class Database: - """Represents a database connection and provides methods for database operations.""" - - def __init__(self, config, path=APPLICATION_PATH_DATA): - """Initialize a Database object. - - Args: - config (Config): The configuration object. - path (str, optional): The path to the database. Defaults to APPLICATION_PATH_DATA. 
- """ - self.config = config - self.path = path - - if not self.config.storage_config() or self.config.storage_config().startswith("sqlite"): - self.db_name = "cache.db" - self.db_path = f"{self.path}/{self.db_name}" - self.uri = f"sqlite:///{self.db_path}?check_same_thread=False" - else: - self.storage_type = self.config.storage_config().split(":")[0] - if self.storage_type in available_database: - self.uri = self.config.storage_config() - else: - logging.critical(f"Database {self.storage_type} not supported (only SQLite & PostgresSQL)") - - os.system(f"cd {APPLICATION_PATH}; DB_URL='{self.uri}' alembic upgrade head ") - - self.engine = create_engine( - self.uri, - echo=False, - query_cache_size=0, - isolation_level="READ UNCOMMITTED", - poolclass=NullPool, - ) - self.session = scoped_session(sessionmaker(self.engine, autocommit=True, autoflush=True)) - self.inspector = inspect(self.engine) - - self.lock_file = f"{self.path}/.lock" - - # MIGRATE v7 to v8 - if os.path.isfile(f"{self.path}/enedisgateway.db"): - title_warning("=> Migration de l'ancienne base de données vers la nouvelle structure.") - self.migratev7tov8() - - def migratev7tov8(self): - """Migrates the database from version 7 to version 8.""" - uri = f"sqlite:///{self.path}/enedisgateway.db" - engine = create_engine(uri, echo=True, query_cache_size=0) - session = scoped_session(sessionmaker(engine, autocommit=True, autoflush=True)) - - for measurement_direction in ["consumption", "production"]: - logging.warning(f'Migration des "{measurement_direction}_daily"') - if measurement_direction == "consumption": - table = ConsumptionDaily - else: - table = ProductionDaily - daily_data = session.execute(f"select * from {measurement_direction}_daily order by date").all() - current_date = "" - year_value = 0 - bulk_insert = [] - for daily in daily_data: - usage_point_id = daily[0] - date = datetime.strptime(daily[1], "%Y-%m-%d") - value = daily[2] - year_value = year_value + value - bulk_insert.append( - table( 
- usage_point_id=usage_point_id, - date=date, - value=value, - blacklist=0, - fail_count=0, - ) - ) - if current_date != date.strftime("%Y"): - logging.warning(f" - {date.strftime('%Y')} => {round(year_value / 1000, 2)}kW") - current_date = date.strftime("%Y") - year_value = 0 - self.session.add_all(bulk_insert) - - logging.warning(f'Migration des "{measurement_direction}_detail"') - if measurement_direction == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - detail_data = session.execute(f"select * from {measurement_direction}_detail order by date").all() - current_date = "" - day_value = 0 - bulk_insert = [] - for detail in detail_data: - usage_point_id = detail[0] - date = datetime.strptime(detail[1], "%Y-%m-%d %H:%M:%S") - timedelta(minutes=30) - value = detail[2] - interval = detail[3] - measure_type = detail[4] - day_value = day_value + value / (60 / interval) - bulk_insert.append( - table( - usage_point_id=usage_point_id, - date=date, - value=value, - interval=interval, - measure_type=measure_type, - blacklist=0, - fail_count=0, - ) - ) - if current_date != date.strftime("%m"): - logging.warning(f" - {date.strftime('%Y-%m')} => {round(day_value / 1000, 2)}kW") - current_date = date.strftime("%m") - day_value = 0 - self.session.add_all(bulk_insert) - os.replace(f"{self.path}/enedisgateway.db", f"{self.path}/enedisgateway.db.migrate") - - def init_database(self): - """Initialize the database with default values.""" - try: - logging.info("Configure Databases") - query = select(Config).where(Config.key == "day") - day = self.session.scalars(query).one_or_none() - if day: - day.value = datetime.now().strftime("%Y-%m-%d") - else: - self.session.add(Config(key="day", value=datetime.now().strftime("%Y-%m-%d"))) - logging.info(" => day") - query = select(Config).where(Config.key == "call_number") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="call_number", value="0")) - logging.info(" => call_number") 
- query = select(Config).where(Config.key == "max_call") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="max_call", value="500")) - logging.info(" => max_call") - query = select(Config).where(Config.key == "version") - version = self.session.scalars(query).one_or_none() - if version: - version.value = get_version() - else: - self.session.add(Config(key="version", value=get_version())) - logging.info(" => version") - query = select(Config).where(Config.key == "lock") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="lock", value="0")) - logging.info(" => lock") - query = select(Config).where(Config.key == "lastUpdate") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="lastUpdate", value=str(datetime.now()))) - logging.info(" => lastUpdate") - logging.info(" Success") - except Exception as e: - traceback.print_exc() - logging.error(e) - logging.critical("Database initialize failed!") - - def purge_database(self): - """Purges the SQLite database.""" - logging.separator_warning() - logging.info("Reset SQLite Database") - if os.path.exists(f"{self.path}/cache.db"): - os.remove(f"{self.path}/cache.db") - logging.info(" => Success") - else: - logging.info(" => No cache detected") - - def lock_status(self): - """Check the lock status of the database. - - Returns: - bool: True if the database is locked, False otherwise. - """ - if exists(self.lock_file): - return True - else: - return False - - def lock(self): - """Locks the database. - - Returns: - bool: True if the database is locked, False otherwise. - """ - with open(self.lock_file, "xt") as f: - f.write(str(datetime.now())) - f.close() - return self.lock_status() - - def unlock(self): - """Unlocks the database. - - Returns: - bool: True if the database is unlocked, False otherwise. 
- """ - if os.path.exists(self.lock_file): - os.remove(self.lock_file) - return self.lock_status() - - def clean_database(self, current_usage_point_id): - """Clean the database by removing unused data. - - Args: - current_usage_point_id (list): List of current usage point IDs. - - Returns: - bool: True if the database is cleaned successfully, False otherwise. - """ - for usage_point in self.get_usage_point_all(): - if usage_point.usage_point_id not in current_usage_point_id: - logging.warning(f"- Suppression du point de livraison {usage_point.usage_point_id}") - self.delete_usage_point(usage_point.usage_point_id) - self.delete_addresse(usage_point.usage_point_id) - self.delete_daily(usage_point.usage_point_id) - self.delete_detail(usage_point.usage_point_id) - self.delete_daily_max_power(usage_point.usage_point_id) - return True - - def refresh_object(self): - """Refreshe the ORM objects.""" - title("Refresh ORM Objects") - self.session.expire_all() - - # ---------------------------------------------------------------------------------------------------------------- - # CONFIG - # ---------------------------------------------------------------------------------------------------------------- - def get_config(self, key): - query = select(Config).where(Config.key == key) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def set_config(self, key, value): - query = select(Config).where(Config.key == key) - config = self.session.scalars(query).one_or_none() - if config: - config.value = json.dumps(value) - else: - self.session.add(Config(key=key, value=json.dumps(value))) - self.session.flush() - self.session.close() - self.refresh_object() - - # ---------------------------------------------------------------------------------------------------------------- - # USAGE POINTS - # ---------------------------------------------------------------------------------------------------------------- - def get_usage_point_all(self): - query 
= select(UsagePoints) - data = self.session.scalars(query).all() - self.session.close() - return data - - def get_usage_point(self, usage_point_id): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def get_usage_point_plan(self, usage_point): - data = self.get_usage_point(usage_point) - if data.plan in ["HP/HC"]: - return "HC/HP" - return data.plan - - def set_usage_point(self, usage_point_id, data): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - - if usage_points is not None: - if "enable" in data and data["enable"] is not None: - usage_points.enable = str2bool(data["enable"]) - if "name" in data and data["name"] is not None: - usage_points.name = data["name"] - if "cache" in data and data["cache"] is not None: - usage_points.cache = str2bool(data["cache"]) - if "consumption" in data and data["consumption"] is not None: - usage_points.consumption = str2bool(data["consumption"]) - if "consumption_detail" in data and data["consumption_detail"] is not None: - usage_points.consumption_detail = str2bool(data["consumption_detail"]) - if "consumption_max_power" in data and data["consumption_max_power"] is not None: - usage_points.consumption_max_power = str2bool(data["consumption_max_power"]) - if "production" in data and data["production"] is not None: - usage_points.production = str2bool(data["production"]) - if "production_detail" in data and data["production_detail"] is not None: - usage_points.production_detail = str2bool(data["production_detail"]) - if "production_price" in data and data["production_price"] is not None: - usage_points.production_price = data["production_price"] - if "consumption_price_base" in data and data["consumption_price_base"] is not None: - usage_points.consumption_price_base = data["consumption_price_base"] - if 
"consumption_price_hc" in data and data["consumption_price_hc"] is not None: - usage_points.consumption_price_hc = data["consumption_price_hc"] - if "consumption_price_hp" in data and data["consumption_price_hp"] is not None: - usage_points.consumption_price_hp = data["consumption_price_hp"] - if "offpeak_hours_0" in data and data["offpeak_hours_0"] is not None: - usage_points.offpeak_hours_0 = data["offpeak_hours_0"] - if "offpeak_hours_1" in data and data["offpeak_hours_1"] is not None: - usage_points.offpeak_hours_1 = data["offpeak_hours_1"] - if "offpeak_hours_2" in data and data["offpeak_hours_2"] is not None: - usage_points.offpeak_hours_2 = data["offpeak_hours_2"] - if "offpeak_hours_3" in data and data["offpeak_hours_3"] is not None: - usage_points.offpeak_hours_3 = data["offpeak_hours_3"] - if "offpeak_hours_4" in data and data["offpeak_hours_4"] is not None: - usage_points.offpeak_hours_4 = data["offpeak_hours_4"] - if "offpeak_hours_5" in data and data["offpeak_hours_5"] is not None: - usage_points.offpeak_hours_5 = data["offpeak_hours_5"] - if "offpeak_hours_6" in data and data["offpeak_hours_6"] is not None: - usage_points.offpeak_hours_6 = data["offpeak_hours_6"] - if "plan" in data and data["plan"] is not None: - usage_points.plan = data["plan"] - else: - usage_points.plan = "BASE" - if "refresh_addresse" in data and data["refresh_addresse"] is not None: - usage_points.refresh_addresse = str2bool(data["refresh_addresse"]) - if "refresh_contract" in data and data["refresh_contract"] is not None: - usage_points.refresh_contract = str2bool(data["refresh_contract"]) - if "token" in data and data["token"] is not None: - usage_points.token = data["token"] - if "progress" in data and data["progress"] is not None: - usage_points.progress = data["progress"] - if "progress_status" in data and data["progress_status"] is not None: - usage_points.progress_status = data["progress_status"] - if "consumption_max_date" in data: - if data["consumption_max_date"] and 
data["consumption_max_date"] is not None: - consumption_max_date = data["consumption_max_date"] - if isinstance(consumption_max_date, datetime): - usage_points.consumption_max_date = consumption_max_date - else: - usage_points.consumption_max_date = datetime.strptime(consumption_max_date, "%Y-%m-%d") - if "consumption_detail_max_date" in data: - if data["consumption_detail_max_date"] and data["consumption_detail_max_date"] is not None: - consumption_detail_max_date = data["consumption_detail_max_date"] - if isinstance(consumption_detail_max_date, datetime): - usage_points.consumption_detail_max_date = consumption_detail_max_date - else: - usage_points.consumption_detail_max_date = datetime.strptime( - consumption_detail_max_date, "%Y-%m-%d" - ) - if "production_max_date" in data: - if data["production_max_date"] and data["production_max_date"] is not None: - production_max_date = data["production_max_date"] - if isinstance(production_max_date, datetime): - usage_points.production_max_date = production_max_date - else: - usage_points.production_max_date = datetime.strptime(production_max_date, "%Y-%m-%d") - if "production_detail_max_date" in data: - if data["production_detail_max_date"] and data["production_detail_max_date"] is not None: - production_detail_max_date = data["production_detail_max_date"] - if isinstance(production_detail_max_date, datetime): - usage_points.production_detail_max_date = production_detail_max_date - else: - usage_points.production_detail_max_date = datetime.strptime( - production_detail_max_date, "%Y-%m-%d" - ) - if "call_number" in data and data["call_number"] is not None: - usage_points.call_number = data["call_number"] - if "quota_reached" in data and data["quota_reached"] is not None: - usage_points.quota_reached = str2bool(data["quota_reached"]) - if "quota_limit" in data and data["quota_limit"] is not None: - usage_points.quota_limit = data["quota_limit"] - if "quota_reset_at" in data and data["quota_reset_at"] is not None: - 
usage_points.quota_reset_at = data["quota_reset_at"] - if "last_call" in data and data["last_call"] is not None: - usage_points.last_call = data["last_call"] - if "ban" in data and data["ban"] is not None: - usage_points.ban = str2bool(data["ban"]) - if "consentement_expiration" in data and data["consentement_expiration"] is not None: - usage_points.consentement_expiration = data["consentement_expiration"] - else: - if "enable" in data and data["enable"] is not None: - enable = data["enable"] - else: - enable = True - if "name" in data and data["name"] is not None: - name = data["name"] - else: - name = "" - if "cache" in data and data["cache"] is not None: - cache = data["cache"] - else: - cache = True - if "consumption" in data and data["consumption"] is not None: - consumption = data["consumption"] - else: - consumption = True - if "consumption_max_power" in data and data["consumption_max_power"] is not None: - consumption_max_power = data["consumption_max_power"] - else: - consumption_max_power = True - if "consumption_detail" in data and data["consumption_detail"] is not None: - consumption_detail = data["consumption_detail"] - else: - consumption_detail = True - if "production" in data and data["production"] is not None: - production = data["production"] - else: - production = False - if "production_detail" in data and data["production_detail"] is not None: - production_detail = data["production_detail"] - else: - production_detail = False - if "production_price" in data and data["production_price"] is not None: - production_price = data["production_price"] - else: - production_price = 0 - if ( - "consumption_price_base" in data - and data["consumption_price_base"] is not None - and data["consumption_price_base"] != "" - ): - consumption_price_base = data["consumption_price_base"] - else: - consumption_price_base = 0 - if ( - "consumption_price_hc" in data - and data["consumption_price_hc"] is not None - and data["consumption_price_hc"] != "" - ): - 
consumption_price_hc = data["consumption_price_hc"] - else: - consumption_price_hc = 0 - if ( - "consumption_price_hp" in data - and data["consumption_price_hp"] is not None - and data["consumption_price_hp"] != "" - ): - consumption_price_hp = data["consumption_price_hp"] - else: - consumption_price_hp = 0 - if "offpeak_hours_0" in data and data["offpeak_hours_0"] is not None: - offpeak_hours_0 = data["offpeak_hours_0"] - else: - offpeak_hours_0 = "" - if "offpeak_hours_1" in data and data["offpeak_hours_1"] is not None: - offpeak_hours_1 = data["offpeak_hours_1"] - else: - offpeak_hours_1 = "" - if "offpeak_hours_2" in data and data["offpeak_hours_2"] is not None: - offpeak_hours_2 = data["offpeak_hours_2"] - else: - offpeak_hours_2 = "" - if "offpeak_hours_3" in data and data["offpeak_hours_3"] is not None: - offpeak_hours_3 = data["offpeak_hours_3"] - else: - offpeak_hours_3 = "" - if "offpeak_hours_4" in data and data["offpeak_hours_4"] is not None: - offpeak_hours_4 = data["offpeak_hours_4"] - else: - offpeak_hours_4 = "" - if "offpeak_hours_5" in data and data["offpeak_hours_5"] is not None: - offpeak_hours_5 = data["offpeak_hours_5"] - else: - offpeak_hours_5 = "" - if "offpeak_hours_6" in data and data["offpeak_hours_6"] is not None: - offpeak_hours_6 = data["offpeak_hours_6"] - else: - offpeak_hours_6 = "" - if "plan" in data and data["plan"] is not None: - plan = data["plan"] - else: - plan = "BASE" - if "refresh_addresse" in data and data["refresh_addresse"] is not None: - refresh_addresse = data["refresh_addresse"] - else: - refresh_addresse = False - if "refresh_contract" in data and data["refresh_contract"] is not None: - refresh_contract = data["refresh_contract"] - else: - refresh_contract = False - if "token" in data and data["token"] is not None: - token = data["token"] - else: - token = "" - progress = 0 - if "progress" in data and data["progress"] is not None: - progress = data["progress"] - progress_status = "" - if "progress_status" in data 
and data["progress_status"] is not None: - progress_status = data["progress_status"] - consumption_max_date = None - if "consumption_max_date" in data: - if not data["consumption_max_date"] or data["consumption_max_date"] is None: - consumption_max_date = None - else: - consumption_max_date = data["consumption_max_date"] - if not isinstance(consumption_max_date, datetime): - consumption_max_date = datetime.strptime(consumption_max_date, "%Y-%m-%d") - consumption_detail_max_date = None - if "consumption_detail_max_date" in data: - if "consumption_detail_max_date" in data or data["consumption_detail_max_date"] is None: - if not data["consumption_detail_max_date"] or data["consumption_detail_max_date"] is None: - consumption_detail_max_date = None - else: - consumption_detail_max_date = data["consumption_detail_max_date"] - if not isinstance(consumption_detail_max_date, datetime): - consumption_detail_max_date = datetime.strptime(consumption_detail_max_date, "%Y-%m-%d") - production_max_date = None - if "production_max_date" in data: - if not data["production_max_date"] or data["production_max_date"] is None: - production_max_date = None - else: - production_max_date = data["production_max_date"] - if not isinstance(production_max_date, datetime): - production_max_date = datetime.strptime(production_max_date, "%Y-%m-%d") - production_detail_max_date = None - if "production_detail_max_date" in data: - if not data["production_detail_max_date"] or data["production_detail_max_date"] is None: - production_detail_max_date = None - else: - production_detail_max_date = data["production_detail_max_date"] - if isinstance(production_detail_max_date, datetime): - production_detail_max_date = production_detail_max_date - else: - production_detail_max_date = datetime.strptime(production_detail_max_date, "%Y-%m-%d") - - if "call_number" in data and data["call_number"] is not None: - call_number = data["call_number"] - else: - call_number = 0 - if "quota_reached" in data and 
data["quota_reached"] is not None: - quota_reached = str2bool(data["quota_reached"]) - else: - quota_reached = False - if "quota_limit" in data and data["quota_limit"] is not None: - quota_limit = data["quota_limit"] - else: - quota_limit = 0 - if "quota_reset_at" in data and data["quota_reset_at"] is not None: - quota_reset_at = data["quota_reset_at"] - else: - quota_reset_at = None - if "last_call" in data and data["last_call"] is not None: - last_call = data["last_call"] - else: - last_call = None - if "ban" in data and data["ban"] is not None: - ban = str2bool(data["ban"]) - else: - ban = False - if "consentement_expiration" in data and data["consentement_expiration"] is not None: - consentement_expiration = data["consentement_expiration"] - else: - consentement_expiration = None - - self.session.add( - UsagePoints( - usage_point_id=usage_point_id, - name=name, - cache=str2bool(cache), - consumption=str2bool(consumption), - consumption_detail=str2bool(consumption_detail), - consumption_max_power=str2bool(consumption_max_power), - production=str2bool(production), - production_detail=str2bool(production_detail), - production_price=production_price, - consumption_price_base=consumption_price_base, - consumption_price_hc=consumption_price_hc, - consumption_price_hp=consumption_price_hp, - offpeak_hours_0=offpeak_hours_0, - offpeak_hours_1=offpeak_hours_1, - offpeak_hours_2=offpeak_hours_2, - offpeak_hours_3=offpeak_hours_3, - offpeak_hours_4=offpeak_hours_4, - offpeak_hours_5=offpeak_hours_5, - offpeak_hours_6=offpeak_hours_6, - plan=plan, - refresh_addresse=str2bool(refresh_addresse), - refresh_contract=str2bool(refresh_contract), - token=token, - progress=progress, - progress_status=progress_status, - enable=str2bool(enable), - consumption_max_date=consumption_max_date, - consumption_detail_max_date=consumption_detail_max_date, - production_max_date=production_max_date, - production_detail_max_date=production_detail_max_date, - call_number=call_number, - 
quota_reached=str2bool(quota_reached), - quota_limit=quota_limit, - quota_reset_at=quota_reset_at, - last_call=last_call, - ban=str2bool(ban), - consentement_expiration=consentement_expiration, - ) - ) - self.session.flush() - self.session.close() - - def progress(self, usage_point_id, increment): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - usage_points.progress = usage_points.progress + increment - self.session.close() - - def last_call_update(self, usage_point_id): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - usage_points.last_call = datetime.now() - self.session.flush() - self.session.close() - - def usage_point_update( - self, - usage_point_id, - consentement_expiration=None, - call_number=None, - quota_reached=None, - quota_limit=None, - quota_reset_at=None, - last_call=None, - ban=None, - ): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - if consentement_expiration is not None: - usage_points.consentement_expiration = consentement_expiration - if call_number is not None: - usage_points.call_number = call_number - if quota_reached is not None: - usage_points.quota_reached = quota_reached - if quota_limit is not None: - usage_points.quota_limit = quota_limit - if quota_reset_at is not None: - usage_points.quota_reset_at = quota_reset_at - if last_call is not None: - usage_points.last_call = last_call - if ban is not None: - usage_points.ban = ban - self.session.flush() - self.session.close() - - def delete_usage_point(self, usage_point_id): - self.session.execute(delete(Addresses).where(Addresses.usage_point_id == usage_point_id)) - self.session.execute(delete(Contracts).where(Contracts.usage_point_id == usage_point_id)) - self.session.execute( - 
delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - ) - self.session.execute(delete(ConsumptionDetail).where(ConsumptionDetail.usage_point_id == usage_point_id)) - self.session.execute(delete(ConsumptionDaily).where(ConsumptionDaily.usage_point_id == usage_point_id)) - self.session.execute(delete(ProductionDetail).where(ProductionDetail.usage_point_id == usage_point_id)) - self.session.execute(delete(ProductionDaily).where(ProductionDaily.usage_point_id == usage_point_id)) - self.session.execute(delete(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id)) - self.session.flush() - self.session.close() - return True - - def get_error_log(self, usage_point_id): - data = self.get_usage_point(usage_point_id) - return data.last_error - - def set_error_log(self, usage_point_id, message): - values = {UsagePoints.last_error: message} - self.session.execute(update(UsagePoints, values=values).where(UsagePoints.usage_point_id == usage_point_id)) - self.session.flush() - return True - - # ---------------------------------------------------------------------------------------------------------------- - # ADDRESSES - # ---------------------------------------------------------------------------------------------------------------- - def get_addresse(self, usage_point_id): - query = ( - select(Addresses).join(UsagePoints.relation_addressess).where(UsagePoints.usage_point_id == usage_point_id) - ) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def set_addresse(self, usage_point_id, data, count=0): - query = ( - select(Addresses).join(UsagePoints.relation_addressess).where(Addresses.usage_point_id == usage_point_id) - ) - addresses = self.session.scalars(query).one_or_none() - if addresses is not None: - addresses.street = data["street"] - addresses.locality = data["locality"] - addresses.postal_code = data["postal_code"] - addresses.insee_code = data["insee_code"] - 
addresses.city = data["city"] - addresses.country = data["country"] - addresses.geo_points = data["geo_points"] - addresses.count = count - else: - self.session.add( - Addresses( - usage_point_id=usage_point_id, - street=data["street"], - locality=data["locality"], - postal_code=data["postal_code"], - insee_code=data["insee_code"], - city=data["city"], - country=data["country"], - geo_points=data["geo_points"], - count=count, - ) - ) - self.session.flush() - self.session.close() - - def delete_addresse(self, usage_point_id): - self.session.execute(delete(Addresses).where(Addresses.usage_point_id == usage_point_id)) - self.session.flush() - self.session.close() - return True - - # ---------------------------------------------------------------------------------------------------------------- - # CONTRACTS - # ---------------------------------------------------------------------------------------------------------------- - def get_contract(self, usage_point_id): - query = ( - select(Contracts).join(UsagePoints.relation_contract).where(UsagePoints.usage_point_id == usage_point_id) - ) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def set_contract( - self, - usage_point_id, - data, - count=0, - ): - query = ( - select(Contracts).join(UsagePoints.relation_contract).where(UsagePoints.usage_point_id == usage_point_id) - ) - contract = self.session.scalars(query).one_or_none() - if contract is not None: - contract.usage_point_status = data["usage_point_status"] - contract.meter_type = data["meter_type"] - contract.segment = data["segment"] - contract.subscribed_power = data["subscribed_power"] - contract.last_activation_date = data["last_activation_date"] - contract.distribution_tariff = data["distribution_tariff"] - contract.offpeak_hours_0 = data["offpeak_hours_0"] - contract.offpeak_hours_1 = data["offpeak_hours_1"] - contract.offpeak_hours_2 = data["offpeak_hours_2"] - contract.offpeak_hours_3 = data["offpeak_hours_3"] - 
contract.offpeak_hours_4 = data["offpeak_hours_4"] - contract.offpeak_hours_5 = data["offpeak_hours_5"] - contract.offpeak_hours_6 = data["offpeak_hours_6"] - contract.contract_status = data["contract_status"] - contract.last_distribution_tariff_change_date = data["last_distribution_tariff_change_date"] - contract.count = count - else: - self.session.add( - Contracts( - usage_point_id=usage_point_id, - usage_point_status=data["usage_point_status"], - meter_type=data["meter_type"], - segment=data["segment"], - subscribed_power=data["subscribed_power"], - last_activation_date=data["last_activation_date"], - distribution_tariff=data["distribution_tariff"], - offpeak_hours_0=data["offpeak_hours_0"], - offpeak_hours_1=data["offpeak_hours_1"], - offpeak_hours_2=data["offpeak_hours_2"], - offpeak_hours_3=data["offpeak_hours_3"], - offpeak_hours_4=data["offpeak_hours_4"], - offpeak_hours_5=data["offpeak_hours_5"], - offpeak_hours_6=data["offpeak_hours_6"], - contract_status=data["contract_status"], - last_distribution_tariff_change_date=data["last_distribution_tariff_change_date"], - count=count, - ) - ) - self.session.flush() - self.session.close() - - # ---------------------------------------------------------------------------------------------------------------- - # DAILY - # ---------------------------------------------------------------------------------------------------------------- - def get_daily_all(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - data = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .order_by(table.date.desc()) - ).all() - self.session.close() - return data - - def get_daily_datatable( - self, - usage_point_id, - order_column="date", - order_dir="asc", - 
search=None, - measurement_direction="consumption", - ): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - - sort = asc(order_column) if order_dir == "desc" else desc(order_column) - - yesterday = datetime.combine(datetime.now() - timedelta(days=1), datetime.max.time()) - if search is not None and search != "": - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where((table.date.like(f"%{search}%")) | (table.value.like(f"%{search}%"))) - .where(table.date <= yesterday) - .order_by(sort) - ) - else: - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where(table.date <= yesterday) - .order_by(sort) - ) - return result.all() - - def get_daily_count(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - data = self.session.scalars( - select([func.count()]) - .select_from(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - ).one_or_none() - self.session.close() - return data - - def get_daily_date(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - data = self.session.scalars(select(table).join(relation).where(table.id == unique_id)).first() - self.session.flush() - self.session.close() - return data - - def 
get_daily_state(self, usage_point_id, date, measurement_direction="consumption"): - if self.get_daily_date(usage_point_id, date, measurement_direction) is not None: - return True - else: - return False - - def get_daily_last_date(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - current_data = self.session.scalars( - select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date) - ).first() - self.session.flush() - self.session.close() - if current_data is None: - return False - else: - return current_data.date - - def get_daily_last(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - current_data = self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.value != 0) - .order_by(table.date.desc()) - ).first() - self.session.flush() - self.session.close() - if current_data is None: - return False - else: - return current_data - - def get_daily_first_date(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date.desc()) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).first() - if current_data is None: - return False - else: - return current_data.date - - def 
get_daily_fail_count(self, usage_point_id, date, measurement_direction="consumption"): - result = self.get_daily_date(usage_point_id, date, measurement_direction) - if hasattr(result, "fail_count"): - return result.fail_count - else: - return 0 - - def daily_fail_increment(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.id == unique_id) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - daily = self.session.scalars(query).one_or_none() - if daily is not None: - fail_count = int(daily.fail_count) + 1 - if fail_count >= MAX_IMPORT_TRY: - blacklist = 1 - fail_count = 0 - else: - blacklist = 0 - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.value = 0 - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - fail_count = 0 - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - blacklist=0, - fail_count=0, - ) - ) - self.session.flush() - return fail_count - - def get_daily_range(self, usage_point_id, begin, end, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = ( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date >= begin) - .where(table.date <= end) - .order_by(table.date.desc()) - ) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).all() - if current_data is None: - 
return False - else: - return current_data - - def get_daily(self, usage_point_id, begin, end, measurement_direction="consumption"): - delta = end - begin - result = {"missing_data": False, "date": {}, "count": 0} - for i in range(delta.days + 1): - checkDate = begin + timedelta(days=i) - checkDate = datetime.combine(checkDate, datetime.min.time()) - query_result = self.get_daily_date(usage_point_id, checkDate, measurement_direction) - checkDate = checkDate.strftime("%Y-%m-%d") - if query_result is None: - # NEVER QUERY - result["date"][checkDate] = { - "status": False, - "blacklist": 0, - "value": 0, - } - result["missing_data"] = True - else: - consumption = query_result.value - blacklist = query_result.blacklist - if consumption == 0: - # ENEDIS RETURN NO DATA - result["date"][checkDate] = { - "status": False, - "blacklist": blacklist, - "value": consumption, - } - result["missing_data"] = True - else: - # SUCCESS or BLACKLIST - result["date"][checkDate] = { - "status": True, - "blacklist": blacklist, - "value": consumption, - } - return result - - def insert_daily( - self, - usage_point_id, - date, - value, - blacklist=0, - fail_count=0, - measurement_direction="consumption", - ): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.id == unique_id) - daily = self.session.scalars(query).one_or_none() - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - if daily is not None: - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.value = value - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=value, - 
blacklist=blacklist, - fail_count=fail_count, - ) - ) - self.session.flush() - - def reset_daily(self, usage_point_id, date=None, mesure_type="consumption"): - data = self.get_daily_date(usage_point_id, date, mesure_type) - if mesure_type == "consumption": - table = ConsumptionDaily - else: - table = ProductionDaily - if data is not None: - values = { - table.value: 0, - table.blacklist: 0, - table.fail_count: 0, - } - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(update(table, values=values).where(table.id == unique_id)) - self.session.flush() - return True - else: - return False - - def delete_daily(self, usage_point_id, date=None, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - else: - table = ProductionDaily - if date is not None: - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(table).where(table.id == unique_id)) - else: - self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) - self.session.flush() - return True - - def blacklist_daily(self, usage_point_id, date, action=True, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.id == unique_id) - daily = self.session.scalars(query).one_or_none() - if daily is not None: - daily.blacklist = action - else: - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - blacklist=action, - fail_count=0, - ) - ) - self.session.flush() - return True - - def get_daily_date_range(self, usage_point_id): - return { - "begin": 
self.get_daily_last_date(usage_point_id), - "end": self.get_daily_first_date(usage_point_id), - } - - # ----------------------------------------------------------------------------------------------------------------- - # DETAIL CONSUMPTION - # ----------------------------------------------------------------------------------------------------------------- - def get_detail_all( - self, - usage_point_id, - begin=None, - end=None, - measurement_direction="consumption", - order_dir="desc", - ): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - sort = asc("date") if order_dir == "desc" else desc("date") - if begin is None and end is None: - return self.session.scalars( - select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(sort) - ).all() - elif begin is not None and end is None: - return self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .filter(table.date >= begin) - .order_by(sort) - ).all() - elif end is not None and begin is None: - return self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .filter(table.date <= end) - .order_by(sort) - ).all() - else: - return self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .filter(table.date <= end) - .filter(table.date >= begin) - .order_by(sort) - ).all() - - def get_detail_datatable( - self, - usage_point_id, - order_column="date", - order_dir="asc", - search=None, - measurement_direction="consumption", - ): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - yesterday = datetime.combine(datetime.now() - 
timedelta(days=1), datetime.max.time()) - sort = asc(order_column) if order_dir == "desc" else desc(order_column) - if search is not None and search != "": - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where((table.date.like(f"%{search}%")) | (table.value.like(f"%{search}%"))) - .where(table.date <= yesterday) - .order_by(sort) - ) - else: - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where(table.date <= yesterday) - .order_by(sort) - ) - return result.all() - - def get_detail_count(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - return self.session.scalars( - select([func.count()]) - .select_from(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - ).one_or_none() - - def get_detail_date(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - return self.session.scalars(select(table).join(relation).where(table.id == unique_id)).first() - - def get_detail_range( - self, - usage_point_id, - begin, - end, - measurement_direction="consumption", - order="desc", - ): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - if order == "desc": - order = table.date.desc() - else: - order = table.date.asc() - 
query = ( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date >= begin) - .where(table.date <= end) - .order_by(order) - ) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).all() - if current_data is None: - return False - else: - return current_data - - def get_detail(self, usage_point_id, begin, end, measurement_direction="consumption"): - # begin = datetime.combine(begin, datetime.min.time()) - # end = datetime.combine(end, datetime.max.time()) - - delta = begin - begin - - result = {"missing_data": False, "date": {}, "count": 0} - - for i in range(delta.days + 1): - query_result = self.get_detail_all( - usage_point_id=usage_point_id, - begin=begin, - end=end, - measurement_direction=measurement_direction, - ) - time_delta = abs(int((begin - end).total_seconds() / 60)) - total_internal = 0 - for query in query_result: - total_internal = total_internal + query.interval - total_time = abs(total_internal - time_delta) - if total_time > 300: - logging.info(f" - {total_time}m absente du relevé.") - result["missing_data"] = True - else: - for query in query_result: - result["date"][query.date] = { - "value": query.value, - "interval": query.interval, - "measure_type": query.measure_type, - "blacklist": query.blacklist, - } - return result - - def get_detail_state(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - current_data = self.session.scalars(select(table).join(relation).where(table.id == unique_id)).one_or_none() - if current_data is None: - return False - else: - return True - - # def insert_detail_bulk(self, data, 
mesure_type="consumption"): - # if mesure_type == "consumption": - # table = ConsumptionDetail - # else: - # table = ProductionDetail - # begin = "" - # end = "" - # for scalar in data: - # if begin == "": - # begin = scalar.date - # end = scalar.date - # self.session.execute( - # table.__table__.delete().filter(ConsumptionDetail.date.between(begin, end)) - # ) - # self.session.add_all(data) - - def insert_detail( - self, - usage_point_id, - date, - value, - interval, - measure_type, - blacklist=0, - fail_count=0, - mesure_type="consumption", - ): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - detail = self.get_detail_date(usage_point_id, date, mesure_type) - if detail is not None: - detail.id = unique_id - detail.usage_point_id = usage_point_id - detail.date = date - detail.value = value - detail.interval = interval - detail.measure_type = measure_type - detail.blacklist = blacklist - detail.fail_count = fail_count - else: - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=value, - interval=interval, - measure_type=measure_type, - blacklist=blacklist, - fail_count=fail_count, - ) - ) - self.session.flush() - - def reset_detail(self, usage_point_id, date=None, mesure_type="consumption"): - detail = self.get_detail_date(usage_point_id, date, mesure_type) - if detail is not None: - detail.value = 0 - detail.interval = 0 - detail.blacklist = 0 - detail.fail_count = 0 - self.session.flush() - return True - else: - return False - - def reset_detail_range(self, usage_point_id, begin, end, mesure_type="consumption"): - detail = self.get_detail_range(usage_point_id, begin, end, mesure_type) - if detail is not None: - for row in detail: - row.value = 0 - row.interval = 0 - row.blacklist = 0 - row.fail_count = 0 - self.session.flush() - return True - else: - return False - - def 
delete_detail(self, usage_point_id, date=None, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - if date is not None: - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(table).where(table.id == unique_id)) - else: - self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) - self.session.flush() - return True - - def delete_detail_range(self, usage_point_id, date, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - if date is not None: - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(table).where(table.id == unique_id)) - else: - self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) - self.session.flush() - return True - - def get_ratio_hc_hp(self, usage_point_id, begin, end, mesure_type="consumption"): - result = { - "HC": 0, - "HP": 0, - } - detail_data = self.get_detail_all( - usage_point_id=usage_point_id, - begin=begin, - end=end, - measurement_direction=mesure_type, - ) - for data in detail_data: - result[data.measure_type] = result[data.measure_type] + data.value - return result - - def get_detail_fail_count(self, usage_point_id, date, mesure_type="consumption"): - return self.get_detail_date(usage_point_id, date, mesure_type).fail_count - - def detail_fail_increment(self, usage_point_id, date, mesure_type="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if mesure_type == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - query = select(table).join(relation).where(table.id == unique_id) - detail = self.session.scalars(query).one_or_none() - if detail is 
not None: - fail_count = int(detail.fail_count) + 1 - if fail_count >= MAX_IMPORT_TRY: - blacklist = 1 - fail_count = 0 - else: - blacklist = 0 - detail.usage_point_id = usage_point_id - detail.date = date - detail.value = 0 - detail.interval = 0 - detail.measure_type = "HP" - detail.blacklist = blacklist - detail.fail_count = fail_count - else: - fail_count = 0 - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - interval=0, - measure_type="HP", - blacklist=0, - fail_count=0, - ) - ) - self.session.flush() - return fail_count - - def get_detail_last_date(self, usage_point_id, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - current_data = self.session.scalars( - select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date) - ).first() - if current_data is None: - return False - else: - return current_data.date - - def get_detail_first_date(self, usage_point_id, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - query = select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date.desc()) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).first() - if current_data is None: - return False - else: - return current_data.date - - def get_detail_date_range(self, usage_point_id): - return { - "begin": self.get_detail_last_date(usage_point_id), - "end": self.get_detail_first_date(usage_point_id), - } - - # ----------------------------------------------------------------------------------------------------------------- - # DAILY POWER 
- # ----------------------------------------------------------------------------------------------------------------- - def get_daily_max_power_all(self, usage_point_id, order="desc"): - if order == "desc": - order = ConsumptionDailyMaxPower.date.desc() - else: - order = ConsumptionDailyMaxPower.date.asc() - return self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - .order_by(order) - ).all() - - def get_daily_max_power_range(self, usage_point_id, begin, end): - query = ( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - .where(ConsumptionDailyMaxPower.date >= begin) - .where(ConsumptionDailyMaxPower.date <= end) - .order_by(ConsumptionDailyMaxPower.date.desc()) - ) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).all() - if current_data is None: - return False - else: - return current_data - - def get_daily_power(self, usage_point_id, begin, end): - delta = end - begin - result = {"missing_data": False, "date": {}, "count": 0} - for i in range(delta.days + 1): - checkDate = begin + timedelta(days=i) - checkDate = datetime.combine(checkDate, datetime.min.time()) - query_result = self.get_daily_max_power_date(usage_point_id, checkDate) - checkDate = checkDate.strftime("%Y-%m-%d") - if query_result is None: - # NEVER QUERY - result["date"][checkDate] = { - "status": False, - "blacklist": 0, - "value": 0, - } - result["missing_data"] = True - else: - consumption = query_result.value - blacklist = query_result.blacklist - if consumption == 0: - # ENEDIS RETURN NO DATA - result["date"][checkDate] = { - "status": False, - "blacklist": blacklist, - "value": consumption, - } - result["missing_data"] = True - else: - # SUCCESS or BLACKLIST - result["date"][checkDate] = { - 
"status": True, - "blacklist": blacklist, - "value": consumption, - } - return result - - def get_daily_max_power_last_date(self, usage_point_id): - current_data = self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - .order_by(ConsumptionDailyMaxPower.date) - ).first() - if current_data is None: - return False - else: - return current_data.date - - def get_daily_max_power_date(self, usage_point_id, date): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - return self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(ConsumptionDailyMaxPower.id == unique_id) - ).one_or_none() - - def insert_daily_max_power(self, usage_point_id, date, event_date, value, blacklist=0, fail_count=0): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.event_date = event_date - daily.value = value - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - self.session.add( - ConsumptionDailyMaxPower( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - event_date=event_date, - value=value, - blacklist=blacklist, - fail_count=fail_count, - ) - ) - self.session.flush() - - def get_daily_max_power_count(self, usage_point_id): - return self.session.scalars( - select([func.count()]) - .select_from(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - ).one_or_none() - - def get_daily_max_power_datatable(self, usage_point_id, order_column="date", order_dir="asc", search=None): - yesterday = datetime.combine(datetime.now() - timedelta(days=1), 
datetime.max.time()) - sort = asc(order_column) if order_dir == "desc" else desc(order_column) - if search is not None and search != "": - result = self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - .where( - (ConsumptionDailyMaxPower.date.like(f"%{search}%")) - | (ConsumptionDailyMaxPower.value.like(f"%{search}%")) - ) - .where(ConsumptionDailyMaxPower.date <= yesterday) - .order_by(sort) - ) - else: - result = self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - .where(ConsumptionDailyMaxPower.date <= yesterday) - .order_by(sort) - ) - return result.all() - - def daily_max_power_fail_increment(self, usage_point_id, date): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - fail_count = int(daily.fail_count) + 1 - if fail_count >= MAX_IMPORT_TRY: - blacklist = 1 - fail_count = 0 - else: - blacklist = 0 - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.event_date = None - daily.value = 0 - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - fail_count = 0 - self.session.add( - ConsumptionDailyMaxPower( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - event_date=None, - value=0, - blacklist=0, - fail_count=0, - ) - ) - self.session.flush() - return fail_count - - def reset_daily_max_power(self, usage_point_id, date=None): - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - daily.event_date = None - daily.value = 0 - daily.blacklist = 0 - daily.fail_count = 0 - self.session.flush() - return True - else: - return False - - def delete_daily_max_power(self, usage_point_id, date=None): - if date is not None: - 
unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.id == unique_id)) - else: - self.session.execute( - delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - ) - self.session.flush() - return True - - def blacklist_daily_max_power(self, usage_point_id, date, action=True): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - daily.blacklist = action - else: - self.session.add( - ConsumptionDailyMaxPower( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - blacklist=action, - fail_count=0, - ) - ) - self.session.flush() - return True - - def get_daily_max_power_fail_count(self, usage_point_id, date): - result = self.get_daily_max_power_date(usage_point_id, date) - if hasattr(result, "fail_count"): - return result.fail_count - else: - return 0 - - # ----------------------------------------------------------------------------------------------------------------- - # TEMPO - # ----------------------------------------------------------------------------------------------------------------- - def get_tempo(self, order="desc"): - if order == "desc": - order = Tempo.date.desc() - else: - order = Tempo.date.asc() - return self.session.scalars(select(Tempo).order_by(order)).all() - - def get_tempo_range(self, begin, end, order="desc"): - if order == "desc": - order = Tempo.date.desc() - else: - order = Tempo.date.asc() - return self.session.scalars( - select(Tempo).where(Tempo.date >= begin).where(Tempo.date <= end).order_by(order) - ).all() - - def set_tempo(self, date, color): - date = datetime.combine(date, datetime.min.time()) - tempo = self.get_tempo_range(date, date) - if tempo: - for item in tempo: - item.color = color - else: - self.session.add(Tempo(date=date, 
color=color)) - self.session.flush() - return True - - # ----------------------------------------------------------------------------------------------------------------- - # TEMPO CONFIG - # ----------------------------------------------------------------------------------------------------------------- - def get_tempo_config(self, key): - query = select(TempoConfig).where(TempoConfig.key == key) - data = self.session.scalars(query).one_or_none() - if data is not None: - data = json.loads(data.value) - self.session.close() - return data - - def set_tempo_config(self, key, value): - query = select(TempoConfig).where(TempoConfig.key == key) - config = self.session.scalars(query).one_or_none() - if config: - config.value = json.dumps(value) - else: - self.session.add(TempoConfig(key=key, value=json.dumps(value))) - self.session.flush() - self.session.close() - - # ----------------------------------------------------------------------------------------------------------------- - # ECOWATT - # ----------------------------------------------------------------------------------------------------------------- - def get_ecowatt(self, order="desc"): - if order == "desc": - order = Ecowatt.date.desc() - else: - order = Ecowatt.date.asc() - return self.session.scalars(select(Ecowatt).order_by(order)).all() - - def get_ecowatt_range(self, begin, end, order="desc"): - if order == "desc": - order = Ecowatt.date.desc() - else: - order = Ecowatt.date.asc() - return self.session.scalars( - select(Ecowatt).where(Ecowatt.date >= begin).where(Ecowatt.date <= end).order_by(order) - ).all() - - def set_ecowatt(self, date, value, message, detail): - date = datetime.combine(date, datetime.min.time()) - ecowatt = self.get_ecowatt_range(date, date) - if ecowatt: - for item in ecowatt: - item.value = value - item.message = message - item.detail = detail - else: - self.session.add(Ecowatt(date=date, value=value, message=message, detail=detail)) - self.session.flush() - return True - - # 
---------------------------------------------------------------------------------------------------------------- - # STATISTIQUES - # ---------------------------------------------------------------------------------------------------------------- - def get_stat(self, usage_point_id, key): - return self.session.scalars( - select(Statistique) - .join(UsagePoints.relation_stats) - .where(Statistique.usage_point_id == usage_point_id) - .where(Statistique.key == key) - ).all() - - def set_stat(self, usage_point_id, key, value): - current_value = self.get_stat(usage_point_id, key) - if current_value: - for item in current_value: - item.value = value - else: - self.session.add(Statistique(usage_point_id=usage_point_id, key=key, value=value)) - self.session.flush() - return True - - def del_stat(self, usage_point_id): - self.session.execute(delete(Statistique).where(Statistique.usage_point_id == usage_point_id)) diff --git a/src/models/export_home_assistant.py b/src/models/export_home_assistant.py deleted file mode 100644 index b12dc654..00000000 --- a/src/models/export_home_assistant.py +++ /dev/null @@ -1,863 +0,0 @@ -"""This module contains the code for exporting data to Home Assistant.""" - -import json -import logging -from datetime import datetime, timedelta - -import pytz -from dateutil.relativedelta import relativedelta - -from dependencies import get_version, truncate -from init import CONFIG, DB, MQTT -from models.stat import Stat - -UTC = pytz.UTC - - -def convert_kw(value): - """Convert a value from kilowatts to watts. - - Args: - value (float): The value in kilowatts. - - Returns: - float: The value in watts. - """ - return truncate(value / 1000, 2) - - -def convert_kw_to_euro(value, price): - """Convert a value from kilowatts to euros. - - Args: - value (float): The value in kilowatts. - price (float): The price per kilowatt-hour. - - Returns: - float: The value in euros. 
- """ - if isinstance(price, str): - price = float(price.replace(",", ".")) - return round(value / 1000 * price, 1) - - -def convert_price(price): - """Convert a price from string to float. - - Args: - price (str): The price as a string. - - Returns: - float: The price as a float. - """ - if isinstance(price, str): - price = price.replace(",", ".") - return float(price) - - -class HomeAssistant: # pylint: disable=R0902 - """Represents a Home Assistant instance.""" - - class Config: # pylint: disable=R0902 - """Default configuration for Home Assistant.""" - - def __init__(self) -> None: - """Initialize the ExportHomeAssistant object. - - Attributes: - - consumption (bool): Flag indicating if consumption data is enabled. - - consumption_detail (bool): Flag indicating if detailed consumption data is enabled. - - production (bool): Flag indicating if production data is enabled. - - production_detail (bool): Flag indicating if detailed production data is enabled. - - consumption_price_base (float): The base consumption price. - - consumption_price_hp (float): The consumption price for high peak hours. - - consumption_price_hc (float): The consumption price for low peak hours. - - production_price (float): The production price. - - discovery_prefix (str): The prefix for Home Assistant discovery. - - activation_date (datetime): The date of the last activation. - - subscribed_power (str): The subscribed power value. - - consumption_max_power (bool): Flag indicating if maximum power consumption is enabled. - - offpeak_hours_0 (str): Off-peak hours for day 0 - Monday. - - offpeak_hours_1 (str): Off-peak hours for day 1 - Tuesday. - - offpeak_hours_2 (str): Off-peak hours for day 2 - Wednesday. - - offpeak_hours_3 (str): Off-peak hours for day 3 - Thursday. - - offpeak_hours_4 (str): Off-peak hours for day 4 - Friday. - - offpeak_hours_5 (str): Off-peak hours for day 5 - Saturday. - - offpeak_hours_6 (str): Off-peak hours for day 6 - Sunday. 
- """ - self.consumption: bool = True - self.consumption_detail: bool = True - self.production: bool = False - self.production_detail: bool = False - self.consumption_price_base: float = 0 - self.consumption_price_hp: float = 0 - self.consumption_price_hc: float = 0 - self.production_price: float = 0 - self.discovery_prefix: str = "home_assistant" - self.activation_date: datetime = None - self.subscribed_power: str = None - self.consumption_max_power: bool = True - self.offpeak_hours_0: str = None - self.offpeak_hours_1: str = None - self.offpeak_hours_2: str = None - self.offpeak_hours_3: str = None - self.offpeak_hours_4: str = None - self.offpeak_hours_5: str = None - self.offpeak_hours_6: str = None - - def __init__(self, usage_point_id): - self.usage_point_id = usage_point_id - self.date_format = "%Y-%m-%d" - self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.config_usage_point = DB.get_usage_point(self.usage_point_id) - self.config = None - self.load_config() - self.usage_point = DB.get_usage_point(self.usage_point_id) - self.mqtt = MQTT - self.tempo_color = None - - def load_config(self): - """Load the configuration for Home Assistant. - - This method loads the configuration values from the usage point and contract objects. - """ - self.config = self.Config() - for key in self.config.__dict__: - if hasattr(self.config_usage_point, key): - setattr(self.config, key, getattr(self.config_usage_point, key)) - - config_ha_config = CONFIG.home_assistant_config() - for key in self.config.__dict__: - if key in config_ha_config: - setattr(self.config, key, config_ha_config[key]) - - contract = DB.get_contract(self.usage_point_id) - for key in self.config.__dict__: - if hasattr(contract, key): - setattr(self.config, key, getattr(contract, key)) - - def export(self): - """Export data to Home Assistant. - - This method exports consumption, production, tempo, and ecowatt data to Home Assistant. 
- """ - if self.config.consumption or self.config.consumption_detail: - logging.info("Consommation :") - self.myelectricaldata_usage_point_id("consumption") - self.last_x_day(5, "consumption") - self.history_usage_point_id("consumption") - - if self.config.production or self.config.production_detail: - logging.info("Production :") - self.myelectricaldata_usage_point_id("production") - self.last_x_day(5, "production") - self.history_usage_point_id("production") - - self.tempo() - self.tempo_info() - self.tempo_days() - self.tempo_price() - self.ecowatt() - - def sensor(self, **kwargs): - """Publish sensor data to Home Assistant. - - This method publishes sensor data to Home Assistant using MQTT. - """ - logging.info( - f"- sensor.{kwargs['device_name'].lower().replace(' ', '_')}_{kwargs['name'].lower().replace(' ', '_')}" - ) - topic = f"{self.config.discovery_prefix}/sensor/{kwargs['topic']}" - if "device_class" not in kwargs: - device_class = None - else: - device_class = kwargs["device_class"] - config = { - "name": f"{kwargs['name']}", - "uniq_id": kwargs["uniq_id"], - "stat_t": f"{topic}/state", - "json_attr_t": f"{topic}/attributes", - "device_class": device_class, - "device": { - "identifiers": kwargs["device_identifiers"], - "name": kwargs["device_name"], - "model": kwargs["device_model"], - "manufacturer": "MyElectricalData", - }, - } - if "unit_of_measurement" in kwargs: - config["unit_of_measurement"] = kwargs["unit_of_measurement"] - if "numPDL" in kwargs: - config["numPDL"] = kwargs["numPDL"] - attributes_params = {} - if "attributes" in kwargs: - attributes_params = kwargs["attributes"] - attributes = { - **attributes_params, - **{ - "version": get_version(), - "activationDate": self.config.activation_date, - "lastUpdate": datetime.now(tz=UTC).strftime(self.date_format_detail), - "timeLastCall": datetime.now(tz=UTC).strftime(self.date_format_detail), - }, - } - - data = { - "config": json.dumps(config), - "state": kwargs["state"], - "attributes": 
json.dumps(attributes), - } - return self.mqtt.publish_multiple(data, topic) - - def last_x_day(self, days, measurement_direction): - """Get data for the last x days and publish it to Home Assistant. - - Args: - days (int): The number of days to retrieve data for. - measurement_direction (str): The direction of the measurement (e.g., consumption or production). - """ - uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_last{days}day" - end = datetime.combine(datetime.now(tz=UTC) - timedelta(days=1), datetime.max.time()) - begin = datetime.combine(end - timedelta(days), datetime.min.time()) - range = DB.get_detail_range(self.usage_point_id, begin, end, measurement_direction) - attributes = {"time": [], measurement_direction: []} - for data in range: - attributes["time"].append(data.date.strftime("%Y-%m-%d %H:%M:%S")) - attributes[measurement_direction].append(data.value) - self.sensor( - topic=f"myelectricaldata_{measurement_direction}_last_{days}_day/{self.usage_point_id}", - name=f"{measurement_direction}.last{days}day", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=uniq_id, - unit_of_measurement="kWh", - attributes=attributes, - state=days, - device_class="energy", - numPDL=self.usage_point_id, - ) - - def history_usage_point_id(self, measurement_direction): - """Retrieve the historical usage point ID and publishes it to Home Assistant. - - Args: - measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). 
- """ - uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_history" - stats = Stat(self.usage_point_id, measurement_direction) - state = DB.get_daily_last(self.usage_point_id, measurement_direction) - if state: - state = state.value - else: - state = 0 - state = convert_kw(state) - attributes = {"yesterdayDate": stats.daily(0)["begin"]} - self.sensor( - topic=f"myelectricaldata_{measurement_direction}_history/{self.usage_point_id}", - name=f"{measurement_direction}.history", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=uniq_id, - unit_of_measurement="kWh", - attributes=attributes, - state=state, - device_class="energy", - numPDL=self.usage_point_id, - ) - - def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR0912, PLR0915, C901 - """Retrieve the usage point ID and calculates various statistics related to energy consumption. - - Args: - measurement_direction (str): The direction of the measurement (e.g., "consumption", "production"). - - Returns: - dict: A dictionary containing various statistics related to energy consumption, such as daily, weekly, - monthly, and yearly values. 
- """ - stats = Stat(self.usage_point_id, measurement_direction) - state = DB.get_daily_last(self.usage_point_id, measurement_direction) - if state: - state = state.value - else: - state = 0 - - offpeak_hours_enedis = ( - f"Lundi ({self.config.offpeak_hours_0});" - f"Mardi ({self.config.offpeak_hours_1});" - f"Mercredi ({self.config.offpeak_hours_2});" - f"Jeudi ({self.config.offpeak_hours_3});" - f"Vendredi ({self.config.offpeak_hours_4});" - f"Samedi ({self.config.offpeak_hours_5});" - f"Dimanche ({self.config.offpeak_hours_6});" - ) - - offpeak_hours = [] - idx = 0 - while idx <= 6: - _offpeak_hours = [] - offpeak_hour = getattr(self.config, f"offpeak_hours_{idx}") - if not isinstance(offpeak_hour, str): - logging.error( - [ - f"offpeak_hours_{idx} n'est pas une chaine de caractères", - " Format si une seule période : 00H00-06H00", - " Format si plusieurs périodes : 00H00-06H00;12H00-14H00", - ] - ) - else: - for offpeak_hours_data in getattr(self.config, f"offpeak_hours_{idx}").split(";"): - if isinstance(offpeak_hours_data, str): - _offpeak_hours.append(offpeak_hours_data.split("-")) - - offpeak_hours.append(_offpeak_hours) - idx = idx + 1 - - yesterday = datetime.combine(datetime.now(tz=UTC) - relativedelta(days=1), datetime.max.time()) - previous_week = datetime.combine(yesterday - relativedelta(days=7), datetime.min.time()) - yesterday_last_year = yesterday - relativedelta(years=1) - - info = { - "yesterday": yesterday.strftime(self.date_format), - "previous_week": previous_week.strftime(self.date_format), - "yesterday_last_year": yesterday_last_year.strftime(self.date_format), - } - - # current_week - current_week = stats.current_week() - current_week_value = current_week["value"] - info["current_week"] = { - "begin": current_week["begin"], - "end": current_week["end"], - } - # last_week - last_week = stats.last_week() - last_week_value = last_week["value"] - info["last_week"] = {"begin": last_week["begin"], "end": last_week["end"]} - # 
current_week_last_year - current_week_last_year = stats.current_week_last_year() - current_week_last_year_value = current_week_last_year["value"] - info["current_week_last_year"] = { - "begin": current_week_last_year["begin"], - "end": current_week_last_year["end"], - } - # last_month - last_month = stats.last_month() - last_month_value = last_month["value"] - info["last_month"] = {"begin": last_month["begin"], "end": last_month["end"]} - # current_month - current_month = stats.current_month() - current_month_value = current_month["value"] - info["current_month"] = { - "begin": current_month["begin"], - "end": current_month["end"], - } - # current_month_last_year - current_month_last_year = stats.current_month_last_year() - current_month_last_year_value = current_month_last_year["value"] - info["current_month_last_year"] = { - "begin": current_month_last_year["begin"], - "end": current_month_last_year["end"], - } - # last_month_last_year - last_month_last_year = stats.last_month_last_year() - last_month_last_year_value = last_month_last_year["value"] - info["last_month_last_year"] = { - "begin": last_month_last_year["begin"], - "end": last_month_last_year["end"], - } - # current_year - current_year = stats.current_year() - current_year_value = current_year["value"] - info["current_year"] = { - "begin": current_year["begin"], - "end": current_year["end"], - } - # current_year_last_year - current_year_last_year = stats.current_year_last_year() - current_year_last_year_value = current_year_last_year["value"] - info["current_year_last_year"] = { - "begin": current_year_last_year["begin"], - "end": current_year_last_year["end"], - } - # last_year - last_year = stats.last_year() - last_year_value = last_year["value"] - info["last_year"] = {"begin": last_year["begin"], "end": last_year["end"]} - # yesterday_hc_hp - yesterday_hc_hp = stats.yesterday_hc_hp() - yesterday_hc_value = yesterday_hc_hp["value"]["hc"] - yesterday_hp_value = yesterday_hc_hp["value"]["hp"] - 
info["yesterday_hc_hp"] = { - "begin": yesterday_hc_hp["begin"], - "end": yesterday_hc_hp["end"], - } - - # evolution - peak_offpeak_percent = stats.peak_offpeak_percent() - current_week_evolution = stats.current_week_evolution() - current_month_evolution = stats.current_month_evolution() - yesterday_evolution = stats.yesterday_evolution() - monthly_evolution = stats.monthly_evolution() - yearly_evolution = stats.yearly_evolution() - yesterday_last_year = DB.get_daily_date( - self.usage_point_id, - datetime.combine(yesterday_last_year, datetime.min.time()), - ) - dailyweek_cost = [] - dailyweek_hp = [] - dailyweek_cost_hp = [] - dailyweek_hc = [] - dailyweek_cost_hc = [] - yesterday_hp_value_cost = 0 - if measurement_direction == "consumption": - daily_cost = 0 - plan = DB.get_usage_point_plan(self.usage_point_id).upper() - if plan in ("HC/HP", "HC/HP"): - for i in range(7): - hp = stats.detail(i, "HP")["value"] - hc = stats.detail(i, "HC")["value"] - dailyweek_hp.append(convert_kw(hp)) - dailyweek_hc.append(convert_kw(hc)) - cost_hp = convert_kw_to_euro(hp, self.config.consumption_price_hp) - cost_hc = convert_kw_to_euro(hc, self.config.consumption_price_hc) - dailyweek_cost_hp.append(cost_hp) - dailyweek_cost_hc.append(cost_hc) - value = cost_hp + cost_hc - if i == 0: - daily_cost = value - elif i == 1: - yesterday_hp_value_cost = convert_kw_to_euro(hp, self.config.consumption_price_hp) - dailyweek_cost.append(round(value, 1)) - elif plan == "TEMPO": - tempo_config = DB.get_tempo_config("price") - for i in range(7): - tempo_data = stats.tempo(i)["value"] - hp = tempo_data["blue_hp"] + tempo_data["white_hp"] + tempo_data["red_hp"] - hc = tempo_data["blue_hc"] + tempo_data["white_hc"] + tempo_data["red_hc"] - dailyweek_hp.append(convert_kw(hp)) - dailyweek_hc.append(convert_kw(hc)) - cost_hp = ( - convert_kw_to_euro( - tempo_data["blue_hp"], - convert_price(tempo_config["blue_hp"]), - ) - + convert_kw_to_euro( - tempo_data["white_hp"], - 
convert_price(tempo_config["white_hp"]), - ) - + convert_kw_to_euro(tempo_data["red_hp"], convert_price(tempo_config["red_hp"])) - ) - cost_hc = ( - convert_kw_to_euro( - tempo_data["blue_hc"], - convert_price(tempo_config["blue_hc"]), - ) - + convert_kw_to_euro( - tempo_data["white_hc"], - convert_price(tempo_config["white_hc"]), - ) - + convert_kw_to_euro(tempo_data["red_hc"], convert_price(tempo_config["red_hc"])) - ) - dailyweek_cost_hp.append(cost_hp) - dailyweek_cost_hc.append(cost_hc) - value = cost_hp + cost_hc - if i == 0: - daily_cost = value - elif i == 1: - yesterday_hp_value_cost = cost_hp - dailyweek_cost.append(round(value, 1)) - else: - for i in range(7): - hour_hp = stats.detail(i, "HP")["value"] - hour_hc = stats.detail(i, "HC")["value"] - dailyweek_hp.append(convert_kw(hour_hp)) - dailyweek_hc.append(convert_kw(hour_hc)) - dailyweek_cost_hp.append(convert_kw_to_euro(hour_hp, self.config.consumption_price_base)) - dailyweek_cost_hc.append(convert_kw_to_euro(hour_hc, self.config.consumption_price_base)) - dailyweek_cost.append( - convert_kw_to_euro(stats.daily(i)["value"], self.config.consumption_price_base) - ) - if i == 0: - daily_cost = convert_kw_to_euro(stats.daily(0)["value"], self.config.consumption_price_base) - elif i == 1: - yesterday_hp_value_cost = convert_kw_to_euro(hour_hp, self.config.consumption_price_base) - else: - daily_cost = convert_kw_to_euro(stats.daily(0)["value"], self.config.production_price) - for i in range(7): - dailyweek_cost.append(convert_kw_to_euro(stats.daily(i)["value"], self.config.production_price)) - - if not dailyweek_hp: - dailyweek_hp = [0, 0, 0, 0, 0, 0, 0, 0] - if not dailyweek_cost_hp: - dailyweek_cost_hp = [0, 0, 0, 0, 0, 0, 0, 0] - if not dailyweek_hc: - dailyweek_hc = [0, 0, 0, 0, 0, 0, 0, 0] - if not dailyweek_cost_hc: - dailyweek_cost_hc = [0, 0, 0, 0, 0, 0, 0, 0] - - yesterday_consumption_max_power = 0 - if self.config.consumption_max_power: - yesterday_consumption_max_power = 
stats.max_power(0)["value"] - - error_last_call = DB.get_error_log(self.usage_point_id) - if error_last_call is None: - error_last_call = "" - - attributes = { - "yesterdayDate": stats.daily(0)["begin"], - "yesterday": convert_kw(stats.daily(0)["value"]), - "serviceEnedis": "myElectricalData", - "yesterdayLastYearDate": (datetime.now(tz=UTC) - relativedelta(years=1)).strftime(self.date_format), - "yesterdayLastYear": convert_kw(yesterday_last_year.value) if hasattr(yesterday_last_year, "value") else 0, - "daily": [ - convert_kw(stats.daily(0)["value"]), - convert_kw(stats.daily(1)["value"]), - convert_kw(stats.daily(2)["value"]), - convert_kw(stats.daily(3)["value"]), - convert_kw(stats.daily(4)["value"]), - convert_kw(stats.daily(5)["value"]), - convert_kw(stats.daily(6)["value"]), - ], - "current_week": convert_kw(current_week_value), - "last_week": convert_kw(last_week_value), - "day_1": convert_kw(stats.daily(0)["value"]), - "day_2": convert_kw(stats.daily(1)["value"]), - "day_3": convert_kw(stats.daily(2)["value"]), - "day_4": convert_kw(stats.daily(3)["value"]), - "day_5": convert_kw(stats.daily(4)["value"]), - "day_6": convert_kw(stats.daily(5)["value"]), - "day_7": convert_kw(stats.daily(6)["value"]), - "current_week_last_year": convert_kw(current_week_last_year_value), - "last_month": convert_kw(last_month_value), - "current_month": convert_kw(current_month_value), - "current_month_last_year": convert_kw(current_month_last_year_value), - "last_month_last_year": convert_kw(last_month_last_year_value), - "last_year": convert_kw(last_year_value), - "current_year": convert_kw(current_year_value), - "current_year_last_year": convert_kw(current_year_last_year_value), - "dailyweek": [ - stats.daily(0)["begin"], - stats.daily(1)["begin"], - stats.daily(2)["begin"], - stats.daily(3)["begin"], - stats.daily(4)["begin"], - stats.daily(5)["begin"], - stats.daily(6)["begin"], - ], - "dailyweek_cost": dailyweek_cost, - # TODO : If current_day = 0, dailyweek_hp & 
dailyweek_hc just next day... - "dailyweek_costHP": dailyweek_cost_hp, - "dailyweek_HP": dailyweek_hp, - "dailyweek_costHC": dailyweek_cost_hc, - "dailyweek_HC": dailyweek_hc, - "daily_cost": daily_cost, - "yesterday_HP_cost": yesterday_hp_value_cost, - "yesterday_HP": convert_kw(yesterday_hp_value), - "day_1_HP": stats.detail(0, "HP")["value"], - "day_2_HP": stats.detail(1, "HP")["value"], - "day_3_HP": stats.detail(2, "HP")["value"], - "day_4_HP": stats.detail(3, "HP")["value"], - "day_5_HP": stats.detail(4, "HP")["value"], - "day_6_HP": stats.detail(5, "HP")["value"], - "day_7_HP": stats.detail(6, "HP")["value"], - "yesterday_HC_cost": convert_kw_to_euro(yesterday_hc_value, self.config.consumption_price_hc), - "yesterday_HC": convert_kw(yesterday_hc_value), - "day_1_HC": stats.detail(0, "HC")["value"], - "day_2_HC": stats.detail(1, "HC")["value"], - "day_3_HC": stats.detail(2, "HC")["value"], - "day_4_HC": stats.detail(3, "HC")["value"], - "day_5_HC": stats.detail(4, "HC")["value"], - "day_6_HC": stats.detail(5, "HC")["value"], - "day_7_HC": stats.detail(6, "HC")["value"], - "peak_offpeak_percent": round(peak_offpeak_percent, 2), - "yesterdayConsumptionMaxPower": yesterday_consumption_max_power, - "dailyweek_MP": [ - convert_kw(stats.max_power(0)["value"]), - convert_kw(stats.max_power(1)["value"]), - convert_kw(stats.max_power(2)["value"]), - convert_kw(stats.max_power(3)["value"]), - convert_kw(stats.max_power(4)["value"]), - convert_kw(stats.max_power(5)["value"]), - convert_kw(stats.max_power(6)["value"]), - ], - "dailyweek_MP_time": [ - (stats.max_power_time(0)["value"]), - (stats.max_power_time(1)["value"]), - (stats.max_power_time(2)["value"]), - (stats.max_power_time(3)["value"]), - (stats.max_power_time(4)["value"]), - (stats.max_power_time(5)["value"]), - (stats.max_power_time(6)["value"]), - ], - "dailyweek_MP_over": [ - stats.max_power_over(0)["value"], - stats.max_power_over(1)["value"], - stats.max_power_over(2)["value"], - 
stats.max_power_over(3)["value"], - stats.max_power_over(4)["value"], - stats.max_power_over(5)["value"], - stats.max_power_over(6)["value"], - ], - "dailyweek_Tempo": [ - stats.tempo_color(0)["value"], - stats.tempo_color(1)["value"], - stats.tempo_color(2)["value"], - stats.tempo_color(3)["value"], - stats.tempo_color(4)["value"], - stats.tempo_color(5)["value"], - stats.tempo_color(6)["value"], - ], - "monthly_evolution": round(monthly_evolution, 2), - "current_week_evolution": round(current_week_evolution, 2), - "current_month_evolution": round(current_month_evolution, 2), - "yesterday_evolution": round(yesterday_evolution, 2), - "yearly_evolution": round(yearly_evolution, 2), - "friendly_name": f"myelectricaldata.{self.usage_point_id}", - "errorLastCall": error_last_call, - "errorLastCallInterne": "", - "current_week_number": yesterday.strftime("%V"), - "offpeak_hours_enedis": offpeak_hours_enedis, - "offpeak_hours": offpeak_hours, - "subscribed_power": self.config.subscribed_power, - # "info": info - } - - uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}" - self.sensor( - topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}", - name=f"{measurement_direction}", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=uniq_id, - unit_of_measurement="kWh", - attributes=attributes, - state=convert_kw(state), - device_class="energy", - numPDL=self.usage_point_id, - ) - - def tempo(self): - """Add a sensor to Home Assistant with the tempo data for today and tomorrow. 
- - Returns: - None - - """ - uniq_id = "myelectricaldata_tempo_today" - begin = datetime.combine(datetime.now(tz=UTC), datetime.min.time()) - end = datetime.combine(datetime.now(tz=UTC), datetime.max.time()) - tempo_data = DB.get_tempo_range(begin, end, "asc") - if tempo_data: - date = tempo_data[0].date.strftime(self.date_format_detail) - state = tempo_data[0].color - else: - date = begin.strftime(self.date_format_detail) - state = "Inconnu" - attributes = {"date": date} - self.tempo_color = state - self.sensor( - topic="myelectricaldata_rte/tempo_today", - name="Today", - device_name="RTE Tempo", - device_model="RTE", - device_identifiers="rte_tempo", - uniq_id=uniq_id, - attributes=attributes, - state=state, - ) - - uniq_id = "myelectricaldata_tempo_tomorrow" - begin = begin + timedelta(days=1) - end = end + timedelta(days=1) - tempo_data = DB.get_tempo_range(begin, end, "asc") - if tempo_data: - date = tempo_data[0].date.strftime(self.date_format_detail) - state = tempo_data[0].color - else: - date = begin.strftime(self.date_format_detail) - state = "Inconnu" - attributes = {"date": date} - self.sensor( - topic="myelectricaldata_rte/tempo_tomorrow", - name="Tomorrow", - device_name="RTE Tempo", - device_model="RTE", - device_identifiers="rte_tempo", - uniq_id=uniq_id, - attributes=attributes, - state=state, - ) - - def tempo_days(self): - """Add tempo days sensors to Home Assistant. - - This method retrieves tempo days configuration from the database - and creates sensors for each color and corresponding number of days. - - Returns: - None - """ - tempo_days = DB.get_tempo_config("days") - for color, days in tempo_days.items(): - self.tempo_days_sensor(f"{color}", days) - - def tempo_days_sensor(self, color, days): - """Add a sensor to Home Assistant with the given name and state. - - Args: - color (str): The color of the tempo (e.g. blue, white, red). - days (int): The number of days in the tempo. 
- - Returns: - None - - """ - uniq_id = f"myelectricaldata_tempo_days_{color}" - self.sensor( - topic=f"myelectricaldata_edf/tempo_days_{color}", - name=f"Days {color.capitalize()}", - device_name="EDF Tempo", - device_model="EDF", - device_identifiers="edf_tempo", - uniq_id=uniq_id, - state=days, - ) - - def tempo_info(self): - """Add tempo information sensor to Home Assistant. - - This method retrieves tempo configuration from the database - and creates a sensor with information about tempo days and prices. - - Returns: - None - """ - uniq_id = "myelectricaldata_tempo_info" - tempo_days = DB.get_tempo_config("days") - tempo_price = DB.get_tempo_config("price") - if 22 > int(datetime.now(tz=UTC).strftime("%H")) < 6: - measure_type = "hc" - else: - measure_type = "hp" - current_price = None - if self.tempo_color.lower() in ["blue", "white", "red"]: - current_price = convert_price(tempo_price[f"{self.tempo_color.lower()}_{measure_type}"].replace(",", ".")) - attributes = { - "days_blue": f'{tempo_days["blue"]} / 300', - "days_white": f'{tempo_days["white"]} / 43', - "days_red": f'{tempo_days["red"]} / 22', - "price_blue_hp": convert_price(tempo_price["blue_hp"]), - "price_blue_hc": convert_price(tempo_price["blue_hc"]), - "price_white_hp": convert_price(tempo_price["white_hp"]), - "price_white_hc": convert_price(tempo_price["white_hc"]), - "price_red_hp": convert_price(tempo_price["red_hp"]), - "price_red_hc": convert_price(tempo_price["red_hc"]), - } - self.sensor( - topic="myelectricaldata_edf/tempo_info", - name="Info", - device_name="EDF Tempo", - device_model="EDF", - device_identifiers="edf_tempo", - uniq_id=uniq_id, - attributes=attributes, - state=current_price, - unit_of_measurement="EUR/kWh", - ) - - def tempo_price(self): - """Add tempo price sensors to Home Assistant. - - This method retrieves tempo price configuration from the database - and creates sensors for each color with corresponding price. 
- - Returns: - None - """ - tempo_price = DB.get_tempo_config("price") - for color, price in tempo_price.items(): - self.tempo_price_sensor( - f"{color}", - float(price.replace(",", ".")), - f"{color.split('_')[0].capitalize()}{color.split('_')[1].capitalize()}", - ) - - def tempo_price_sensor(self, color, price, name): - """Add tempo price sensor to Home Assistant. - - This method creates a sensor for a specific tempo color with the corresponding price. - - Args: - color (str): The color of the tempo. - price (float): The price of the tempo. - name (str): The name of the tempo. - - Returns: - None - """ - uniq_id = f"myelectricaldata_tempo_price_{color}" - name = f"{name[0:-2]} {name[-2:]}" - self.sensor( - topic=f"myelectricaldata_edf/tempo_price_{color}", - name=f"Price {name}", - device_name="EDF Tempo", - device_model="EDF", - device_identifiers="edf_tempo", - uniq_id=uniq_id, - state=convert_price(price), - unit_of_measurement="EUR/kWh", - ) - - def ecowatt(self): - """Calculate the ecowatt sensor values for different delta values. - - This method calculates the ecowatt sensor values for different delta values (0, 1, and 2). - It calls the `ecowatt_delta` method with the corresponding delta values. - - Returns: - None - """ - self.ecowatt_delta("J0", 0) - self.ecowatt_delta("J1", 1) - self.ecowatt_delta("J2", 2) - - def ecowatt_delta(self, name, delta): - """Calculate the delta value for the ecowatt sensor. - - Args: - name (str): The name of the ecowatt sensor. - delta (int): The number of days to calculate the delta. 
- - Returns: - None - """ - uniq_id = f"myelectricaldata_ecowatt_{name}" - current_date = datetime.combine(datetime.now(tz=UTC), datetime.min.time()) + timedelta(days=delta) - fetch_date = current_date - timedelta(days=1) - ecowatt_data = DB.get_ecowatt_range(fetch_date, fetch_date, "asc") - day_value = 0 - if ecowatt_data: - forecast = {} - for data in ecowatt_data: - day_value = data.value - for date, value in json.loads(data.detail.replace("'", '"')).items(): - date = datetime.strptime(date, self.date_format_detail) - forecast[f'{date.strftime("%H")} h'] = value - attributes = { - "date": current_date.strftime(self.date_format), - "forecast": forecast, - } - self.sensor( - topic=f"myelectricaldata_rte/ecowatt_{name}", - name=f"{name}", - device_name="RTE EcoWatt", - device_model="RTE", - device_identifiers="rte_ecowatt", - uniq_id=uniq_id, - attributes=attributes, - state=day_value, - ) diff --git a/src/models/export_home_assistant_ws.py b/src/models/export_home_assistant_ws.py deleted file mode 100644 index dc82ad9a..00000000 --- a/src/models/export_home_assistant_ws.py +++ /dev/null @@ -1,561 +0,0 @@ -"""Import data in statistique recorder of Home Assistant.""" - -import json -import logging -import ssl -import traceback -from datetime import datetime, timedelta - -import pytz -import websocket - -from dependencies import chunks_list, is_integer, str2bool, truncate -from init import CONFIG, DB -from models.export_home_assistant import HomeAssistant -from models.stat import Stat - -TZ_PARIS = pytz.timezone("Europe/Paris") - - -class HomeAssistantWs: - """Class to interact with Home Assistant WebSocket API.""" - - def __init__(self, usage_point_id): - """Initialize the class with the usage point id. 
- - Args: - usage_point_id (str): The usage point id - """ - self.websocket = None - self.usage_point_id = usage_point_id - self.usage_point_id_config = DB.get_usage_point(self.usage_point_id) - self.config = None - self.url = None - self.ssl = None - self.token = None - self.id = 1 - self.purge = False - self.purge_force = True - self.batch_size = 1000 - self.current_stats = [] - if self.load_config(): - if self.connect(): - self.mqtt = CONFIG.mqtt_config() - # self.mqtt = False - self.import_data() - else: - logging.critical("La configuration Home Assistant WebSocket est erronée") - if self.websocket.connected: - self.websocket.close() - - def load_config(self): - """Load the Home Assistant WebSocket configuration from the configuration file. - - Returns: - bool: True if the configuration is loaded, False otherwise - """ - self.config = CONFIG.home_assistant_ws_config() - if self.config is not None: - if "url" in self.config: - self.url = self.config["url"] - if self.config.get("ssl"): - url_prefix = "wss" - else: - url_prefix = "ws" - self.url = f"{url_prefix}://{self.url}/api/websocket" - else: - logging.critical("L'url du WebSocket Home Assistant est obligatoire") - return False - if "token" in self.config: - self.token = self.config["token"] - else: - logging.critical("Le token du WebSocket Home Assistant est obligatoire") - return False - if "purge" in self.config: - self.purge = str2bool(self.config["purge"]) - if "batch_size" in self.config: - if not is_integer(self.config["batch_size"]): - logging.error("Le paramètre batch_size du WebSocket Home Assistant doit être un entier") - else: - self.batch_size = int(self.config["batch_size"]) - return True - - def connect(self): - """Connect to the Home Assistant WebSocket server. 
- - Returns: - bool: True if the connection is successful, False otherwise - """ - try: - check_ssl = CONFIG.get("ssl") - sslopt = None - if check_ssl and "gateway" in check_ssl: - sslopt = {"cert_reqs": ssl.CERT_NONE} - self.websocket = websocket.WebSocket(sslopt=sslopt) - logging.info("Connexion au WebSocket Home Assistant %s", self.url) - self.websocket.connect( - self.url, - timeout=5, - ) - output = json.loads(self.websocket.recv()) - if "type" in output and output["type"] == "auth_required": - logging.info("Authentification requise") - return self.authentificate() - return True - except Exception as _e: - self.websocket.close() - logging.error(_e) - logging.critical("Connexion impossible vers Home Assistant") - logging.warning( - " => ATTENTION, le WebSocket est également soumis au ban en cas de plusieurs échec d'authentification." - ) - logging.warning(" => ex: 403: Forbidden") - - def authentificate(self): - """Authenticate with the Home Assistant WebSocket server. - - Returns: - bool: True if the authentication is successful, False otherwise - """ - data = {"type": "auth", "access_token": self.token} - auth_output = self.send(data) - if auth_output["type"] == "auth_ok": - logging.info(" => OK") - return True - logging.error(" => Authentification impossible, merci de vérifier votre url & token.") - return False - - def send(self, data): - """Send data to the Home Assistant WebSocket server. - - Args: - data (dict): The data to send - Returns: - dict: The output from the server - """ - self.websocket.send(json.dumps(data)) - self.id = self.id + 1 - output = json.loads(self.websocket.recv()) - if "type" in output and output["type"] == "result": - if not output["success"]: - logging.error(f"Erreur d'envoi : {data}") - logging.error(output) - return output - - def list_data(self): - """List the data already cached in Home Assistant. 
- - Returns: - dict: The list of data - """ - logging.info("Liste les données déjà en cache.") - import_statistics = { - "id": self.id, - "type": "recorder/list_statistic_ids", - "statistic_type": "sum", - } - current_stats = self.send(import_statistics) - for stats in current_stats["result"]: - if stats["statistic_id"].startswith("myelectricaldata:"): - self.current_stats.append(stats["statistic_id"]) - return current_stats - - def clear_data(self, statistic_ids): - """Clear the data imported into Energy. - - Args: - statistic_ids (list): The list of statistic ids - Returns: - dict: The output from clearing the data - """ - logging.info("Effacement des données importées dans Energy.") - for key in statistic_ids: - logging.info(f" - {key}") - clear_statistics = { - "id": self.id, - "type": "recorder/clear_statistics", - "statistic_ids": statistic_ids, - } - for data in self.current_stats: - logging.info(f" - {data}") - clear_stat = self.send(clear_statistics) - return clear_stat - - def get_data(self, statistic_ids, begin, end): - """Get the data for a given period. 
- - Args: - statistic_ids (list): The list of statistic ids - begin (datetime): The start of the period - end (datetime): The end of the period - Returns: - dict: The data for the period - """ - statistics_during_period = { - "id": self.id, - "type": "recorder/statistics_during_period", - "start_time": begin.isoformat(), - "end_time": end.isoformat(), - "statistic_ids": [statistic_ids], - "period": "hour", - } - stat_period = self.send(statistics_during_period) - return stat_period - - def import_data(self): # noqa: C901 - """Import the data for the usage point into Home Assistant.""" - logging.info(f"Importation des données du point de livraison : {self.usage_point_id}") - try: - plan = DB.get_usage_point_plan(self.usage_point_id).upper() - if self.usage_point_id_config.consumption_detail: - logging.info("Consommation") - measurement_direction = "consumption" - if "max_date" in self.config: - logging.warning("Max date détectée %s", self.config["max_date"]) - begin = datetime.strptime(self.config["max_date"], "%Y-%m-%d") - detail = DB.get_detail_all(begin=begin, usage_point_id=self.usage_point_id, order_dir="desc") - else: - detail = DB.get_detail_all(usage_point_id=self.usage_point_id, order_dir="desc") - - cost = 0 - last_year = None - last_month = None - - stats_kwh = {} - stats_euro = {} - - db_tempo_price = DB.get_tempo_config("price") - tempo_color_ref = {} - for tempo_data in DB.get_tempo(): - tempo_color_ref[tempo_data.date] = tempo_data.color - - stats = Stat(usage_point_id=self.usage_point_id, measurement_direction="consumption") - - for data in detail: - year = int(f'{data.date.strftime("%Y")}') - if last_year is None or year != last_year: - logging.info(f"{year} :") - month = int(f'{data.date.strftime("%m")}') - if last_month is None or month != last_month: - logging.info(f"- {month}") - last_year = year - last_month = month - hour_minute = int(f'{data.date.strftime("%H")}{data.date.strftime("%M")}') - name = f"MyElectricalData - {self.usage_point_id}" 
- statistic_id = f"myelectricaldata:{self.usage_point_id}" - value = data.value / (60 / data.interval) - if plan == "BASE": - name = f"{name} {plan} {measurement_direction}" - statistic_id = f"{statistic_id}_{plan.lower()}_{measurement_direction}" - cost = value * self.usage_point_id_config.consumption_price_base / 1000 - tag = "base" - elif plan == "HC/HP": - measure_type = stats.get_mesure_type(data.date) - if measure_type == "HC": - name = f"{name} HC {measurement_direction}" - statistic_id = f"{statistic_id}_hc_{measurement_direction}" - cost = value * self.usage_point_id_config.consumption_price_hc / 1000 - tag = "hc" - else: - name = f"{name} HP {measurement_direction}" - statistic_id = f"{statistic_id}_hp_{measurement_direction}" - cost = value * self.usage_point_id_config.consumption_price_hp / 1000 - tag = "hp" - elif plan == "TEMPO": - if 600 <= hour_minute < 2200: - hour_type = "HP" - else: - hour_type = "HC" - if 600 <= hour_minute <= 2330: - date = datetime.combine(data.date, datetime.min.time()) - else: - date = datetime.combine(data.date - timedelta(days=1), datetime.min.time()) - - if date not in tempo_color_ref: - logging.error(f"Import impossible, pas de donnée tempo sur la date du {data.date}") - else: - day_color = tempo_color_ref[date] - tempo_color = f"{day_color}{hour_type}" - tempo_color_price_key = f"{day_color.lower()}_{hour_type.lower()}" - tempo_price = float(db_tempo_price[tempo_color_price_key]) - cost = value / 1000 * tempo_price - name = f"{name} {tempo_color} {measurement_direction}" - statistic_id = f"{statistic_id}_{tempo_color.lower()}_{measurement_direction}" - tag = tempo_color.lower() - else: - logging.error(f"Plan {plan} inconnu.") - - date = TZ_PARIS.localize(data.date, "%Y-%m-%d %H:%M:%S").replace(minute=0, second=0, microsecond=0) - key = date.strftime("%Y-%m-%d %H:%M:%S") - - # KWH - if statistic_id not in stats_kwh: - stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} - if key not in 
stats_kwh[statistic_id]["data"]: - stats_kwh[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - value = value / 1000 - stats_kwh[statistic_id]["data"][key]["state"] = ( - stats_kwh[statistic_id]["data"][key]["state"] + value - ) - stats_kwh[statistic_id]["tag"] = tag - stats_kwh[statistic_id]["sum"] += value - stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] - - # EURO - statistic_id = f"{statistic_id}_cost" - if statistic_id not in stats_euro: - stats_euro[statistic_id] = { - "name": f"{name} Cost", - "sum": 0, - "data": {}, - } - if key not in stats_euro[statistic_id]["data"]: - stats_euro[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - stats_euro[statistic_id]["tag"] = tag - stats_euro[statistic_id]["data"][key]["state"] += cost - stats_euro[statistic_id]["sum"] += cost - stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] - - # CLEAN OLD DATA - if self.purge or self.purge_force: - logging.info(f"Clean old data import In Home Assistant Recorder {self.usage_point_id}") - list_statistic_ids = [] - for statistic_id, _ in stats_kwh.items(): - list_statistic_ids.append(statistic_id) - self.clear_data(list_statistic_ids) - CONFIG.set("purge", False) - - - for statistic_id, data in stats_kwh.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "kWh", - } - - - chunks = list(chunks_list(list(data["data"].values()), self.batch_size)) - chunks_len = len(chunks) - for i, chunk in enumerate(chunks): - logging.info("Envoi des données de conso %s vers Home Assistant %s/%s (%s => %s)", - data["tag"].upper(), - i+1, - chunks_len, - chunk[-1]["start"], - chunk[0]["start"] - ) - self.send({ - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": chunk, - }) - - if self.mqtt and 
"enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{data["tag"]}_{measurement_direction}/{self.usage_point_id}_energy", - name=f"{data["tag"]} {measurement_direction}", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="kWh", - state=truncate(data["sum"]), - device_class="energy", - numPDL=self.usage_point_id, - ) - - for statistic_id, data in stats_euro.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "EURO", - } - chunks = list(chunks_list(list(data["data"].values()), self.batch_size)) - chunks_len = len(chunks) - for i, chunk in enumerate(chunks): - logging.info("Envoi des données de coût %s vers Home Assistant %s/%s (%s => %s)", - data["tag"].upper(), - i+1, - chunks_len, - chunk[0]["start"], - chunk[-1]["start"] - ) - self.send({ - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": list(chunk), - }) - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{data["tag"]}_{measurement_direction}/{self.usage_point_id}_cost", - name=f"{data["tag"]} {measurement_direction} cost", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="EURO", - state=truncate(data["sum"]), - device_class="monetary", - numPDL=self.usage_point_id, - ) - - if self.usage_point_id_config.production_detail: - logging.info("Production") - measurement_direction = "production" - if "max_date" in self.config: - logging.warning("Max date détectée %s", self.config["max_date"]) - begin = 
datetime.strptime(self.config["max_date"], "%Y-%m-%d") - detail = DB.get_detail_all( - begin=begin, - usage_point_id=self.usage_point_id, - measurement_direction="production", - order_dir="desc", - ) - else: - detail = DB.get_detail_all( - usage_point_id=self.usage_point_id, measurement_direction="production", order_dir="desc" - ) - - cost = 0 - last_year = None - last_month = None - - stats_kwh = {} - stats_euro = {} - for data in detail: - year = int(f'{data.date.strftime("%Y")}') - if last_year is None or year != last_year: - logging.info(f"{year} :") - month = int(f'{data.date.strftime("%m")}') - if last_month is None or month != last_month: - logging.info(f"- {month}") - last_year = year - last_month = month - hour_minute = int(f'{data.date.strftime("%H")}{data.date.strftime("%M")}') - name = f"MyElectricalData - {self.usage_point_id} {measurement_direction}" - statistic_id = f"myelectricaldata:{self.usage_point_id}_{measurement_direction}" - value = data.value / (60 / data.interval) - cost = value * self.usage_point_id_config.production_price / 1000 - date = TZ_PARIS.localize(data.date, "%Y-%m-%d %H:%M:%S").replace(minute=0, second=0, microsecond=0) - key = date.strftime("%Y-%m-%d %H:%M:%S") - - # KWH - if statistic_id not in stats_kwh: - stats_kwh[statistic_id] = {"name": name, "sum": 0, "data": {}} - if key not in stats_kwh[statistic_id]["data"]: - stats_kwh[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - value = value / 1000 - stats_kwh[statistic_id]["data"][key]["state"] = ( - stats_kwh[statistic_id]["data"][key]["state"] + value - ) - stats_kwh[statistic_id]["sum"] += value - stats_kwh[statistic_id]["data"][key]["sum"] = stats_kwh[statistic_id]["sum"] - - # EURO - statistic_id = f"{statistic_id}_revenue" - if statistic_id not in stats_euro: - stats_euro[statistic_id] = { - "name": f"{name} Revenue", - "sum": 0, - "data": {}, - } - if key not in stats_euro[statistic_id]["data"]: - 
stats_euro[statistic_id]["data"][key] = { - "start": date.isoformat(), - "state": 0, - "sum": 0, - } - stats_euro[statistic_id]["data"][key]["state"] += cost - stats_euro[statistic_id]["sum"] += cost - stats_euro[statistic_id]["data"][key]["sum"] = stats_euro[statistic_id]["sum"] - - if self.purge or self.purge_force: - list_statistic_ids = [] - for statistic_id, _ in stats_kwh.items(): - list_statistic_ids.append(statistic_id) - self.clear_data(list_statistic_ids) - CONFIG.set("purge", False) - - for statistic_id, data in stats_kwh.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "kWh", - } - import_statistics = { - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": list(data["data"].values()), - } - self.send(import_statistics) - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}_energy", - name=f"{measurement_direction} energy", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="kWh", - state=truncate(data["sum"]), - device_class="energy", - numPDL=self.usage_point_id, - ) - for statistic_id, data in stats_euro.items(): - metadata = { - "has_mean": False, - "has_sum": True, - "name": data["name"], - "source": "myelectricaldata", - "statistic_id": statistic_id, - "unit_of_measurement": "EURO", - } - import_statistics = { - "id": self.id, - "type": "recorder/import_statistics", - "metadata": metadata, - "stats": list(data["data"].values()), - } - self.send(import_statistics) - if self.mqtt and "enable" in self.mqtt and str2bool(self.mqtt["enable"]): - HomeAssistant(self.usage_point_id).sensor( - 
topic=f"myelectricaldata_{measurement_direction}/{self.usage_point_id}_cost", - name=f"{measurement_direction} cost", - device_name=f"Linky {self.usage_point_id}", - device_model=f"linky {self.usage_point_id}", - device_identifiers=f"{self.usage_point_id}", - uniq_id=statistic_id, - unit_of_measurement="EURO", - state=truncate(data["sum"]), - device_class="monetary", - numPDL=self.usage_point_id, - ) - except Exception as _e: - self.websocket.close() - traceback.print_exc() - logging.error(_e) - logging.critical("Erreur lors de l'export des données vers Home Assistant") diff --git a/src/models/export_influxdb.py b/src/models/export_influxdb.py deleted file mode 100755 index c6e0115b..00000000 --- a/src/models/export_influxdb.py +++ /dev/null @@ -1,203 +0,0 @@ -import ast -import logging -from datetime import datetime - -import pytz - -from dependencies import title -from init import DB, INFLUXDB -from models.stat import Stat - - -def forceRound(x, n): - import decimal - - d = decimal.Decimal(repr(x)) - targetdigit = decimal.Decimal("1e%d" % -n) - chopped = d.quantize(targetdigit, decimal.ROUND_DOWN) - return float(chopped) - - -class ExportInfluxDB: - def __init__(self, influxdb_config, usage_point_config, measurement_direction="consumption"): - self.influxdb_config = influxdb_config - self.db = DB - self.usage_point_config = usage_point_config - self.usage_point_id = self.usage_point_config.usage_point_id - self.measurement_direction = measurement_direction - self.stat = Stat(self.usage_point_id, measurement_direction=measurement_direction) - self.time_format = "%Y-%m-%dT%H:%M:%SZ" - if "timezone" not in self.influxdb_config or self.influxdb_config["timezone"] == "UTC": - self.tz = pytz.UTC - else: - self.tz = pytz.timezone(self.influxdb_config["timezone"]) - - def daily(self, measurement_direction="consumption"): - current_month = "" - if measurement_direction == "consumption": - price = self.usage_point_config.consumption_price_base - else: - price = 
self.usage_point_config.production_price - logging.info(f'Envoi des données "{measurement_direction.upper()}" dans influxdb') - get_daily_all = self.db.get_daily_all(self.usage_point_id) - get_daily_all_count = len(get_daily_all) - last_data = self.db.get_daily_last_date(self.usage_point_id, measurement_direction) - first_data = self.db.get_daily_first_date(self.usage_point_id, measurement_direction) - if last_data and first_data: - start = datetime.strftime(last_data, self.time_format) - end = datetime.strftime(first_data, self.time_format) - influxdb_data = INFLUXDB.count(start, end, measurement_direction) - count = 1 - for data in influxdb_data: - for record in data.records: - count += record.get_value() - if get_daily_all_count != count: - logging.info(f" Cache : {get_daily_all_count} / InfluxDb : {count}") - for daily in get_daily_all: - date = daily.date - # start = datetime.strftime(date, "%Y-%m-%dT00:00:00Z") - # end = datetime.strftime(date, "%Y-%m-%dT23:59:59Z") - if current_month != date.strftime("%m"): - logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") - # if len(INFLUXDB.get(start, end, measurement_direction)) == 0: - watt = daily.value - kwatt = watt / 1000 - euro = kwatt * price - INFLUXDB.write( - measurement=measurement_direction, - date=self.tz.localize(date), - tags={ - "usage_point_id": self.usage_point_id, - "year": daily.date.strftime("%Y"), - "month": daily.date.strftime("%m"), - }, - fields={ - "Wh": float(watt), - "kWh": float(forceRound(kwatt, 5)), - "price": float(forceRound(euro, 5)), - }, - ) - current_month = date.strftime("%m") - logging.info(f" => OK") - else: - logging.info(f" => Données synchronisées ({count} valeurs)") - else: - logging.info(f" => Aucune donnée") - - def detail(self, measurement_direction="consumption"): - current_month = "" - measurement = f"{measurement_direction}_detail" - logging.info(f'Envoi des données "{measurement.upper()}" dans influxdb') - get_detail_all = self.db.get_detail_all( - 
usage_point_id=self.usage_point_id, measurement_direction=measurement_direction - ) - get_detail_all_count = len(get_detail_all) - last_data = self.db.get_detail_last_date(self.usage_point_id, measurement_direction) - first_data = self.db.get_detail_first_date(self.usage_point_id, measurement_direction) - if last_data and first_data: - start = datetime.strftime(last_data, self.time_format) - end = datetime.strftime(first_data, self.time_format) - influxdb_data = INFLUXDB.count(start, end, measurement) - count = 1 - for data in influxdb_data: - for record in data.records: - count += record.get_value() - - # print(len(get_detail_all)) - # print(count) - if get_detail_all_count != count: - logging.info(f" Cache : {get_detail_all_count} / InfluxDb : {count}") - for index, detail in enumerate(get_detail_all): - date = detail.date - # start = datetime.strftime(date, self.time_format) - if current_month != date.strftime("%m"): - logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") - # if index < (len(get_detail_all) - 1): - # next_item = get_detail_all[index + 1] - # end = datetime.strftime(next_item.date, self.time_format) - # else: - # end = datetime.strftime(date, "%Y-%m-%dT23:59:59Z") - # if len(INFLUXDB.get(start, end, measurement)) == 0: - watt = detail.value - kwatt = watt / 1000 - watth = watt / (60 / detail.interval) - kwatth = watth / 1000 - if measurement_direction == "consumption": - measure_type = self.stat.get_mesure_type(date) - if measure_type == "HP": - euro = kwatth * self.usage_point_config.consumption_price_hp - else: - euro = kwatth * self.usage_point_config.consumption_price_hc - else: - euro = kwatth * self.usage_point_config.production_price - INFLUXDB.write( - measurement=measurement, - date=self.tz.localize(date), - tags={ - "usage_point_id": self.usage_point_id, - "year": detail.date.strftime("%Y"), - "month": detail.date.strftime("%m"), - "internal": detail.interval, - "measure_type": measure_type, - }, - fields={ - "W": float(watt), 
- "kW": float(forceRound(kwatt, 5)), - "Wh": float(watth), - "kWh": float(forceRound(kwatth, 5)), - "price": float(forceRound(euro, 5)), - }, - ) - current_month = date.strftime("%m") - logging.info(f" => OK") - else: - logging.info(f" => Données synchronisées ({count} valeurs)") - else: - logging.info(f" => Aucune donnée") - - def tempo(self): - measurement = "tempo" - logging.info('Envoi des données "TEMPO" dans influxdb') - tempo_data = self.db.get_tempo() - if tempo_data: - for data in tempo_data: - INFLUXDB.write( - measurement=measurement, - date=self.tz.localize(data.date), - tags={ - "usage_point_id": self.usage_point_id, - }, - fields={"color": data.color}, - ) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def ecowatt(self): - measurement = "ecowatt" - logging.info(f'Envoi des données "ECOWATT" dans influxdb') - ecowatt_data = self.db.get_ecowatt() - if ecowatt_data: - for data in ecowatt_data: - INFLUXDB.write( - measurement=f"{measurement}_daily", - date=self.tz.localize(data.date), - tags={ - "usage_point_id": self.usage_point_id, - }, - fields={"value": data.value, "message": data.message}, - ) - data_detail = ast.literal_eval(data.detail) - for date, value in data_detail.items(): - date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") - INFLUXDB.write( - measurement=f"{measurement}_detail", - date=self.tz.localize(date), - tags={ - "usage_point_id": self.usage_point_id, - }, - fields={"value": value}, - ) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") diff --git a/src/models/export_mqtt.py b/src/models/export_mqtt.py deleted file mode 100644 index fd067d0a..00000000 --- a/src/models/export_mqtt.py +++ /dev/null @@ -1,507 +0,0 @@ -import ast -import logging -from datetime import datetime, timedelta - -from dateutil.relativedelta import relativedelta - -from dependencies import title -from init import CONFIG, DB, MQTT -from models.stat import Stat - - -class ExportMqtt: - def __init__(self, usage_point_id): 
- self.config = CONFIG - self.db = DB - self.usage_point_id = usage_point_id - self.date_format = "%Y-%m-%d" - self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.mqtt = MQTT - - def status(self): - logging.info("Statut du compte.") - usage_point_id_config = self.db.get_usage_point(self.usage_point_id) - # consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - if ( - hasattr(usage_point_id_config, "consentement_expiration") - and usage_point_id_config.consentement_expiration is not None - ): - consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - else: - consentement_expiration_date = "" - if hasattr(usage_point_id_config, "call_number") and usage_point_id_config.call_number is not None: - call_number = usage_point_id_config.call_number - else: - call_number = "" - if hasattr(usage_point_id_config, "quota_reached") and usage_point_id_config.quota_reached is not None: - quota_reached = usage_point_id_config.quota_reached - else: - quota_reached = "" - if hasattr(usage_point_id_config, "quota_limit") and usage_point_id_config.quota_limit is not None: - quota_limit = usage_point_id_config.quota_limit - else: - quota_limit = "" - if hasattr(usage_point_id_config, "quota_reset_at") and usage_point_id_config.quota_reset_at is not None: - quota_reset_at = (usage_point_id_config.quota_reset_at.strftime("%Y-%m-%d %H:%M:%S"),) - else: - quota_reset_at = "" - if hasattr(usage_point_id_config, "last_call") and usage_point_id_config.last_call is not None: - last_call = (usage_point_id_config.last_call.strftime("%Y-%m-%d %H:%M:%S"),) - else: - last_call = "" - if hasattr(usage_point_id_config, "ban") and usage_point_id_config.ban is not None: - ban = usage_point_id_config.ban - else: - ban = "" - consentement_expiration = { - f"{self.usage_point_id}/status/consentement_expiration": consentement_expiration_date, - f"{self.usage_point_id}/status/call_number": 
str(call_number), - f"{self.usage_point_id}/status/quota_reached": str(quota_reached), - f"{self.usage_point_id}/status/quota_limit": str(quota_limit), - f"{self.usage_point_id}/status/quota_reset_at": str(quota_reset_at), - f"{self.usage_point_id}/status/last_call": str(last_call), - f"{self.usage_point_id}/status/ban": str(ban), - } - # print(consentement_expiration) - self.mqtt.publish_multiple(consentement_expiration) - logging.info(" => OK") - - def contract(self): - logging.info("Génération des messages du contrat") - contract_data = self.db.get_contract(self.usage_point_id) - if hasattr(contract_data, "__table__"): - output = {} - for column in contract_data.__table__.columns: - output[f"{self.usage_point_id}/contract/{column.name}"] = str(getattr(contract_data, column.name)) - self.mqtt.publish_multiple(output) - logging.info(" => OK") - else: - logging.info(" => ERREUR") - - def address(self): - logging.info(f"Génération des messages d'addresse") - address_data = self.db.get_addresse(self.usage_point_id) - if hasattr(address_data, "__table__"): - output = {} - for column in address_data.__table__.columns: - output[f"{self.usage_point_id}/address/{column.name}"] = str(getattr(address_data, column.name)) - self.mqtt.publish_multiple(output) - logging.info(" => OK") - else: - logging.info(" => ERREUR") - - def daily_annual(self, price, measurement_direction="consumption"): - logging.info("Génération des données annuelles") - date_range = self.db.get_daily_date_range(self.usage_point_id) - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - year = int(date_begin_current.strftime("%Y")) - get_daily_year = stat.get_year(year=year) 
- get_daily_month = stat.get_month(year=year) - get_daily_week = stat.get_week(year=year) - if year == int(datetime.now().strftime("%Y")): - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" - mqtt_data = { - # thisYear - f"{sub_prefix}/thisYear/dateBegin": get_daily_year["begin"], - f"{sub_prefix}/thisYear/dateEnd": get_daily_year["end"], - f"{sub_prefix}/thisYear/base/Wh": get_daily_year["value"], - f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year["value"] / 1000, 2), - f"{sub_prefix}/thisYear/base/euro": round(get_daily_year["value"] / 1000 * price, 2), - # thisMonth - f"{sub_prefix}/thisMonth/dateBegin": get_daily_month["begin"], - f"{sub_prefix}/thisMonth/dateEnd": get_daily_month["end"], - f"{sub_prefix}/thisMonth/base/Wh": get_daily_month["value"], - f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month["value"] / 1000 * price, 2), - # thisWeek - f"{sub_prefix}/thisWeek/dateBegin": get_daily_week["begin"], - f"{sub_prefix}/thisWeek/dateEnd": get_daily_week["end"], - f"{sub_prefix}/thisWeek/base/Wh": get_daily_week["value"], - f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week["value"] / 1000 * price, 2), - } - - for week in range(7): - begin = stat.daily(week)["begin"] - begin_day = datetime.strptime(stat.daily(week)["begin"], self.date_format).strftime("%A") - end = stat.daily(week)["end"] - value = stat.daily(week)["value"] - mqtt_data[f"{sub_prefix}/week/{begin_day}/dateBegin"] = begin - mqtt_data[f"{sub_prefix}/week/{begin_day}/dateEnd"] = end - mqtt_data[f"{sub_prefix}/week/{begin_day}/base/Wh"] = value - mqtt_data[f"{sub_prefix}/week/{begin_day}/base/kWh"] = round(value / 1000, 2) - mqtt_data[f"{sub_prefix}/week/{begin_day}/base/euro"] = round(value / 1000 * price, 
2) - - for month in range(1, 13): - get_daily_month = stat.get_month(year=year, month=month) - mqtt_data[f"{sub_prefix}/month/{month}/dateBegin"] = get_daily_month["begin"] - mqtt_data[f"{sub_prefix}/month/{month}/dateEnd"] = get_daily_month["end"] - mqtt_data[f"{sub_prefix}/month/{month}/base/Wh"] = get_daily_month["value"] - mqtt_data[f"{sub_prefix}/month/{month}/base/kWh"] = round(get_daily_month["value"] / 1000, 2) - mqtt_data[f"{sub_prefix}/month/{month}/base/euro"] = round( - get_daily_month["value"] / 1000 * price, 2 - ) - - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - - self.mqtt.publish_multiple(mqtt_data) - - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def daily_linear(self, price, measurement_direction="consumption"): - logging.info("Génération des données linéaires journalières.") - date_range = self.db.get_daily_date_range(self.usage_point_id) - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}" - get_daily_year_linear = stat.get_year_linear( - idx, - ) - get_daily_month_linear = stat.get_month_linear(idx) - get_daily_week_linear = stat.get_week_linear(idx) - mqtt_data = { - # thisYear - f"{sub_prefix}/thisYear/dateBegin": get_daily_year_linear["begin"], - f"{sub_prefix}/thisYear/dateEnd": get_daily_year_linear["end"], - 
f"{sub_prefix}/thisYear/base/Wh": get_daily_year_linear["value"], - f"{sub_prefix}/thisYear/base/kWh": round(get_daily_year_linear["value"] / 1000, 2), - f"{sub_prefix}/thisYear/base/euro": round(get_daily_year_linear["value"] / 1000 * price, 2), - # thisMonth - f"{sub_prefix}/thisMonth/dateBegin": get_daily_month_linear["begin"], - f"{sub_prefix}/thisMonth/dateEnd": get_daily_month_linear["end"], - f"{sub_prefix}/thisMonth/base/Wh": get_daily_month_linear["value"], - f"{sub_prefix}/thisMonth/base/kWh": round(get_daily_month_linear["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/base/euro": round(get_daily_month_linear["value"] / 1000 * price, 2), - # thisWeek - f"{sub_prefix}/thisWeek/dateBegin": get_daily_week_linear["begin"], - f"{sub_prefix}/thisWeek/dateEnd": get_daily_week_linear["end"], - f"{sub_prefix}/thisWeek/base/Wh": get_daily_week_linear["value"], - f"{sub_prefix}/thisWeek/base/kWh": round(get_daily_week_linear["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/base/euro": round(get_daily_week_linear["value"] / 1000 * price, 2), - } - - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - - self.mqtt.publish_multiple(mqtt_data) - - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption"): - logging.info("Génération des données annuelles détaillé.") - date_range = self.db.get_daily_date_range(self.usage_point_id) - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - 
date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - year = int(date_begin_current.strftime("%Y")) - month = int(datetime.now().strftime("%m")) - get_detail_year_hp = stat.get_year(year=year, measure_type="HP") - get_detail_year_hc = stat.get_year(year=year, measure_type="HC") - get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP") - get_detail_month_hc = stat.get_month(year=year, month=month, measure_type="HC") - get_detail_week_hp = stat.get_week( - year=year, - month=month, - measure_type="HP", - ) - get_detail_week_hc = stat.get_week( - year=year, - month=month, - measure_type="HC", - ) - - if year == int(datetime.now().strftime("%Y")): - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" - mqtt_data = { - # thisYear - HP - f"{sub_prefix}/thisYear/hp/Wh": get_detail_year_hp["value"], - f"{sub_prefix}/thisYear/hp/kWh": round(get_detail_year_hp["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hp/euro": round(get_detail_year_hp["value"] / 1000 * price_hp, 2), - # thisYear - HC - f"{sub_prefix}/thisYear/hc/Wh": get_detail_year_hc["value"], - f"{sub_prefix}/thisYear/hc/kWh": round(get_detail_year_hc["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hc/euro": round(get_detail_year_hc["value"] / 1000 * price_hc, 2), - # thisMonth - HP - f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_hp["value"], - f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_hp["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hp/euro": round(get_detail_month_hp["value"] / 1000 * price_hp, 2), - # thisMonth - HC - f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_hc["value"], - f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_hc["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hc/euro": round(get_detail_month_hc["value"] / 1000 * price_hc, 2), - # thisWeek - HP - 
f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_hp["value"], - f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_hp["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hp/euro": round(get_detail_week_hp["value"] / 1000 * price_hp, 2), - # thisWeek - HC - f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_hc["value"], - f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_hc["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hc/euro": round(get_detail_week_hc["value"] / 1000 * price_hc, 2), - } - - for week in range(7): - # HP - begin_hp_day = datetime.strptime(stat.detail(week, "HP")["begin"], self.date_format).strftime("%A") - value_hp = stat.detail(week, "HP")["value"] - prefix = f"{sub_prefix}/week/{begin_hp_day}/hp" - mqtt_data[f"{prefix}/Wh"] = value_hp - mqtt_data[f"{prefix}/kWh"] = round(value_hp / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(value_hp / 1000 * price_hp, 2) - # HC - begin_hc_day = datetime.strptime(stat.detail(week, "HC")["begin"], self.date_format).strftime("%A") - value_hc = stat.detail(week, "HC")["value"] - prefix = f"{sub_prefix}/week/{begin_hc_day}/hc" - mqtt_data[f"{prefix}/Wh"] = value_hc - mqtt_data[f"{prefix}/kWh"] = round(value_hc / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(value_hc / 1000 * price_hc, 2) - - for month in range(12): - month = month + 1 - # HP - get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP") - prefix = f"{sub_prefix}/month/{month}/hp" - mqtt_data[f"{prefix}/Wh"] = get_detail_month_hp["value"] - mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hp["value"] / 1000, 2) - mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hp["value"] / 1000 * price_hp, 2) - # HC - get_detail_month_hc = stat.get_month(year=year, month=month, measure_type="HC") - prefix = f"{sub_prefix}/month/{month}/hc" - mqtt_data[f"{prefix}/Wh"] = get_detail_month_hc["value"] - mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hc["value"] / 1000, 2) - mqtt_data[f"{prefix}/euro"] = 
round(get_detail_month_hc["value"] / 1000 * price_hc, 2) - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - - self.mqtt.publish_multiple(mqtt_data) - - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def detail_linear(self, price_hp, price_hc=0, measurement_direction="consumption"): - logging.info("Génération des données linéaires détaillées") - date_range = self.db.get_detail_date_range(self.usage_point_id) - stat = Stat(self.usage_point_id, measurement_direction) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = f"{self.usage_point_id}/{measurement_direction}/linear/{key}" - get_daily_year_linear_hp = stat.get_year_linear(idx, "HP") - get_daily_year_linear_hc = stat.get_year_linear(idx, "HC") - get_detail_month_linear_hp = stat.get_month_linear(idx, "HP") - get_detail_month_linear_hc = stat.get_month_linear(idx, "HC") - get_detail_week_linear_hp = stat.get_week_linear(idx, "HP") - get_detail_week_linear_hc = stat.get_week_linear( - idx, - "HC", - ) - mqtt_data = { - # thisYear - f"{sub_prefix}/thisYear/hp/Wh": get_daily_year_linear_hp["value"], - f"{sub_prefix}/thisYear/hp/kWh": round(get_daily_year_linear_hp["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hp/euro": round(get_daily_year_linear_hp["value"] / 1000 * price_hp, 2), - f"{sub_prefix}/thisYear/hc/Wh": get_daily_year_linear_hc["value"], - f"{sub_prefix}/thisYear/hc/kWh": 
round(get_daily_year_linear_hc["value"] / 1000, 2), - f"{sub_prefix}/thisYear/hc/euro": round(get_daily_year_linear_hc["value"] / 1000 * price_hc, 2), - # thisMonth - f"{sub_prefix}/thisMonth/hp/Wh": get_detail_month_linear_hp["value"], - f"{sub_prefix}/thisMonth/hp/kWh": round(get_detail_month_linear_hp["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hp/euro": round(get_detail_month_linear_hp["value"] / 1000 * price_hp, 2), - f"{sub_prefix}/thisMonth/hc/Wh": get_detail_month_linear_hc["value"], - f"{sub_prefix}/thisMonth/hc/kWh": round(get_detail_month_linear_hc["value"] / 1000, 2), - f"{sub_prefix}/thisMonth/hc/euro": round(get_detail_month_linear_hc["value"] / 1000 * price_hc, 2), - # thisWeek - f"{sub_prefix}/thisWeek/hp/Wh": get_detail_week_linear_hp["value"], - f"{sub_prefix}/thisWeek/hp/kWh": round(get_detail_week_linear_hp["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hp/euro": round(get_detail_week_linear_hp["value"] / 1000 * price_hp, 2), - f"{sub_prefix}/thisWeek/hc/Wh": get_detail_week_linear_hc["value"], - f"{sub_prefix}/thisWeek/hc/kWh": round(get_detail_week_linear_hc["value"] / 1000, 2), - f"{sub_prefix}/thisWeek/hc/euro": round(get_detail_week_linear_hc["value"] / 1000 * price_hc, 2), - } - - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def max_power(self): - logging.info("Génération des données de puissance max journalières.") - max_power_data = self.db.get_daily_max_power_all(self.usage_point_id, order="asc") - mqtt_data = {} - contract = self.db.get_contract(self.usage_point_id) - max_value = 0 - if max_power_data: - if 
hasattr(contract, "subscribed_power"): - max_value = int(contract.subscribed_power.split(" ")[0]) * 1000 - for data in max_power_data: - if data.event_date is not None: - date = data.event_date.strftime("%A") - sub_prefix = f"{self.usage_point_id}/power_max/{date}" - mqtt_data[f"{sub_prefix}/date"] = data.event_date.strftime("%Y-%m-%d") - mqtt_data[f"{sub_prefix}/event_hour"] = data.event_date.strftime("%H:%M:%S") - mqtt_data[f"{sub_prefix}/value"] = data.value - value_w = data.value - if max_value != 0 and max_value >= value_w: - mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 0 - else: - mqtt_data[f"{sub_prefix}/threshold_exceeded"] = 1 - threshold_usage = int(100 * value_w / max_value) - mqtt_data[f"{sub_prefix}/percentage_usage"] = threshold_usage - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def ecowatt(self): - logging.info("Génération des données Ecowatt") - begin = datetime.combine(datetime.now() - relativedelta(days=1), datetime.min.time()) - end = begin + timedelta(days=7) - ecowatt = self.db.get_ecowatt_range(begin, end) - today = datetime.combine(datetime.now(), datetime.min.time()) - mqtt_data = {} - if ecowatt: - for data in ecowatt: - if data.date == today: - queue = "j0" - elif data.date == today + timedelta(days=1): - queue = "j1" - else: - queue = "j2" - mqtt_data[f"ecowatt/{queue}/date"] = data.date.strftime(self.date_format_detail) - mqtt_data[f"ecowatt/{queue}/value"] = data.value - mqtt_data[f"ecowatt/{queue}/message"] = data.message - for date, value in ast.literal_eval(data.detail).items(): - date = datetime.strptime(date, self.date_format_detail).strftime("%H") - mqtt_data[f"ecowatt/{queue}/detail/{date}"] = value - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") - - def tempo(self): - logging.info("Envoie des données Tempo") - mqtt_data = {} - tempo_data = self.db.get_stat(self.usage_point_id, "price_consumption") - 
tempo_price = self.db.get_tempo_config("price") - if tempo_price: - for color, price in tempo_price.items(): - mqtt_data[f"tempo/price/{color}"] = price - tempo_days = self.db.get_tempo_config("days") - if tempo_days: - for color, days in tempo_days.items(): - mqtt_data[f"tempo/days/{color}"] = days - today = datetime.combine(datetime.now(), datetime.min.time()) - tempo_color = self.db.get_tempo_range(today, today) - if tempo_color: - mqtt_data[f"tempo/color/today"] = tempo_color[0].color - tomorrow = today + timedelta(days=1) - tempo_color = self.db.get_tempo_range(tomorrow, tomorrow) - if tempo_color: - mqtt_data[f"tempo/color/tomorrow"] = tempo_color[0].color - if tempo_data: - for year, data in ast.literal_eval(tempo_data[0].value).items(): - if year == datetime.now().strftime("%Y"): - year = "current" - for color, tempo in data["TEMPO"].items(): - mqtt_data[f"{self.usage_point_id}/consumption/annual/{year}/thisYear/tempo/{color}/Wh"] = round( - tempo["Wh"], 2 - ) - mqtt_data[f"{self.usage_point_id}/consumption/annual/{year}/thisYear/tempo/{color}/kWh"] = round( - tempo["kWh"], 2 - ) - mqtt_data[f"{self.usage_point_id}/consumption/annual/{year}/thisYear/tempo/{color}/euro"] = round( - tempo["euro"], 2 - ) - for month, month_data in data["month"].items(): - for month_color, month_tempo in month_data["TEMPO"].items(): - if month == datetime.strftime(datetime.now(), "%m"): - if month_tempo: - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/thisMonth/tempo/{month_color}/Wh" - ] = round(month_tempo["Wh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/thisMonth/tempo/{month_color}/kWh" - ] = round(month_tempo["kWh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/thisMonth/tempo/{month_color}/euro" - ] = round(month_tempo["euro"], 2) - if month_tempo: - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/month/{int(month)}/tempo/{month_color}/Wh" - ] = round(month_tempo["Wh"], 2) - mqtt_data[ - 
f"{self.usage_point_id}/consumption/annual/{year}/month/{int(month)}/tempo/{month_color}/kWh" - ] = round(month_tempo["kWh"], 2) - mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/month/{int(month)}/tempo/{month_color}/euro" - ] = round(month_tempo["euro"], 2) - self.mqtt.publish_multiple(mqtt_data) - logging.info(" => OK") - else: - logging.info(" => Pas de donnée") diff --git a/src/models/export_mqttv1.py b/src/models/export_mqttv1.py deleted file mode 100644 index 5231295c..00000000 --- a/src/models/export_mqttv1.py +++ /dev/null @@ -1,441 +0,0 @@ -import logging -from datetime import datetime - -from dateutil.relativedelta import relativedelta - -from dependencies import title -from init import CONFIG, DB -from models.mqtt import Mqtt - - -class ExportMqtt: - def __init__(self, usage_point_id, measurement_direction="consumption"): - self.config = CONFIG - self.db = DB - self.mqtt_config = (self.config.mqtt_config(),) - self.usage_point_id = usage_point_id - self.measurement_direction = measurement_direction - self.date_format = "%Y-%m-%d" - if "enable" in self.mqtt_config and self.mqtt_config["enable"]: - if ["hostname"] not in self.mqtt_config: - self.connect() - else: - logging.warning("MQTT config is incomplete.") - else: - logging.info("MQTT disable") - - def connect(self): - MQTT = Mqtt( - hostname=self.mqtt_config["hostname"], - port=self.mqtt_config["port"], - username=self.mqtt_config["username"], - password=self.mqtt_config["password"], - client_id=self.mqtt_config["client_id"], - prefix=self.mqtt_config["prefix"], - retain=self.mqtt_config["retain"], - qos=self.mqtt_config["qos"], - ) - MQTT.connect() - - def status(self): - title(f"[{self.usage_point_id}] Statut du compte.") - usage_point_id_config = self.db.get_usage_point(self.usage_point_id) - # consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - if ( - hasattr(usage_point_id_config, "consentement_expiration") - and 
usage_point_id_config.consentement_expiration is not None - ): - consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - else: - consentement_expiration_date = "" - if hasattr(usage_point_id_config, "call_number") and usage_point_id_config.call_number is not None: - call_number = usage_point_id_config.call_number - else: - call_number = "" - if hasattr(usage_point_id_config, "quota_reached") and usage_point_id_config.quota_reached is not None: - quota_reached = usage_point_id_config.quota_reached - else: - quota_reached = "" - if hasattr(usage_point_id_config, "quota_limit") and usage_point_id_config.quota_limit is not None: - quota_limit = usage_point_id_config.quota_limit - else: - quota_limit = "" - if hasattr(usage_point_id_config, "quota_reset_at") and usage_point_id_config.quota_reset_at is not None: - quota_reset_at = (usage_point_id_config.quota_reset_at.strftime("%Y-%m-%d %H:%M:%S"),) - else: - quota_reset_at = "" - if hasattr(usage_point_id_config, "last_call") and usage_point_id_config.last_call is not None: - last_call = (usage_point_id_config.last_call.strftime("%Y-%m-%d %H:%M:%S"),) - else: - last_call = "" - if hasattr(usage_point_id_config, "ban") and usage_point_id_config.ban is not None: - ban = usage_point_id_config.ban - else: - ban = "" - consentement_expiration = { - f"{self.usage_point_id}/status/consentement_expiration": consentement_expiration_date, - f"{self.usage_point_id}/status/call_number": str(call_number), - f"{self.usage_point_id}/status/quota_reached": str(quota_reached), - f"{self.usage_point_id}/status/quota_limit": str(quota_limit), - f"{self.usage_point_id}/status/quota_reset_at": str(quota_reset_at), - f"{self.usage_point_id}/status/last_call": str(last_call), - f"{self.usage_point_id}/status/ban": str(ban), - } - # print(consentement_expiration) - self.mqtt_config.publish_multiple(consentement_expiration) - title("Finish") - - def contract(self): - 
title(f"[{self.usage_point_id}] Exportation de données dans self.mqtt_config.") - - logging.info("Génération des messages du contrat") - contract_data = self.db.get_contract(self.usage_point_id) - if hasattr(contract_data, "__table__"): - output = {} - for column in contract_data.__table__.columns: - output[f"{self.usage_point_id}/contract/{column.name}"] = str(getattr(contract_data, column.name)) - self.mqtt_config.publish_multiple(output) - title("Finish") - else: - title("Failed") - - def address(self): - logging.info(f"[{self.usage_point_id}] Génération des messages d'addresse") - address_data = self.db.get_addresse(self.usage_point_id) - if hasattr(address_data, "__table__"): - output = {} - for column in address_data.__table__.columns: - output[f"{self.usage_point_id}/address/{column.name}"] = str(getattr(address_data, column.name)) - self.mqtt_config.publish_multiple(output) - title("Finish") - else: - title("Failed") - - def load_daily_data(self, begin, end, price, sub_prefix): - logging.info(f" {begin.strftime(self.date_format)} => {end.strftime(self.date_format)}") - prefix = f"{sub_prefix}" - self.mqtt_config.publish_multiple( - { - f"{prefix}/dateBegin": begin.strftime(self.date_format), - f"{prefix}/dateEnded": end.strftime(self.date_format), - } - ) - # DATA FORMATTING - this_year_watt = 0 - this_year_euro = 0 - this_year_begin = datetime.now() - this_year_end = datetime.now() - this_month_watt = 0 - this_month_euro = 0 - this_month_begin = datetime.now() - this_month_end = datetime.now() - month_watt = {} - month_euro = {} - month_begin = {} - month_end = {} - week_watt = {} - week_euro = {} - week_begin = datetime.now() - week_end = datetime.now() - week_idx = 0 - current_month_year = "" - current_this_month_year = "" - - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): - date = data.date - watt = data.value - kwatt = data.value / 1000 - euro = kwatt * price - this_year_begin = date - if this_year_end 
== "": - this_year_end = date - this_year_watt = this_year_watt + watt - this_year_euro = this_year_euro + euro - - if current_month_year == "": - current_month_year = date.strftime("%Y") - if date.strftime("%Y") == current_month_year: - if date.strftime("%m") not in month_watt: - month_watt[date.strftime("%m")] = watt - month_euro[date.strftime("%m")] = euro - month_end[date.strftime("%m")] = date - else: - month_watt[date.strftime("%m")] = month_watt[date.strftime("%m")] + watt - month_euro[date.strftime("%m")] = month_euro[date.strftime("%m")] + euro - month_begin[date.strftime("%m")] = date - - if week_idx < 7: - week_begin = date - if week_end == "": - week_end = date - if date not in week_watt: - week_watt[date] = watt - week_euro[date] = euro - else: - week_watt[date] = week_watt[date] + watt - week_euro[date] = week_euro[date] + euro - - if current_this_month_year == "": - current_this_month_year = date.strftime("%Y") - if date.strftime("%m") == datetime.now().strftime("%m") and date.strftime("%Y") == current_this_month_year: - this_month_begin = date - if this_month_end == "": - this_month_end = date - this_month_watt = this_month_watt + watt - this_month_euro = this_month_euro + euro - week_idx = week_idx + 1 - # MQTT FORMATTING - mqtt_data = { - f"{prefix}/thisYear/dateBegin": this_year_begin.strftime(self.date_format), - f"{prefix}/thisYear/dateEnd": this_year_end.strftime(self.date_format), - f"{prefix}/thisYear/base/Wh": this_year_watt, - f"{prefix}/thisYear/base/kWh": round(this_year_watt / 1000, 2), - f"{prefix}/thisYear/base/euro": round(this_year_euro, 2), - f"{prefix}/thisMonth/dateBegin": this_month_begin.strftime(self.date_format), - f"{prefix}/thisMonth/dateEnd": this_month_end.strftime(self.date_format), - f"{prefix}/thisMonth/base/Wh": this_month_watt, - f"{prefix}/thisMonth/base/kWh": round(this_month_watt / 1000, 2), - f"{prefix}/thisMonth/base/euro": round(this_month_euro, 2), - f"{prefix}/thisWeek/dateBegin": 
week_begin.strftime(self.date_format), - f"{prefix}/thisWeek/dateEnd": week_end.strftime(self.date_format), - } - for date, watt in month_watt.items(): - mqtt_data[f"{prefix}/months/{date}/base/Wh"] = watt - mqtt_data[f"{prefix}/months/{date}/base/kWh"] = round(watt / 1000, 2) - for date, euro in month_euro.items(): - mqtt_data[f"{prefix}/months/{date}/base/euro"] = round(euro, 2) - for date, value in month_begin.items(): - mqtt_data[f"{prefix}/months/{date}/dateBegin"] = value.strftime(self.date_format) - for date, value in month_end.items(): - mqtt_data[f"{prefix}/months/{date}/dateEnd"] = value.strftime(self.date_format) - - for date, watt in week_watt.items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/date"] = date.strftime(self.date_format) - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/base/Wh"] = watt - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/base/kWh"] = round(watt / 1000, 2) - for date, euro in week_euro.items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/base/euro"] = round(euro, 2) - - # SEND TO self.mqtt_config - self.mqtt_config.publish_multiple(mqtt_data) - - def daily_annual(self, price): - logging.info("Génération des données annuelles") - date_range = self.db.get_daily_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - if date_begin_current.strftime("%Y") == datetime.now().strftime("%Y"): - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/{date_begin_current.strftime('%Y')}" - self.load_daily_data(date_begin_current, date_end, price, sub_prefix) - # CALCUL NEW DATE - if 
date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - title("Finish") - else: - title("No data") - - def daily_linear(self, price): - logging.info("Génération des données linéaires") - date_range = self.db.get_daily_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/linear/{key}" - self.load_daily_data(date_begin_current, date_end, price, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - title("Finish") - else: - title("No data") - - def load_detail_data(self, begin, end, price_hp, price_hc, sub_prefix): - logging.info(f" {begin.strftime(self.date_format)} => {end.strftime(self.date_format)}") - prefix = f"{sub_prefix}" - # DATA FORMATTING - week_idx = 0 - current_month_year = "" - current_this_month_year = "" - output = { - "hp": { - "this_year_watt": 0, - "this_year_euro": 0, - "month_watt": {}, - "month_euro": {}, - "week_watt": {}, - "week_euro": {}, - "this_month_watt": 0, - "this_month_euro": 0, - }, - "hc": { - "this_year_watt": 0, - "this_year_euro": 0, - "month_watt": {}, - "month_euro": 
{}, - "week_watt": {}, - "week_euro": {}, - "this_month_watt": 0, - "this_month_euro": 0, - }, - "base": { - "this_year_watt": 0, - "this_year_euro": 0, - "month_watt": {}, - "month_euro": {}, - "week_watt": {}, - "week_euro": {}, - "this_month_watt": 0, - "this_month_euro": 0, - }, - } - - for data in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): - date = data.date - watt = data.value / (60 / data.interval) - kwatt = watt / 1000 - - measure_type = data.measure_type.lower() - output[measure_type]["this_year_watt"] = output[measure_type]["this_year_watt"] + watt - if measure_type == "hp": - euro = kwatt * price_hp - else: - euro = kwatt * price_hc - output[measure_type]["this_year_euro"] = output[measure_type]["this_year_euro"] + euro - - if current_month_year == "": - current_month_year = date.strftime("%Y") - if date.strftime("%Y") == current_month_year: - if date.strftime("%m") not in output[measure_type]["month_watt"]: - output[measure_type]["month_watt"][date.strftime("%m")] = watt - output[measure_type]["month_euro"][date.strftime("%m")] = euro - else: - output[measure_type]["month_watt"][date.strftime("%m")] = ( - output[measure_type]["month_watt"][date.strftime("%m")] + watt - ) - output[measure_type]["month_euro"][date.strftime("%m")] = ( - output[measure_type]["month_euro"][date.strftime("%m")] + euro - ) - - if week_idx < 7: - if date not in output[measure_type]["week_watt"]: - output[measure_type]["week_watt"][date] = watt - output[measure_type]["week_euro"][date] = euro - else: - output[measure_type]["week_watt"][date] = output[measure_type]["week_watt"][date] + watt - output[measure_type]["week_euro"][date] = output[measure_type]["week_euro"][date] + euro - - # print(output) - - if current_this_month_year == "": - current_this_month_year = date.strftime("%Y") - if date.strftime("%m") == datetime.now().strftime("%m") and date.strftime("%Y") == current_this_month_year: - output[measure_type]["this_month_watt"] = 
output[measure_type]["this_month_watt"] + watt - output[measure_type]["this_month_euro"] = output[measure_type]["this_month_euro"] + euro - week_idx = week_idx + 1 - - # MQTT FORMATTING - for measure_type, data in output.items(): - mqtt_data = { - f"{prefix}/thisYear/{measure_type}/Wh": output[measure_type]["this_year_watt"], - f"{prefix}/thisYear/{measure_type}/kWh": round(output[measure_type]["this_year_watt"] / 1000, 2), - f"{prefix}/thisYear/{measure_type}/euro": round(output[measure_type]["this_year_euro"], 2), - f"{prefix}/thisMonth/{measure_type}/Wh": output[measure_type]["this_month_watt"], - f"{prefix}/thisMonth/{measure_type}/kWh": round(output[measure_type]["this_month_watt"] / 1000, 2), - f"{prefix}/thisMonth/{measure_type}/euro": round(output[measure_type]["this_month_euro"], 2), - } - for date, watt in output[measure_type]["month_watt"].items(): - mqtt_data[f"{prefix}/months/{date}/{measure_type}/Wh"] = watt - mqtt_data[f"{prefix}/months/{date}/{measure_type}/kWh"] = round(watt / 1000, 2) - for date, euro in output[measure_type]["month_euro"].items(): - mqtt_data[f"{prefix}/months/{date}/{measure_type}/euro"] = round(euro, 2) - - for date, watt in output[measure_type]["week_watt"].items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/{measure_type}/Wh"] = watt - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/{measure_type}/kWh"] = round(watt / 1000, 2) - for date, euro in output[measure_type]["week_euro"].items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/{measure_type}/euro"] = round(euro, 2) - - # SEND TO MQTT - self.mqtt_config.publish_multiple(mqtt_data) - - def detail_annual(self, price_hp, price_hc=0): - logging.info("Génération des données annuelles détaillées") - date_range = self.db.get_detail_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - 
date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - if date_begin_current.strftime("%Y") == datetime.now().strftime("%Y"): - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/{date_begin_current.strftime('%Y')}" - self.load_detail_data(date_begin_current, date_end, price_hp, price_hc, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - title("Finish") - else: - title("No data") - - def detail_linear(self, price_hp, price_hc=0): - logging.info("Génération des données linéaires détaillées") - date_range = self.db.get_detail_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/linear/{key}" - self.load_detail_data(date_begin_current, date_end, price_hp, price_hc, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - title("Finish") - else: - title("No data") diff --git 
a/src/models/influxdb.py b/src/models/influxdb.py deleted file mode 100644 index d7865c0c..00000000 --- a/src/models/influxdb.py +++ /dev/null @@ -1,216 +0,0 @@ -import datetime -import logging - -import influxdb_client -from dateutil.tz import tzlocal -from influxdb_client.client.util import date_utils -from influxdb_client.client.util.date_utils import DateHelper -from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS - -from dependencies import separator, separator_warning, title - - -class InfluxDB: - def __init__( - self, - scheme: str, - hostname: str, - port: int, - token: str, - org: str = "myelectricaldata.fr", - bucket: str = "myelectricaldata", - method="SYNCHRONOUS", - write_options=None, - ): - if write_options is None: - write_options = {} - self.scheme = scheme - self.hostname = hostname - self.port = port - self.token = token - self.org = org - self.bucket = bucket - self.influxdb = {} - self.query_api = {} - self.write_api = {} - self.delete_api = {} - self.buckets_api = {} - self.method = method - self.write_options = {} - if "batch_size" in write_options: - self.write_options["batch_size"] = write_options["batch_size"] - else: - self.write_options["batch_size"] = 1000 - if "flush_interval" in write_options: - self.write_options["flush_interval"] = write_options["flush_interval"] - else: - self.write_options["flush_interval"] = 1000 - if "jitter_interval" in write_options: - self.write_options["jitter_interval"] = write_options["jitter_interval"] - else: - self.write_options["jitter_interval"] = 0 - if "retry_interval" in write_options: - self.write_options["retry_interval"] = write_options["retry_interval"] - else: - self.write_options["retry_interval"] = 5000 - if "max_retry_time" in write_options: - self.write_options["max_retry_time"] = write_options["max_retry_time"] - else: - self.write_options["max_retry_time"] = "180_000" - if "max_retries" in write_options: - self.write_options["max_retries"] = write_options["max_retries"] 
- else: - self.write_options["max_retries"] = 5 - if "max_retry_delay" in write_options: - self.write_options["max_retry_delay"] = write_options["max_retry_delay"] - else: - self.write_options["max_retry_delay"] = 125_000 - if "exponential_base" in write_options: - self.write_options["exponential_base"] = write_options["exponential_base"] - else: - self.write_options["exponential_base"] = 2 - self.connect() - self.retention = 0 - self.max_retention = None - self.get_list_retention_policies() - if self.retention != 0: - day = int(self.retention / 60 / 60 / 24) - logging.warning(f" ATTENTION, InfluxDB est configuré avec une durée de rétention de {day} jours.") - logging.warning( - f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." - ) - else: - logging.warning(" => Aucune durée de rétention de données détectée.") - - def connect(self): - separator() - logging.info(f"Connect to InfluxDB {self.hostname}:{self.port}") - date_utils.date_helper = DateHelper(timezone=tzlocal()) - self.influxdb = influxdb_client.InfluxDBClient( - url=f"{self.scheme}://{self.hostname}:{self.port}", - token=self.token, - org=self.org, - timeout="600000", - ) - health = self.influxdb.health() - if health.status == "pass": - title("Connection success") - else: - logging.critical( - """ - -Impossible de se connecter à la base influxdb. - -Vous pouvez récupérer un exemple ici : -https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file -""" - ) - exit(1) - - title(f"Méthode d'importation : {self.method.upper()}") - if self.method.upper() == "ASYNCHRONOUS": - logging.warning( - ' ATTENTION, le mode d\'importation "ASYNCHRONOUS" est très consommateur de ressources système.' 
- ) - self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) - elif self.method.upper() == "SYNCHRONOUS": - self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) - else: - self.write_api = self.influxdb.write_api( - write_options=influxdb_client.WriteOptions( - batch_size=self.write_options["batch_size"], - flush_interval=self.write_options["flush_interval"], - jitter_interval=self.write_options["jitter_interval"], - retry_interval=self.write_options["retry_interval"], - max_retries=self.write_options["max_retries"], - max_retry_delay=self.write_options["max_retry_delay"], - exponential_base=self.write_options["exponential_base"], - ) - ) - self.query_api = self.influxdb.query_api() - self.delete_api = self.influxdb.delete_api() - self.buckets_api = self.influxdb.buckets_api() - self.get_list_retention_policies() - - def purge_influxdb(self): - separator_warning() - logging.warning(f"Wipe influxdb database {self.hostname}:{self.port}") - start = "1970-01-01T00:00:00Z" - stop = datetime.datetime.utcnow() - measurement = [ - "consumption", - "production", - "consumption_detail", - "production_detail", - ] - for mesure in measurement: - self.delete_api.delete(start, stop, f'_measurement="{mesure}"', self.bucket, org=self.org) - # CONFIG.set("wipe_influxdb", False) - logging.warning(f" => Data reset") - - def get_list_retention_policies(self): - if self.org == f"-": # InfluxDB 1.8 - self.retention = 0 - self.max_retention = 0 - return - else: - buckets = self.buckets_api.find_buckets().buckets - for bucket in buckets: - if bucket.name == self.bucket: - self.retention = bucket.retention_rules[0].every_seconds - self.max_retention = datetime.datetime.now() - datetime.timedelta(seconds=self.retention) - - def get(self, start, end, measurement): - if self.org != f"-": - query = f""" -from(bucket: "{self.bucket}") - |> range(start: {start}, stop: {end}) - |> filter(fn: (r) => r["_measurement"] == "{measurement}") -""" - logging.debug(query) - 
output = self.query_api.query(query) - else: - # Skip for InfluxDB 1.8 - output = [] - return output - - def count(self, start, end, measurement): - if self.org != f"-": - query = f""" -from(bucket: "{self.bucket}") - |> range(start: {start}, stop: {end}) - |> filter(fn: (r) => r["_measurement"] == "{measurement}") - |> filter(fn: (r) => r["_field"] == "Wh") - |> count() - |> yield(name: "count") -""" - logging.debug(query) - output = self.query_api.query(query) - else: - # Skip for InfluxDB 1.8 - output = [] - return output - - def delete(self, date, measurement): - self.delete_api.delete(date, date, f'_measurement="{measurement}"', self.bucket, org=self.org) - - def write(self, tags, date=None, fields=None, measurement="log"): - date_max = self.max_retention - if date is None: - date_object = datetime.datetime.now() - else: - date_object = date - if self.retention == 0 or (date.replace(tzinfo=None) > date_max.replace(tzinfo=None)): - record = { - "measurement": measurement, - "time": date_object, - "tags": {}, - "fields": {}, - } - if tags: - for key, value in tags.items(): - record["tags"][key] = value - if fields is not None: - for key, value in fields.items(): - record["fields"][key] = value - self.write_api.write(bucket=self.bucket, org=self.org, record=record) diff --git a/src/models/jobs.py b/src/models/jobs.py index 3e0eaa8c..4fd39c1d 100644 --- a/src/models/jobs.py +++ b/src/models/jobs.py @@ -1,149 +1,156 @@ +"""This module contains the Job class, which is responsible for importing data from the API.""" + import logging import time import traceback -from os import environ, getenv - -from dependencies import export_finish, finish, get_version, log_usage_point_id, str2bool, title -from init import CONFIG, DB -from models.export_home_assistant import HomeAssistant -from models.export_home_assistant_ws import HomeAssistantWs -from models.export_influxdb import ExportInfluxDB -from models.export_mqtt import ExportMqtt -from models.query_address import Address 
-from models.query_contract import Contract -from models.query_daily import Daily -from models.query_detail import Detail -from models.query_ecowatt import Ecowatt -from models.query_power import Power -from models.query_status import Status -from models.query_tempo import Tempo +from typing import List + +from config.main import APP_CONFIG +from config.myelectricaldata import UsagePointId +from database import DB +from database.usage_points import DatabaseUsagePoints +from external_services.home_assistant.main import HomeAssistant +from external_services.home_assistant_ws.main import HomeAssistantWs +from external_services.influxdb.main import ExportInfluxDB +from external_services.mqtt.main import ExportMqtt +from external_services.myelectricaldata.address import Address +from external_services.myelectricaldata.contract import Contract +from external_services.myelectricaldata.daily import Daily +from external_services.myelectricaldata.detail import Detail +from external_services.myelectricaldata.ecowatt import Ecowatt +from external_services.myelectricaldata.power import Power +from external_services.myelectricaldata.status import Status +from external_services.myelectricaldata.tempo import Tempo from models.stat import Stat +from utils import export_finish, finish, get_version, log_usage_point_id, title class Job: + """Represents a job for importing data.""" + def __init__(self, usage_point_id=None): - self.config = CONFIG - self.db = DB self.usage_point_id = usage_point_id - self.usage_point_config = {} - self.mqtt_config = self.config.mqtt_config() - self.home_assistant_config = self.config.home_assistant_config() - self.home_assistant_ws_config = self.config.home_assistant_ws_config() - self.influxdb_config = self.config.influxdb_config() - self.wait_job_start = 10 - self.tempo_enable = False - + self.usage_point_config: UsagePointId = {} + self.wait_job_start: int = 10 + self.tempo_enable: bool = False if self.usage_point_id is None: - self.usage_points = 
self.db.get_usage_point_all() + self.usage_points_all: List[UsagePointId] = DatabaseUsagePoints().get_all() else: - self.usage_points = [self.db.get_usage_point(self.usage_point_id)] + self.usage_points_all: List[UsagePointId] = [DatabaseUsagePoints(self.usage_point_id).get()] def boot(self): - if str2bool(getenv("DEV")) or str2bool(getenv("DEBUG")): + """Boots the import job.""" + if APP_CONFIG.dev: logging.warning("=> Import job disable") else: self.job_import_data() - def job_import_data(self, wait=True, target=None): - if self.db.lock_status(): + def job_import_data(self, wait=True, target=None): # noqa: PLR0912, C901 + """Import data from the API.""" + if DB.lock_status(): return {"status": False, "notif": "Importation déjà en cours..."} - else: - self.db.lock() - - if wait: - title("Démarrage du job d'importation dans 10s") - i = self.wait_job_start - while i > 0: - logging.info(f"{i}s") - time.sleep(1) - i = i - 1 - - if target == "gateway_status" or target is None: - self.get_gateway_status() - - # ###################################################################################################### - # FETCH TEMPO DATA - if target == "tempo" or target is None: - self.get_tempo() - - # ###################################################################################################### - # FETCH ECOWATT DATA - if target == "ecowatt" or target is None: - self.get_ecowatt() - - for self.usage_point_config in self.usage_points: - self.usage_point_id = self.usage_point_config.usage_point_id - log_usage_point_id(self.usage_point_id) - self.db.last_call_update(self.usage_point_id) - if self.usage_point_config.enable: - ####################################################################################################### - # CHECK ACCOUNT DATA - if target == "account_status" or target is None: - self.get_account_status() - - ####################################################################################################### - # CONTRACT - if target == 
"contract" or target is None: - self.get_contract() - - ####################################################################################################### - # ADDRESSE - if target == "addresses" or target is None: - self.get_addresses() - - ####################################################################################################### - # CONSUMPTION / PRODUCTION - if target == "consumption" or target is None: - self.get_consumption() - - if target == "consumption_detail" or target is None: - self.get_consumption_detail() - - if target == "production" or target is None: - self.get_production() - - if target == "production_detail" or target is None: - self.get_production_detail() - - if target == "consumption_max_power" or target is None: - self.get_consumption_max_power() - - ####################################################################################################### - # STATISTIQUES - if target == "stat" or target is None: - self.stat_price() - - ####################################################################################################### - # MQTT - if target == "mqtt" or target is None: - self.export_mqtt() - - ####################################################################################################### - # HOME ASSISTANT - if target == "home_assistant" or target is None: - self.export_home_assistant() - - ####################################################################################################### - # HOME ASSISTANT WS - if target == "home_assistant_ws" or target is None: - self.export_home_assistant_ws() - - ####################################################################################################### - # INFLUXDB - if target == "influxdb" or target is None: - self.export_influxdb() - else: - logging.info( - f" => Point de livraison Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)." 
- ) + DB.lock() + + if wait: + title("Démarrage du job d'importation dans 10s") + i = self.wait_job_start + while i > 0: + logging.info(f"{i}s") + time.sleep(1) + i = i - 1 + + # ###################################################################################################### + # FETCH TEMPO DATA + if target == "tempo" or target is None: + self.get_tempo() + + # ###################################################################################################### + # FETCH ECOWATT DATA + if target == "ecowatt" or target is None: + self.get_ecowatt() + + for usage_point_config in self.usage_points_all: + self.usage_point_config = usage_point_config + usage_point_id = usage_point_config.usage_point_id + log_usage_point_id(usage_point_id) + DatabaseUsagePoints(usage_point_id).last_call_update() + if usage_point_config.enable: + ####################################################################################################### + # CHECK ACCOUNT DATA + if target == "account_status" or target is None: + self.get_account_status() + + ####################################################################################################### + # CONTRACT + if target == "contract" or target is None: + self.get_contract() + + ####################################################################################################### + # ADDRESSE + if target == "addresses" or target is None: + self.get_addresses() + + ####################################################################################################### + # CONSUMPTION / PRODUCTION + if target == "consumption" or target is None: + self.get_consumption() + + if target == "consumption_detail" or target is None: + self.get_consumption_detail() + + if target == "production" or target is None: + self.get_production() + + if target == "production_detail" or target is None: + self.get_production_detail() + + if target == "consumption_max_power" or target is None: + self.get_consumption_max_power() + + 
####################################################################################################### + # STATISTIQUES + if target == "stat" or target is None: + self.stat_price() + + ####################################################################################################### + # MQTT + if target == "mqtt" or target is None: + self.export_mqtt() + + ####################################################################################################### + # HOME ASSISTANT + if target == "home_assistant" or target is None: + self.export_home_assistant() + + ####################################################################################################### + # HOME ASSISTANT WS + if target == "home_assistant_ws" or target is None: + self.export_home_assistant_ws() + + ####################################################################################################### + # INFLUXDB + if target == "influxdb" or target is None: + self.export_influxdb() + else: + logging.info( + " => Point de livraison Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)." + ) - finish() + finish() - self.usage_point_id = None - self.db.unlock() - return {"status": True, "notif": "Importation terminée"} + self.usage_point_id = None + DB.unlock() + return {"status": True, "notif": "Importation terminée"} def header_generate(self, token=True): + """Generate the header for the API request. + + Args: + token (bool, optional): Whether to include the authorization token in the header. Defaults to True. + + Returns: + dict: The generated header as a dictionary. + """ output = { "Content-Type": "application/json", "call-service": "myelectricaldata", @@ -154,7 +161,15 @@ def header_generate(self, token=True): return output def get_gateway_status(self): - detail = "Récupération du statut de la passerelle :" + """Retrieve the status of the gateway. + + This method retrieves the status of the gateway by pinging it. 
If an error occurs during the process, + it logs the error message. + + Returns: + None + """ + detail = "Récupération du statut de la passerelle" try: title(detail) Status(headers=self.header_generate(token=False)).ping() @@ -164,37 +179,62 @@ def get_gateway_status(self): logging.error(e) def get_account_status(self): + """Retrieve the account status information. + + This method retrieves the account status information for the usage point(s). + It sets the error log if there is an error in the status response. + + Returns: + None + """ detail = "Récupération des informations du compte" - def run(usage_point_config): - usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + def run(): + usage_point_id = self.usage_point_config.usage_point_id + title(f"[{usage_point_id}] {detail}") status = Status(headers=self.header_generate()).status(usage_point_id=usage_point_id) - if "error" in status and status["error"]: + if status.get("error"): message = f'{status["status_code"]} - {status["description"]["detail"]}' - self.db.set_error_log(usage_point_id, message) + DatabaseUsagePoints(usage_point_id).set_error_log(message) else: - self.db.set_error_log(usage_point_id, None) + DatabaseUsagePoints(usage_point_id).set_error_log(None) export_finish() try: - if self.usage_point_id is None: - for usage_point_config in self.usage_points: - if usage_point_config.enable: - run(usage_point_config) + if self.usage_point_config is None: + for usage_point_config in self.usage_points_all: + self.usage_point_config = usage_point_config + if self.usage_point_config.enable: + run() else: - run(self.usage_point_config) + run() except Exception as e: traceback.print_exc() logging.error(f"Erreur lors de la {detail.lower()}") logging.error(e) def get_contract(self): + """Retrieve contract information for the usage points. + + This method iterates over the list of usage points and retrieves the contract information + for each enabled usage point. 
If a specific usage point ID is provided, it retrieves the + contract information only for that usage point. + + Args: + self: The current instance of the Jobs class. + + Returns: + None + + Raises: + Exception: If an error occurs during the retrieval of contract information. + + """ detail = "Récupération des informations contractuelles" def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") Contract( headers=self.header_generate(), usage_point_id=usage_point_id, @@ -204,7 +244,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -215,17 +255,29 @@ def run(usage_point_config): logging.error(e) def get_addresses(self): + """Retrieve the postal addresses for the usage points. + + This method iterates over the list of usage points and retrieves the postal addresses + for each enabled usage point. It calls the `Address.get()` method to fetch the addresses + and then calls the `export_finish()` function to indicate the completion of the export. + + If a specific usage point ID is provided, only that usage point will be processed. + + Raises: + Exception: If an error occurs during the retrieval of postal addresses. 
+ + """ detail = "Récupération des coordonnées postales" def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") Address(headers=self.header_generate(), usage_point_id=usage_point_id).get() export_finish() try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -236,11 +288,12 @@ def run(usage_point_config): logging.error(e) def get_consumption(self): + """Get consumption from gateway.""" detail = "Récupération de la consommation journalière" def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") if hasattr(usage_point_config, "consumption") and usage_point_config.consumption: Daily(headers=self.header_generate(), usage_point_id=usage_point_id).get() export_finish() @@ -249,7 +302,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -260,11 +313,12 @@ def run(usage_point_config): logging.error(e) def get_consumption_detail(self): + """Retrieve the detailed consumption.""" detail = "Récupération de la consommation détaillée" def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") if hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail: Detail(headers=self.header_generate(), usage_point_id=usage_point_id).get() export_finish() @@ -273,7 +327,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in 
self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -284,11 +338,12 @@ def run(usage_point_config): logging.error(e) def get_production(self): + """Retrieve the detailed production.""" detail = "Récupération de la production journalière" def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") if hasattr(usage_point_config, "production") and usage_point_config.production: Daily( headers=self.header_generate(), @@ -301,7 +356,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -312,11 +367,12 @@ def run(usage_point_config): logging.error(e) def get_production_detail(self): + """Get production detail from gateway.""" detail = "Récupération de la production détaillée" def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") if hasattr(usage_point_config, "production_detail") and usage_point_config.production_detail: Detail( headers=self.header_generate(), @@ -329,7 +385,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -340,33 +396,38 @@ def run(usage_point_config): logging.error(e) def get_consumption_max_power(self): + """Get max power from gateway.""" detail = "Récupération de la puissance maximum journalière" - def run(usage_point_config): - usage_point_id = usage_point_config.usage_point_id - title(f"[{self.usage_point_id}] {detail} :") - Power(headers=self.header_generate(), usage_point_id=usage_point_id).get() - export_finish() + def run(usage_point_id: str, 
usage_point_config: UsagePointId) -> None: + title(f"[{usage_point_id}] {detail}") + if getattr(usage_point_config, "consumption_max_power", True): + Power(headers=self.header_generate(), usage_point_id=usage_point_id).get() + export_finish() + else: + logging.info(f"{detail} désactivée sur le point de livraison") try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + usage_point_config: UsagePointId + for usage_point_id, usage_point_config in APP_CONFIG.myelectricaldata.usage_point_config.items(): if usage_point_config.enable: - run(usage_point_config) + run(usage_point_id, usage_point_config) else: - run(self.usage_point_config) + run(self.usage_point_id, APP_CONFIG.myelectricaldata.usage_point_config[self.usage_point_id]) except Exception as e: traceback.print_exc() logging.error(f"Erreur lors de la {detail.lower()}") logging.error(e) def get_tempo(self): + """Get tempo from gateway.""" try: - title(f"Récupération des données Tempo :") + title("Récupération des données Tempo") Tempo().fetch() - title(f"Calcul des jours Tempo :") + title("Calcul des jours Tempo") Tempo().calc_day() - title(f"Récupération des tarifs Tempo :") + title("Récupération des tarifs Tempo") Tempo().fetch_price() export_finish() except Exception as e: @@ -375,8 +436,9 @@ def get_tempo(self): logging.error(e) def get_ecowatt(self): + """Get ecowatt from gateway.""" try: - title(f"Récupération des données EcoWatt :") + title("Récupération des données EcoWatt") Ecowatt().fetch() export_finish() except Exception as e: @@ -385,11 +447,12 @@ def get_ecowatt(self): logging.error(e) def stat_price(self): + """Stat price.""" detail = "Génération des statistiques Tarifaire de consommation/production " def run(usage_point_config): usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail} :") + title(f"[{usage_point_id}] {detail}") if hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail: 
logging.info("Consommation :") Stat(usage_point_id=usage_point_id, measurement_direction="consumption").generate_price() @@ -400,7 +463,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -411,10 +474,10 @@ def run(usage_point_config): logging.error(e) def export_home_assistant(self, target=None): + """Export data to Home Assistant.""" detail = "Exportation des données vers Home Assistant (via MQTT)" - def run(usage_point_config, target): - usage_point_id = usage_point_config.usage_point_id + def run(usage_point_id, target): title(f"[{usage_point_id}] {detail}") if target is None: HomeAssistant(usage_point_id).export() @@ -423,14 +486,14 @@ def run(usage_point_config, target): export_finish() try: - if "enable" in self.home_assistant_config and str2bool(self.home_assistant_config["enable"]): - if "enable" in self.mqtt_config and str2bool(self.mqtt_config["enable"]): + if APP_CONFIG.home_assistant: + if APP_CONFIG.mqtt: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: - run(usage_point_config, target) + run(usage_point_config.usage_point_id, target) else: - run(self.usage_point_config, target) + run(self.usage_point_id, target) else: logging.critical( "L'export Home Assistant est dépendant de MQTT, " @@ -444,122 +507,31 @@ def run(usage_point_config, target): logging.error(e) def export_home_assistant_ws(self): + """Export to Home Assistant Energy.""" detail = "Import des données vers l'onglet Energy de Home Assistant (WebSocket)" usage_point_id = self.usage_point_config.usage_point_id title(f"[{usage_point_id}] {detail}") - if ( - self.home_assistant_ws_config - and "enable" in self.home_assistant_ws_config - and str2bool(self.home_assistant_ws_config["enable"]) - ): + if 
APP_CONFIG.home_assistant_ws.enable: HomeAssistantWs(usage_point_id) else: title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") def export_influxdb(self): - detail = "Export InfluxDB" - - def run(usage_point_config): - usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id} {detail}") - export_influxdb = ExportInfluxDB(self.influxdb_config, usage_point_config) - if hasattr(usage_point_config, "consumption") and usage_point_config.consumption: - export_influxdb.daily() - if hasattr(usage_point_config, "production") and usage_point_config.production: - export_influxdb.daily(measurement_direction="production") - if hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail: - export_influxdb.detail() - if hasattr(usage_point_config, "production_detail") and usage_point_config.production_detail: - export_influxdb.detail(measurement_direction="production") - tempo_config = self.config.tempo_config() - if tempo_config and "enable" in tempo_config and tempo_config["enable"]: - export_influxdb.tempo() - export_influxdb.ecowatt() - export_finish() - - try: - if "enable" in self.influxdb_config and self.influxdb_config["enable"]: - if self.usage_point_id is None: - for usage_point_config in self.usage_points: - if usage_point_config.enable: - run(usage_point_config) - else: - run(self.usage_point_config) - else: - title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") - except Exception as e: - traceback.print_exc() - logging.error(f"Erreur lors de l'{detail.lower()}") - logging.error(e) + """Export to influxdb data.""" + detail = "Import des données vers InfluxDB" + usage_point_id = self.usage_point_config.usage_point_id + title(f"[{usage_point_id}] {detail}") + if APP_CONFIG.influxdb.enable: + ExportInfluxDB(usage_point_id) + else: + title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") def export_mqtt(self): - detail = "Export MQTT" - - 
def run(usage_point_config): - usage_point_id = usage_point_config.usage_point_id - title(f"[{usage_point_id}] {detail}") - export_mqtt = ExportMqtt(usage_point_id) - export_mqtt.status() - export_mqtt.contract() - export_mqtt.address() - export_mqtt.ecowatt() - if (hasattr(usage_point_config, "consumption") and usage_point_config.consumption) or ( - hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail - ): - export_mqtt.tempo() - if hasattr(usage_point_config, "consumption") and usage_point_config.consumption: - export_mqtt.daily_annual( - usage_point_config.consumption_price_base, - measurement_direction="consumption", - ) - export_mqtt.daily_linear( - usage_point_config.consumption_price_base, - measurement_direction="consumption", - ) - if hasattr(usage_point_config, "production") and usage_point_config.production: - export_mqtt.daily_annual( - usage_point_config.production_price, - measurement_direction="production", - ) - export_mqtt.daily_linear( - usage_point_config.production_price, - measurement_direction="production", - ) - if hasattr(usage_point_config, "consumption_detail") and usage_point_config.consumption_detail: - export_mqtt.detail_annual( - usage_point_config.consumption_price_hp, - usage_point_config.consumption_price_hc, - measurement_direction="consumption", - ) - export_mqtt.detail_linear( - usage_point_config.consumption_price_hp, - usage_point_config.consumption_price_hc, - measurement_direction="consumption", - ) - if hasattr(usage_point_config, "production_detail") and usage_point_config.production_detail: - export_mqtt.detail_annual( - usage_point_config.production_price, - measurement_direction="production", - ) - export_mqtt.detail_linear( - usage_point_config.production_price, - measurement_direction="production", - ) - if hasattr(usage_point_config, "consumption_max_power") and usage_point_config.consumption_max_power: - export_mqtt.max_power() - export_finish() - - try: - if "enable" in 
self.mqtt_config and self.mqtt_config["enable"]: - if self.usage_point_id is None: - for usage_point_config in self.usage_points: - if usage_point_config.enable: - run(usage_point_config) - else: - run(self.usage_point_config) - else: - title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") - except Exception as e: - traceback.print_exc() - logging.error(f"Erreur lors de la {detail.lower()}") - logging.error(e) + """MQTT Export.""" + detail = "Import des données vers MQTT" + usage_point_id = self.usage_point_config.usage_point_id + title(f"[{usage_point_id}] {detail}") + if APP_CONFIG.mqtt.enable: + ExportMqtt(usage_point_id) + else: + title("Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)") diff --git a/src/models/mqtt.py b/src/models/mqtt.py deleted file mode 100644 index 1dcb3817..00000000 --- a/src/models/mqtt.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging - -import paho.mqtt.publish as publish -from paho.mqtt import client as mqtt - -from dependencies import separator, title - - -class Mqtt: - def __init__( - self, - hostname, - username="", - password="", - client_id="myelectricaldata", - prefix="myelectricaldata", - retain=True, - qos=0, - port=1883, - ca_cert=None, - ): - self.hostname = hostname - self.port = port - self.username = username - self.password = password - self.client_id = client_id - self.prefix = prefix - self.retain = retain - self.qos = qos - - self.client = {} - self.ca_cert = ca_cert - self.connect() - - def connect(self): - separator() - logging.info(f"Connect to MQTT broker {self.hostname}:{self.port}") - try: - self.client = mqtt.Client(self.client_id) - if self.username != "" and self.password != "": - self.client.username_pw_set(self.username, self.password) - if self.ca_cert: - logging.info(f"Using ca_cert: {self.ca_cert}") - self.client.tls_set(ca_certs=self.ca_cert) - self.client.connect(self.hostname, self.port) - self.client.loop_start() - title("Connection success") - except 
Exception as e: - logging.critical(["MQTT Connexion failed", e]) - - def publish(self, topic, msg, prefix=None): - if prefix is None: - prefix = self.prefix - result = self.client.publish(f"{self.prefix}/{prefix}/{topic}", str(msg), qos=self.qos, retain=self.retain) - status = result[0] - if status == 0: - logging.debug(f" MQTT Send : {prefix}/{topic} => {msg}") - else: - logging.info(f" - Failed to send message to topic {prefix}/{topic}") - - def publish_multiple(self, data, prefix=None): - if data: - payload = [] - if prefix is None: - prefix = self.prefix - else: - prefix = f"{prefix}" - for topics, value in data.items(): - payload.append( - {"topic": f"{prefix}/{topics}", "payload": value, "qos": self.qos, "retain": self.retain} - ) - auth = None - if self.username is not None and self.password is not None: - auth = {"username": self.username, "password": self.password} - publish.multiple(payload, hostname=self.hostname, port=self.port, client_id=self.client_id, auth=auth) diff --git a/src/models/query.py b/src/models/query.py index 478472a1..c4070a30 100755 --- a/src/models/query.py +++ b/src/models/query.py @@ -1,26 +1,25 @@ +"""Request.""" + import logging import requests -from dependencies import str2bool -from init import CONFIG +from config.main import APP_CONFIG class Query(object): + """Requests object.""" + def __init__(self, endpoint, headers=None): self.endpoint = endpoint self.timeout = 60 - check_ssl = CONFIG.get("ssl") - if check_ssl and "gateway" in check_ssl: - self.ssl_valid = str2bool(check_ssl["gateway"]) - else: - self.ssl_valid = True if not headers: self.headers = {"Content-Type": "application/x-www-form-urlencoded"} else: self.headers = headers def get(self, params=None): + """Get.""" logging.debug(f"[GET] Endpoint {self.endpoint}") logging.debug(f" - url : {self.endpoint}") logging.debug(f" - headers : {self.headers}") @@ -33,7 +32,7 @@ def get(self, params=None): params=params, url=self.endpoint, timeout=self.timeout, - 
verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") @@ -42,6 +41,7 @@ def get(self, params=None): return response def post(self, params=None, data=None): + """Post.""" logging.debug(f"[POST] Endpoint {self.endpoint}") logging.debug(f" - url : {self.endpoint}") logging.debug(f" - headers : {self.headers}") @@ -56,15 +56,16 @@ def post(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") - except Exception as e: + except Exception: logging.error(response) return response def delete(self, params=None, data=None): + """Delete.""" logging.debug(f"[DELETE] Endpoint {self.endpoint}") logging.debug(f" - headers : {self.headers}") logging.debug(f" - params : {params}") @@ -78,16 +79,17 @@ def delete(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") return response - except Exception as e: + except Exception: logging.error(response) return response def update(self, params=None, data=None): + """Update.""" logging.debug(f"[UPDATE] Endpoint {self.endpoint}") logging.debug(f" - headers : {self.headers}") logging.debug(f" - params : {params}") @@ -101,16 +103,17 @@ def update(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") return response - except Exception as e: + except Exception: logging.error(response) return response def put(self, params=None, data=None): + """Put.""" 
logging.debug(f"[PUT] Endpoint {self.endpoint}") logging.debug(f" - headers : {self.headers}") logging.debug(f" - params : {params}") @@ -124,10 +127,10 @@ def put(self, params=None, data=None): data=data, url=self.endpoint, timeout=self.timeout, - verify=self.ssl_valid, + verify=APP_CONFIG.gateway.ssl, ) logging.debug(f"[RESPONSE] : status_code {response.status_code}") logging.debug(f" => {response.text}...") - except Exception as e: + except Exception: logging.error(response) return response diff --git a/src/models/query_address.py b/src/models/query_address.py deleted file mode 100755 index 74461893..00000000 --- a/src/models/query_address.py +++ /dev/null @@ -1,103 +0,0 @@ -import json -import logging -import traceback - -from config import URL -from dependencies import title -from init import CONFIG, DB -from models.query import Query - - -class Address: - def __init__(self, headers, usage_point_id): - self.config = CONFIG - self.db = DB - self.url = URL - - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = self.config.usage_point_id_config(self.usage_point_id) - - def run(self): - name = "addresses" - endpoint = f"{name}/{self.usage_point_id}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - target = f"{self.url}/{endpoint}" - - response = Query(endpoint=target, headers=self.headers).get() - if response.status_code == 200: - try: - response_json = json.loads(response.text) - response = response_json["customer"]["usage_points"][0] - usage_point = response["usage_point"] - usage_point_addresses = usage_point["usage_point_addresses"] - response = usage_point_addresses - response.update(usage_point) - self.db.set_addresse( - self.usage_point_id, - { - "usage_points": str(usage_point["usage_point_id"]) - if usage_point["usage_point_id"] is not None - else "", - "street": str(usage_point_addresses["street"]) - if usage_point_addresses["street"] is not None - else "", - 
"locality": str(usage_point_addresses["locality"]) - if usage_point_addresses["locality"] is not None - else "", - "postal_code": str(usage_point_addresses["postal_code"]) - if usage_point_addresses["postal_code"] is not None - else "", - "insee_code": str(usage_point_addresses["insee_code"]) - if usage_point_addresses["insee_code"] is not None - else "", - "city": str(usage_point_addresses["city"]) - if usage_point_addresses["city"] is not None - else "", - "country": str(usage_point_addresses["country"]) - if usage_point_addresses["country"] is not None - else "", - "geo_points": str(usage_point_addresses["geo_points"]) - if usage_point_addresses["geo_points"] is not None - else "", - }, - ) - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération du contrat.", - } - return response - else: - return {"error": True, "description": json.loads(response.text)["detail"]} - - def get(self): - current_cache = self.db.get_addresse(usage_point_id=self.usage_point_id) - if not current_cache: - # No cache - logging.info(" => Pas de cache") - result = self.run() - else: - # Refresh cache - if hasattr(self.usage_point_config, "refresh_addresse") and self.usage_point_config.refresh_addresse: - logging.info(" => Mise à jour du cache") - result = self.run() - self.usage_point_config.refresh_addresse = False - DB.set_usage_point(self.usage_point_id, self.usage_point_config.__dict__) - else: - # Get data in cache - logging.info(" => Récupération du cache") - result = {} - for column in current_cache.__table__.columns: - result[column.name] = str(getattr(current_cache, column.name)) - logging.debug(f" => {result}") - if "error" not in result: - for key, value in result.items(): - if key != "usage_point_addresses": - logging.info(f"{key}: {value}") - else: - logging.error(result) - return result diff --git a/src/models/query_cache.py b/src/models/query_cache.py deleted file mode 100644 index 
03482a18..00000000 --- a/src/models/query_cache.py +++ /dev/null @@ -1,28 +0,0 @@ -import json -import logging - -from config import URL -from dependencies import get_version -from models.query import Query - - -class Cache: - def __init__(self, usage_point_id, headers=None): - self.url = URL - self.headers = headers - self.usage_point_id = usage_point_id - - def reset(self): - target = f"{self.url}/cache/{self.usage_point_id}" - response = Query(endpoint=target, headers=self.headers).delete() - if response.status_code == 200: - try: - status = json.loads(response.text) - for key, value in status.items(): - logging.info(f"{key}: {value}") - status["version"] = get_version() - return status - except LookupError: - return {"error": True, "description": "Erreur lors du reset du cache."} - else: - return {"error": True, "description": "Erreur lors du reset du cache."} diff --git a/src/models/query_contract.py b/src/models/query_contract.py deleted file mode 100755 index 83ecf367..00000000 --- a/src/models/query_contract.py +++ /dev/null @@ -1,120 +0,0 @@ -import datetime -import json -import logging -import re -import traceback - -from config import URL -from dependencies import title -from init import DB -from models.query import Query - - -class Contract: - def __init__(self, headers, usage_point_id, config): - self.db = DB - self.url = URL - - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = config - - def run(self): - name = "contracts" - endpoint = f"{name}/{self.usage_point_id}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - target = f"{self.url}/{endpoint}" - - query_response = Query(endpoint=target, headers=self.headers).get() - if query_response.status_code == 200: - try: - response_json = json.loads(query_response.text) - response = response_json["customer"]["usage_points"][0] - usage_point = response["usage_point"] - contracts = response["contracts"] - 
response = contracts - response.update(usage_point) - - if contracts["offpeak_hours"] is not None: - offpeak_hours = re.search("HC \((.*)\)", contracts["offpeak_hours"]).group(1) - else: - offpeak_hours = "" - if "last_activation_date" in contracts and contracts["last_activation_date"] is not None: - last_activation_date = ( - datetime.datetime.strptime(contracts["last_activation_date"], "%Y-%m-%d%z") - ).replace(tzinfo=None) - else: - last_activation_date = contracts["last_activation_date"] - if ( - "last_distribution_tariff_change_date" in contracts - and contracts["last_distribution_tariff_change_date"] is not None - ): - last_distribution_tariff_change_date = ( - datetime.datetime.strptime( - contracts["last_distribution_tariff_change_date"], - "%Y-%m-%d%z", - ) - ).replace(tzinfo=None) - else: - last_distribution_tariff_change_date = contracts["last_distribution_tariff_change_date"] - self.db.set_contract( - self.usage_point_id, - { - "usage_point_status": usage_point["usage_point_status"], - "meter_type": usage_point["meter_type"], - "segment": contracts["segment"], - "subscribed_power": contracts["subscribed_power"], - "last_activation_date": last_activation_date, - "distribution_tariff": contracts["distribution_tariff"], - "offpeak_hours_0": offpeak_hours, - "offpeak_hours_1": offpeak_hours, - "offpeak_hours_2": offpeak_hours, - "offpeak_hours_3": offpeak_hours, - "offpeak_hours_4": offpeak_hours, - "offpeak_hours_5": offpeak_hours, - "offpeak_hours_6": offpeak_hours, - "contract_status": contracts["contract_status"], - "last_distribution_tariff_change_date": last_distribution_tariff_change_date, - }, - ) - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération du contrat.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def get(self): - current_cache = 
self.db.get_contract(usage_point_id=self.usage_point_id) - if not current_cache: - # No cache - logging.info(" => Pas de cache") - result = self.run() - else: - # Refresh cache - if hasattr(self.usage_point_config, "refresh_contract") and self.usage_point_config.refresh_contract: - logging.info(" => Mise à jour du cache") - result = self.run() - self.usage_point_config.refresh_contract = False - DB.set_usage_point(self.usage_point_id, self.usage_point_config.__dict__) - else: - # Get data in cache - logging.info(" => Récupération du cache") - result = {} - for column in current_cache.__table__.columns: - result[column.name] = str(getattr(current_cache, column.name)) - logging.debug(f" => {result}") - if "error" not in result: - for key, value in result.items(): - logging.info(f"{key}: {value}") - else: - logging.error(result) - return result diff --git a/src/models/query_daily.py b/src/models/query_daily.py deleted file mode 100644 index fe10dc32..00000000 --- a/src/models/query_daily.py +++ /dev/null @@ -1,308 +0,0 @@ -import json -import logging -from datetime import datetime, timedelta - -from dateutil.relativedelta import relativedelta - -from config import DAILY_MAX_DAYS, URL -from init import CONFIG, DB -from models.query import Query -from models.stat import Stat - - -def daterange(start_date, end_date): - for n in range(int((end_date - start_date).days)): - yield start_date + timedelta(n) - - -class Daily: - """ - The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point. It provides methods for fetching, resetting, deleting, and blacklisting daily data. - - Attributes: - config (dict): The configuration settings. - db (object): The database object. - url (str): The base URL for API requests. - max_daily (int): The maximum number of days to retrieve data for. - date_format (str): The format of dates. - date_detail_format (str): The format of detailed dates. - headers (dict): The headers for API requests. 
- usage_point_id (str): The ID of the usage point. - usage_point_config (object): The configuration settings for the usage point. - contract (object): The contract associated with the usage point. - daily_max_days (int): The maximum number of days for daily data. - max_days_date (datetime): The maximum date for retrieving data. - activation_date (datetime): The activation date for retrieving data. - measure_type (str): The type of measurement (consumption or production). - base_price (float): The base price for the measurement type. - - Methods: - run(begin, end): - Retrieves and stores daily data for a specified date range. - - get(): - Retrieves and returns all available daily data for the usage point. - - reset(date=None): - Resets the daily data for the usage point, optionally for a specific date. - - delete(date=None): - Deletes the daily data for the usage point, optionally for a specific date. - - fetch(date): - Fetches and returns the daily data for a specific date. - - blacklist(date, action): - Adds or removes a date from the blacklist for the usage point. - - Note: - The 'Daily' class relies on the 'Query' class for making API requests and the 'Stat' class for retrieving additional statistics. 
- - Example usage: - headers = {"Authorization": "Bearer token"} - usage_point_id = "1234567890" - daily = Daily(headers, usage_point_id) - data = daily.get() - for item in data: - print(item) - """ - - def __init__(self, headers, usage_point_id, measure_type="consumption"): - self.config = CONFIG - self.db = DB - self.url = URL - self.max_daily = 1095 - self.date_format = "%Y-%m-%d" - self.date_detail_format = "%Y-%m-%d %H:%M:%S" - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) - self.contract = self.db.get_contract(self.usage_point_id) - self.daily_max_days = int(DAILY_MAX_DAYS) - self.max_days_date = datetime.utcnow() - timedelta(days=self.daily_max_days) - if ( - measure_type == "consumption" - and hasattr(self.usage_point_config, "consumption_max_date") - and self.usage_point_config.consumption_max_date != "" - and self.usage_point_config.consumption_max_date is not None - ): - self.activation_date = self.usage_point_config.consumption_max_date - elif ( - measure_type == "production" - and hasattr(self.usage_point_config, "production_max_date") - and self.usage_point_config.production_max_date != "" - and self.usage_point_config.production_max_date is not None - ): - self.activation_date = self.usage_point_config.production_max_date - elif ( - hasattr(self.contract, "last_activation_date") - and self.contract.last_activation_date != "" - and self.contract.last_activation_date is not None - ): - self.activation_date = self.contract.last_activation_date - else: - self.activation_date = self.max_days_date - self.measure_type = measure_type - self.base_price = 0 - if measure_type == "consumption": - if hasattr(self.usage_point_config, "consumption_price_base"): - self.base_price = self.usage_point_config.consumption_price_base - else: - if hasattr(self.usage_point_config, "production_price"): - self.base_price = self.usage_point_config.production_price - - def run(self, begin, 
end): - begin_str = begin.strftime(self.date_format) - end_str = end.strftime(self.date_format) - logging.info(f"Récupération des données : {begin_str} => {end_str}") - endpoint = f"daily_{self.measure_type}/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - # if begin < now() - timedelta(days=7): - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - try: - current_data = self.db.get_daily(self.usage_point_id, begin, end, self.measure_type) - if not current_data["missing_data"]: - logging.info(" => Toutes les données sont déjà en cache.") - output = [] - for date, data in current_data["date"].items(): - output.append({"date": date, "value": data["value"]}) - return output - else: - logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") - data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - if data.status_code == 403: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = 500 - return { - "error": True, - "description": description, - "status_code": status_code, - "exit": True, - } - else: - blacklist = 0 - max_histo = datetime.combine(datetime.now(), datetime.max.time()) - timedelta(days=1) - if hasattr(data, "status_code"): - if data.status_code == 200: - meter_reading = json.loads(data.text)["meter_reading"] - interval_reading = meter_reading["interval_reading"] - interval_reading_tmp = {} - for interval_reading_data in interval_reading: - interval_reading_tmp[interval_reading_data["date"]] = interval_reading_data["value"] - for single_date in daterange(begin, end): - if single_date < max_histo: - if single_date.strftime(self.date_format) in interval_reading_tmp: - # FOUND - self.db.insert_daily( - usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), - 
value=interval_reading_tmp[single_date.strftime(self.date_format)], - blacklist=blacklist, - measurement_direction=self.measure_type, - ) - else: - # NOT FOUND - self.db.daily_fail_increment( - usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), - measurement_direction=self.measure_type, - ) - return interval_reading - else: - return { - "error": True, - "description": json.loads(data.text)["detail"], - "status_code": data.status_code, - } - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = 500 - return { - "error": True, - "description": description, - "status_code": status_code, - } - except Exception as e: - logging.exception(e) - logging.error(e) - - def get(self): - """Generate a range of dates between a start date and an end date. - - Parameters: - start_date (datetime.date): The start date of the range. - end_date (datetime.date): The end date of the range. - - Yields: - datetime.date: The next date in the range. - - Example: - >>> start_date = datetime.date(2021, 1, 1) - >>> end_date = datetime.date(2021, 1, 5) - >>> for date in daterange(start_date, end_date): - ... print(date) - ... - 2021-01-01 - 2021-01-02 - 2021-01-03 - 2021-01-04 - - Note: - The end date is exclusive, meaning it is not included in the range. 
- """ - end = datetime.combine((datetime.now() + timedelta(days=2)), datetime.max.time()) - begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()) - finish = True - result = [] - while finish: - if self.max_days_date > begin: - # Max day reached - begin = self.max_days_date - finish = False - response = self.run(begin, end) - elif self.activation_date and self.activation_date > begin: - # Activation date reached - begin = self.activation_date - finish = False - response = self.run(begin, end) - else: - response = self.run(begin, end) - begin = begin - relativedelta(months=self.max_daily) - end = end - relativedelta(months=self.max_daily) - if "exit" in response: - finish = False - response = { - "error": True, - "description": response["description"], - "status_code": response["status_code"], - } - if response is not None: - result = [*result, *response] - else: - response = { - "error": True, - "description": "MyElectricalData est indisponible.", - } - if "error" in response and response["error"]: - logging.error("Echec de la récupération des données") - logging.error(f'=> {response["description"]}') - logging.error(f"=> {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if "status_code" in response and (response["status_code"] == 409 or response["status_code"] == 400): - finish = False - logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error(f"Prochain lancement à {datetime.now() + timedelta(seconds=self.config.get('cycle'))}") - return result - - def reset(self, date=None): - if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.reset_daily(self.usage_point_id, date, self.measure_type) - return True - - def delete(self, date=None): - if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.delete_daily(self.usage_point_id, date, self.measure_type) - return True - - def fetch(self, date): - if date is not None: - date = 
datetime.strptime(date, self.date_format) - result = self.run( - datetime.combine(date - timedelta(days=2), datetime.min.time()), - datetime.combine(date + timedelta(days=2), datetime.min.time()), - ) - if "error" in result and result["error"]: - return { - "error": True, - "notif": result["description"], - "fail_count": self.db.get_daily_fail_count(self.usage_point_id, date, self.measure_type), - } - for item in result: - if date.strftime(self.date_format) in item["date"]: - item["hc"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hc") - item["hp"] = Stat(self.usage_point_id, self.measure_type).get_daily(date, "hp") - return item - return { - "error": True, - "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": self.db.get_daily_fail_count(self.usage_point_id, date, self.measure_type), - } - - def blacklist(self, date, action): - if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.blacklist_daily(self.usage_point_id, date, action, self.measure_type) - return True diff --git a/src/models/query_detail.py b/src/models/query_detail.py deleted file mode 100644 index 8a0adfbc..00000000 --- a/src/models/query_detail.py +++ /dev/null @@ -1,279 +0,0 @@ -import json -import logging -import re -from datetime import datetime, timedelta - -from config import DETAIL_MAX_DAYS, URL -from init import CONFIG, DB -from models.database import ConsumptionDetail, ProductionDetail -from models.query import Query - - -def daterange(start_date, end_date): - for n in range(int((end_date - start_date).days)): - yield start_date + timedelta(n) - - -class Detail: - def __init__(self, headers, usage_point_id, measure_type="consumption"): - self.config = CONFIG - self.db = DB - self.url = URL - self.max_detail = 7 - self.date_format = "%Y-%m-%d" - self.date_detail_format = "%Y-%m-%d %H:%M:%S" - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = 
self.db.get_usage_point(self.usage_point_id) - self.contract = self.db.get_contract(self.usage_point_id) - self.daily_max_days = int(DETAIL_MAX_DAYS) - self.max_days_date = datetime.utcnow() - timedelta(days=self.daily_max_days) - if ( - measure_type == "consumption" - and hasattr(self.usage_point_config, "consumption_detail_max_date") - and self.usage_point_config.consumption_detail_max_date != "" - and self.usage_point_config.consumption_detail_max_date is not None - ): - self.activation_date = self.usage_point_config.consumption_detail_max_date - elif ( - measure_type == "production" - and hasattr(self.usage_point_config, "production_detail_max_date") - and self.usage_point_config.production_detail_max_date != "" - and self.usage_point_config.production_detail_max_date is not None - ): - self.activation_date = self.usage_point_config.production_detail_max_date - elif ( - hasattr(self.contract, "last_activation_date") - and self.contract.last_activation_date != "" - and self.contract.last_activation_date is not None - ): - self.activation_date = self.contract.last_activation_date - else: - self.activation_date = self.max_days_date - self.offpeak_hours = { - 0: self.usage_point_config.offpeak_hours_0, - 1: self.usage_point_config.offpeak_hours_1, - 2: self.usage_point_config.offpeak_hours_2, - 3: self.usage_point_config.offpeak_hours_3, - 4: self.usage_point_config.offpeak_hours_4, - 5: self.usage_point_config.offpeak_hours_5, - 6: self.usage_point_config.offpeak_hours_6, - } - self.measure_type = measure_type - self.base_price = 0 - if measure_type == "consumption": - self.detail_table = ConsumptionDetail - if hasattr(self.usage_point_config, "consumption_price_base"): - self.base_price = self.usage_point_config.consumption_price_base - else: - self.detail_table = ProductionDetail - if hasattr(self.usage_point_config, "production_price"): - self.base_price = self.usage_point_config.production_price - - def run(self, begin, end): - if 
begin.strftime(self.date_format) == end.strftime(self.date_format): - end = end + timedelta(days=1) - begin_str = begin.strftime(self.date_format) - end_str = end.strftime(self.date_format) - logging.info(f"Récupération des données : {begin_str} => {end_str}") - endpoint = f"{self.measure_type}_load_curve/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - # if begin <= (datetime.now() - timedelta(days=8)): - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - try: - current_data = self.db.get_detail(self.usage_point_id, begin, end, self.measure_type) - # current_week = datetime.now() - timedelta(days=self.max_detail + 1) - # last_week = False - # if current_week <= begin: - # last_week = True - # if not current_data["missing_data"] and not last_week: - if not current_data["missing_data"]: - logging.info(" => Toutes les données sont déjà en cache.") - output = [] - for date, data in current_data["date"].items(): - output.append({"date": date, "value": data["value"]}) - return output - else: - logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") - data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - if hasattr(data, "status_code"): - if data.status_code == 403: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = 500 - return { - "error": True, - "description": description, - "status_code": status_code, - "exit": True, - } - if data.status_code == 200: - meter_reading = json.loads(data.text)["meter_reading"] - for interval_reading in meter_reading["interval_reading"]: - value = interval_reading["value"] - interval = re.findall(r"\d+", interval_reading["interval_length"])[0] - date = interval_reading["date"] - date_object = datetime.strptime(date, self.date_detail_format) - # CHANGE DATE TO BEGIN RANGE - 
date = date_object - timedelta(minutes=int(interval)) - # date = date.strftime(self.date_detail_format) - # print(date) - # GET WEEKDAY - # date_days = date_object.weekday() - # date_hour_minute = date_object.strftime('%H:%M') - # measure_type = "HP" - # day_offpeak_hours = self.offpeak_hours[date_days] - # if day_offpeak_hours is not None: - # for offpeak_hour in day_offpeak_hours.split(";"): - # if offpeak_hour != "None" and offpeak_hour != "" and offpeak_hour is not None: - # offpeak_begin = offpeak_hour.split("-")[0].replace('h', ':').replace('H', ':') - # # FORMAT HOUR WITH 2 DIGIT - # offpeak_begin = datetime.strptime(offpeak_begin, '%H:%M') - # offpeak_begin = datetime.strftime(offpeak_begin, '%H:%M') - # offpeak_stop = offpeak_hour.split("-")[1].replace('h', ':').replace('H', ':') - # # FORMAT HOUR WITH 2 DIGIT - # offpeak_stop = datetime.strptime(offpeak_stop, '%H:%M') - # offpeak_stop = datetime.strftime(offpeak_stop, '%H:%M') - # result = is_between(date_hour_minute, (offpeak_begin, offpeak_stop)) - # if result: - # measure_type = "HC" - self.db.insert_detail( - usage_point_id=self.usage_point_id, - date=date, - value=value, - interval=interval, - measure_type="", - blacklist=0, - mesure_type=self.measure_type, - ) - return meter_reading["interval_reading"] - else: - return { - "error": True, - "description": json.loads(data.text)["detail"], - "status_code": data.status_code, - } - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = 500 - return { - "error": True, - "description": description, - "status_code": status_code, - } - except Exception as e: - logging.exception(e) - logging.error(e) - - def get(self): - end = datetime.combine((datetime.now() + timedelta(days=2)), datetime.max.time()) - begin = datetime.combine(end - timedelta(days=self.max_detail), datetime.min.time()) - finish = True - result = 
[] - while finish: - if self.max_days_date > begin: - # Max day reached - begin = self.max_days_date - finish = False - response = self.run(begin, end) - elif self.activation_date and self.activation_date > begin: - # Activation date reached - begin = self.activation_date - finish = False - response = self.run(begin, end) - else: - response = self.run(begin, end) - begin = begin - timedelta(days=self.max_detail) - end = end - timedelta(days=self.max_detail) - if "exit" in response: - finish = False - response = { - "error": True, - "description": response["description"], - "status_code": response["status_code"], - } - if response is not None: - result = [*result, *response] - else: - response = { - "error": True, - "description": "MyElectricalData est indisponible.", - } - if "error" in response and response["error"]: - logging.error("Echec de la récupération des données.") - logging.error(f' => {response["description"]}') - logging.error(f" => {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if "status_code" in response and (response["status_code"] == 409 or response["status_code"] == 400): - finish = False - logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error(f"Prochain lancement à {datetime.now() + timedelta(seconds=self.config.get('cycle'))}") - return result - - def reset_daily(self, date): - begin = datetime.combine(datetime.strptime(date, self.date_format), datetime.min.time()) - end = datetime.combine(datetime.strptime(date, self.date_format), datetime.max.time()) - self.db.reset_detail_range(self.usage_point_id, begin, end, self.measure_type) - return True - - def delete_daily(self, date): - begin = datetime.combine(datetime.strptime(date, self.date_format), datetime.min.time()) - end = datetime.combine(datetime.strptime(date, self.date_format), datetime.max.time()) - self.db.delete_detail_range(self.usage_point_id, begin, end, self.measure_type) - return True - - def reset(self, date=None): - 
if date is not None: - date = datetime.strptime(date, self.date_detail_format) - self.db.reset_detail(self.usage_point_id, date, self.measure_type) - return True - - def delete(self, date=None): - if date is not None: - date = datetime.strptime(date, self.date_detail_format) - self.db.delete_detail(self.usage_point_id, date, self.measure_type) - return True - - def fetch(self, date): - if date is not None: - date = datetime.strptime(date, self.date_format) - result = self.run( - datetime.combine(date - timedelta(days=2), datetime.min.time()), - datetime.combine(date + timedelta(days=2), datetime.min.time()), - ) - if "error" in result and result["error"]: - return { - "error": True, - "notif": result["description"], - "fail_count": self.db.get_detail_fail_count(self.usage_point_id, date, self.measure_type), - } - - for item in result: - if type(item["date"]) == str: - item["date"] = datetime.strptime(item["date"], self.date_detail_format) - result_date = item["date"].strftime(self.date_format) - if date.strftime(self.date_format) in result_date: - item["date"] = result_date - return item - - return { - "error": True, - "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": self.db.get_detail_fail_count(self.usage_point_id, date, self.measure_type), - } - - -def is_between(time, time_range): - if time_range[1] < time_range[0]: - return time > time_range[0] or time <= time_range[1] - return time_range[0] < time <= time_range[1] diff --git a/src/models/query_ecowatt.py b/src/models/query_ecowatt.py deleted file mode 100644 index de803deb..00000000 --- a/src/models/query_ecowatt.py +++ /dev/null @@ -1,79 +0,0 @@ -import ast -import json -import logging -import traceback -from datetime import datetime - -from dateutil.relativedelta import relativedelta - -from config import URL -from dependencies import title -from init import CONFIG, DB -from models.query import Query - - -class Ecowatt: - def __init__(self): - self.config = 
CONFIG - self.db = DB - self.url = URL - self.valid_date = datetime.combine(datetime.now() + relativedelta(days=2), datetime.min.time()) - - def run(self): - start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") - end = (datetime.now() + relativedelta(days=3)).strftime("%Y-%m-%d") - target = f"{self.url}/rte/ecowatt/{start}/{end}" - query_response = Query(endpoint=target).get() - if query_response.status_code == 200: - try: - response_json = json.loads(query_response.text) - for date, data in response_json.items(): - date = datetime.strptime(date, "%Y-%m-%d") - self.db.set_ecowatt(date, data["value"], data["message"], str(data["detail"])) - response = response_json - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération des données Ecowatt.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def get(self): - data = self.db.get_ecowatt() - output = {} - for d in data: - if hasattr(d, "date") and hasattr(d, "value") and hasattr(d, "message") and hasattr(d, "detail"): - output[d.date] = { - "value": d.value, - "message": d.message, - "detail": ast.literal_eval(d.detail), - } - return output - - def fetch(self): - current_cache = self.db.get_ecowatt() - result = {} - if not current_cache: - # No cache - title(f"No cache") - result = self.run() - else: - last_item = current_cache[0] - if last_item.date < self.valid_date: - result = self.run() - else: - logging.info(" => Toutes les données sont déjà en cache.") - if "error" not in result: - for key, value in result.items(): - logging.info(f"{key}: {value['message']}") - else: - logging.error(result) - return "OK" - return result diff --git a/src/models/query_power.py b/src/models/query_power.py deleted file mode 100644 index fe54b16c..00000000 --- a/src/models/query_power.py +++ /dev/null @@ -1,209 +0,0 @@ -import json -import logging 
-from datetime import datetime, timedelta - -from config import DAILY_MAX_DAYS, URL -from init import CONFIG, DB -from models.query import Query - - -def daterange(start_date, end_date): - for n in range(int((end_date - start_date).days)): - yield start_date + timedelta(n) - - -class Power: - def __init__(self, headers, usage_point_id): - self.config = CONFIG - self.db = DB - self.url = URL - self.max_daily = 1095 - self.date_format = "%Y-%m-%d" - self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.headers = headers - self.usage_point_id = usage_point_id - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) - self.contract = self.db.get_contract(self.usage_point_id) - self.daily_max_days = DAILY_MAX_DAYS - self.max_days_date = datetime.utcnow() - timedelta(days=self.daily_max_days) - if ( - hasattr(self.usage_point_config, "consumption_max_date") - and self.usage_point_config.consumption_max_date != "" - and self.usage_point_config.consumption_max_date is not None - ): - self.activation_date = self.usage_point_config.consumption_max_date - elif ( - hasattr(self.contract, "last_activation_date") - and self.contract.last_activation_date != "" - and self.contract.last_activation_date is not None - ): - self.activation_date = self.contract.last_activation_date - else: - self.activation_date = self.max_days_date - - def run(self, begin, end): - begin_str = begin.strftime(self.date_format) - end_str = end.strftime(self.date_format) - logging.info(f"Récupération des données : {begin_str} => {end_str}") - endpoint = f"daily_consumption_max_power/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: - endpoint += "/cache" - try: - current_data = self.db.get_daily_power(self.usage_point_id, begin, end) - if not current_data["missing_data"]: - logging.info(" => Toutes les données sont déjà en cache.") - output = [] - for date, data in current_data["date"].items(): - 
output.append({"date": date, "value": data["value"]}) - return output - else: - logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") - data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - blacklist = 0 - max_histo = datetime.combine(datetime.now(), datetime.max.time()) - timedelta(days=1) - if hasattr(data, "status_code"): - if data.status_code == 200: - meter_reading = json.loads(data.text)["meter_reading"] - interval_reading = meter_reading["interval_reading"] - interval_reading_tmp = {} - for interval_reading_data in interval_reading: - date = datetime.strptime(interval_reading_data["date"], self.date_format_detail) - date = datetime.combine(date, datetime.min.time()) - interval_reading_tmp[date.strftime(self.date_format)] = { - "date": datetime.strptime( - interval_reading_data["date"], - self.date_format_detail, - ), - "value": interval_reading_data["value"], - } - for single_date in daterange(begin, end): - if single_date < max_histo: - if single_date.strftime(self.date_format) in interval_reading_tmp: - # FOUND - single_date_value = interval_reading_tmp[single_date.strftime(self.date_format)] - self.db.insert_daily_max_power( - usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), - event_date=single_date_value["date"], - value=single_date_value["value"], - blacklist=blacklist, - ) - else: - # NOT FOUND - self.db.daily_max_power_fail_increment( - usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), - ) - return interval_reading - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = 500 - return { - "error": True, - "description": description, - "status_code": status_code, - } - else: - if hasattr(data, "text"): - description = json.loads(data.text)["detail"] - else: - 
description = data - if hasattr(data, "status_code"): - status_code = data.status_code - else: - status_code = 500 - return { - "error": True, - "description": description, - "status_code": status_code, - } - except Exception as e: - logging.exception(e) - logging.error(e) - - def get(self): - end = datetime.combine((datetime.now() + timedelta(days=2)), datetime.max.time()) - begin = datetime.combine(end - timedelta(days=self.max_daily), datetime.min.time()) - finish = True - result = [] - while finish: - if self.max_days_date > begin: - # Max day reached - begin = self.max_days_date - finish = False - response = self.run(begin, end) - elif self.activation_date and self.activation_date > begin: - # Activation date reached - begin = self.activation_date - finish = False - response = self.run(begin, end) - else: - response = self.run(begin, end) - begin = begin - timedelta(days=self.max_daily) - end = end - timedelta(days=self.max_daily) - if response is not None: - result = [*result, *response] - else: - response = { - "error": True, - "description": "MyElectricalData est indisponible.", - } - if "error" in response and response["error"]: - logging.error("Echec de la récupération des données.") - logging.error(f' => {response["description"]}') - logging.error(f" => {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if "status_code" in response and (response["status_code"] == 409 or response["status_code"] == 400): - finish = False - logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error(f"Prochain lancement à {datetime.now() + timedelta(seconds=CONFIG.get('cycle'))}") - return result - - def reset(self, date=None): - if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.reset_daily_max_power(self.usage_point_id, date) - return True - - def delete(self, date=None): - if date is not None: - date = datetime.strptime(date, self.date_format) - 
self.db.delete_daily_max_power(self.usage_point_id, date) - return True - - def blacklist(self, date, action): - if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.blacklist_daily_max_power(self.usage_point_id, date, action) - return True - - def fetch(self, date): - if date is not None: - date = datetime.strptime(date, self.date_format) - result = self.run( - date - timedelta(days=1), - date + timedelta(days=1), - ) - if "error" in result and result["error"]: - return { - "error": True, - "notif": result["description"], - "fail_count": self.db.get_daily_max_power_fail_count(self.usage_point_id, date), - } - for item in result: - target_date = datetime.strptime(item["date"], self.date_format_detail).strftime(self.date_format) - event_date = datetime.strptime(item["date"], self.date_format_detail).strftime("%H:%M:%S") - if date.strftime(self.date_format) == target_date: - item["date"] = target_date - item["event_date"] = event_date - return item - return { - "error": True, - "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": self.db.get_daily_max_power_fail_count(self.usage_point_id, date), - } diff --git a/src/models/query_status.py b/src/models/query_status.py deleted file mode 100755 index 27eb8b1b..00000000 --- a/src/models/query_status.py +++ /dev/null @@ -1,84 +0,0 @@ -import datetime -import json -import logging -import traceback -from os import environ, getenv - -from config import URL -from dependencies import get_version -from init import DB -from models.query import Query - - -class Status: - def __init__(self, headers=None): - self.db = DB - self.url = URL - self.headers = headers - - def ping(self): - target = f"{self.url}/ping" - status = { - "version": get_version(), - "status": False, - "information": "MyElectricalData injoignable.", - } - try: - response = Query(endpoint=target, headers=self.headers).get() - if hasattr(response, "status_code") and response.status_code == 
200: - status = json.loads(response.text) - for key, value in status.items(): - logging.info(f"{key}: {value}") - status["version"] = get_version() - return status - else: - return status - except LookupError: - return status - - def status(self, usage_point_id): - usage_point_id_config = self.db.get_usage_point(usage_point_id) - target = f"{self.url}/valid_access/{usage_point_id}" - if hasattr(usage_point_id_config, "cache") and usage_point_id_config.cache: - target += "/cache" - response = Query(endpoint=target, headers=self.headers).get() - if response: - status = json.loads(response.text) - if response.status_code == 200: - try: - for key, value in status.items(): - logging.info(f"{key}: {value}") - self.db.usage_point_update( - usage_point_id, - consentement_expiration=datetime.datetime.strptime( - status["consent_expiration_date"], "%Y-%m-%dT%H:%M:%S" - ), - # last_call=datetime.datetime.strptime(status["last_call"], "%Y-%m-%dT%H:%M:%S.%f"), - call_number=status["call_number"], - quota_limit=status["quota_limit"], - quota_reached=status["quota_reached"], - quota_reset_at=datetime.datetime.strptime(status["quota_reset_at"], "%Y-%m-%dT%H:%M:%S.%f"), - ban=status["ban"], - ) - return status - except Exception as e: - if "DEBUG" in environ and getenv("DEBUG"): - traceback.print_exc() - logging.error(e) - return { - "error": True, - "description": "Erreur lors de la récupération du statut du compte.", - } - else: - if "DEBUG" in environ and getenv("DEBUG"): - traceback.print_exc() - logging.error(status["detail"]) - return {"error": True, "description": status["detail"]} - else: - if "DEBUG" in environ and getenv("DEBUG"): - traceback.print_exc() - return { - "error": True, - "status_code": response.status_code, - "description": json.loads(response.text), - } diff --git a/src/models/query_tempo.py b/src/models/query_tempo.py deleted file mode 100644 index 9b349d40..00000000 --- a/src/models/query_tempo.py +++ /dev/null @@ -1,156 +0,0 @@ -import json -import 
logging -import traceback -from datetime import datetime, timedelta - -from dateutil.relativedelta import relativedelta - -from config import URL -from dependencies import title -from init import CONFIG, DB -from models.query import Query - - -class Tempo: - def __init__(self): - self.config = CONFIG - self.db = DB - self.url = URL - self.valid_date = datetime.combine(datetime.now() + relativedelta(days=1), datetime.min.time()) - self.nb_check_day = 31 - self.total_tempo_days = { - "red": 22, - "white": 43, - "blue": 300, - } - - def run(self): - start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") - end = (datetime.now() + relativedelta(days=2)).strftime("%Y-%m-%d") - target = f"{self.url}/rte/tempo/{start}/{end}" - query_response = Query(endpoint=target).get() - if query_response.status_code == 200: - try: - response_json = json.loads(query_response.text) - for date, color in response_json.items(): - date = datetime.strptime(date, "%Y-%m-%d") - self.db.set_tempo(date, color) - response = response_json - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération de données Tempo.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def get(self): - data = self.db.get_tempo() - output = {} - for d in data: - if hasattr(d, "date") and hasattr(d, "color"): - output[d.date] = d.color - return output - - def fetch(self): - current_cache = self.db.get_tempo() - result = {} - if not current_cache: - # No cache - title(f"No cache") - result = self.run() - else: - valid_date = self.valid_date - missing_date = False - for i in range(self.nb_check_day): - if current_cache[i].date != valid_date: - missing_date = True - valid_date = valid_date - relativedelta(days=1) - if missing_date: - result = self.run() - else: - logging.info(" => Toutes les données sont déjà en cache.") - if "error" not in 
result: - for key, value in result.items(): - logging.info(f"{key}: {value}") - else: - logging.error(result) - return "OK" - return result - - def calc_day(self): - """ - Calculates the number of days left for each color based on the current date. - - Args: - None - - Returns: - A dictionary containing the number of days left for each color. - - """ - now = datetime.now() - begin = datetime.combine(now.replace(month=9, day=1), datetime.min.time()) - if now < begin: - begin = begin.replace(year=int(now.strftime("%Y")) - 1) - end = datetime.combine(begin - timedelta(hours=5), datetime.max.time()).replace( - year=int(begin.strftime("%Y")) + 1 - ) - current_tempo_day = self.db.get_tempo_range(begin=begin, end=end) - result = self.total_tempo_days - for day in current_tempo_day: - result[day.color.lower()] -= 1 - self.db.set_tempo_config("days", result) - return result - - def fetch_day(self): - target = f"{self.url}/edf/tempo/days" - query_response = Query(endpoint=target).get() - if query_response.status_code == 200: - try: - response_json = json.loads(query_response.text) - self.db.set_tempo_config("days", response_json) - response = {"error": False, "description": "", "items": response_json} - logging.info(" => Toutes les valeurs sont misent à jours.") - except Exception as e: - logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération de jours Tempo.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } - - def fetch_price(self): - target = f"{self.url}/edf/tempo/price" - query_response = Query(endpoint=target).get() - if query_response.status_code == 200: - try: - response_json = json.loads(query_response.text) - self.db.set_tempo_config("price", response_json) - response = {"error": False, "description": "", "items": response_json} - logging.info(" => Toutes les valeurs sont misent à jours.") - except Exception as e: - 
logging.error(e) - traceback.print_exc() - response = { - "error": True, - "description": "Erreur lors de la récupération de jours Tempo.", - } - return response - else: - return { - "error": True, - "description": json.loads(query_response.text)["detail"], - } diff --git a/src/models/rte.py b/src/models/rte.py deleted file mode 100644 index ecf805fd..00000000 --- a/src/models/rte.py +++ /dev/null @@ -1,29 +0,0 @@ -from json import loads - -from rauth import OAuth2Service - - -class ExampleOAuth2Client: - def __init__(self, client_id, client_secret): - self.access_token = None - - self.service = OAuth2Service( - name="foo", - client_id=client_id, - client_secret=client_secret, - access_token_url="http://api.example.com/oauth/access_token", - authorize_url="http://api.example.com/oauth/access_token", - base_url="http://api.example.com/", - ) - - self.get_access_token() - - def get_access_token(self): - data = { - "code": "bar", # specific to my app - "grant_type": "client_credentials", # generally required! 
- } - - session = self.service.get_auth_session(data=data, decoder=loads) - - self.access_token = session.access_token diff --git a/src/models/stat.py b/src/models/stat.py index b546c799..92e5f05c 100644 --- a/src/models/stat.py +++ b/src/models/stat.py @@ -1,14 +1,20 @@ +"""Generate all statistical data for a usage point.""" import calendar import json import logging from datetime import date, datetime, timedelta, timezone -import pytz from dateutil.relativedelta import relativedelta -from init import CONFIG, DB - -utc = pytz.UTC +from const import TEMPO_BEGIN, TEMPO_END +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.max_power import DatabaseMaxPower +from database.statistique import DatabaseStatistique +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from utils import is_between now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) @@ -43,11 +49,11 @@ class Stat: # pylint: disable=R0902,R0904 - value_yesterday_hp: The value of yesterday for high peak measurement type. - value_yesterday_hc: The value of yesterday for high consumption measurement type. - value_peak_offpeak_percent_hp: The percentage value of peak and off-peak for high peak measurement type. - - value_peak_offpeak_percent_hc: The percentage value of peak and off-peak for high consumption measurement type. + - value_peak_offpeak_percent_hc: The percentage value of peak and off-peak for high consumption. - value_current_week_evolution: The evolution value of the current week. - value_yesterday_evolution: The evolution value of yesterday. - value_current_month_evolution: The evolution value of the current month. - - value_peak_offpeak_percent_hp_vs_hc: The percentage value of peak and off-peak for high peak and high consumption measurement types. 
+ - value_peak_offpeak_percent_hp_vs_hc: The percentage value of peak and off-peak for high peak/consumption. - value_monthly_evolution: The evolution value of the monthly data. - value_yearly_evolution: The evolution value of the yearly data. @@ -80,21 +86,25 @@ class Stat: # pylint: disable=R0902,R0904 - yesterday_hc_hp(): Returns the yesterday data for high consumption and high peak measurement types. - peak_offpeak_percent(): Returns the percentage value of peak and off-peak. - get_year(year, measure_type=None): Returns the yearly data for the specified year and measure type. - - get_year_linear(idx, measure_type=None): Returns the linear yearly data for the specified index and measure type. - - get_month(year, month=None, measure_type=None): Returns the monthly data for the specified year, month, and measure type. - - get_month_linear(idx, measure_type=None): Returns the linear monthly data for the specified index and measure type. - - get_week(year, month=None, measure_type=None): Returns the weekly data for the specified year, month, and measure type. - - get_week_linear(idx, measure_type=None): Returns the linear weekly data for the specified index and measure type. + - get_year_linear(idx, measure_type=None): Returns the linear yearly data for the specified index and + measure type. + - get_month(year, month=None, measure_type=None): Returns the monthly data for the specified year, month, + and measure type. + - get_month_linear(idx, measure_type=None): Returns the linear monthly data for the specified index + and measure type. + - get_week(year, month=None, measure_type=None): Returns the weekly data for the specified year, month, + and measure type. + - get_week_linear(idx, measure_type=None): Returns the linear weekly data for the specified index + and measure type. - get_price(): Returns the price data. - get_mesure_type(date): Returns the measure type for the specified date. - generate_price(): Generates and saves the price data. 
- get_daily(date, mesure_type): Returns the daily data for the specified date and measure type. - delete(): Deletes the statistical data for the usage point. - - is_between(time, time_range): Checks if the given time is between the given time range. """ def __init__(self, usage_point_id, measurement_direction=None): - """Initializes a new instance of the 'Stat' class. + """Initialize a new instance of the 'Stat' class. Parameters: usage_point_id (int): The ID of the usage point. @@ -123,24 +133,25 @@ def __init__(self, usage_point_id, measurement_direction=None): value_last_year (int): The value of the last year. value_yesterday_hp (int): The value of yesterday for high peak measurement type. value_yesterday_hc (int): The value of yesterday for high consumption measurement type. - value_peak_offpeak_percent_hp (int): The percentage value of peak and off-peak for high peak measurement type. - value_peak_offpeak_percent_hc (int): The percentage value of peak and off-peak for high consumption measurement type. + value_peak_offpeak_percent_hp (int): The percentage value of peak and off-peak for high peak + measurement type. + value_peak_offpeak_percent_hc (int): The percentage value of peak and off-peak for high consumption + measurement type. value_current_week_evolution (int): The evolution value of the current week. value_yesterday_evolution (int): The evolution value of yesterday. value_current_month_evolution (int): The evolution value of the current month. - value_peak_offpeak_percent_hp_vs_hc (int): The percentage value of peak and off-peak for high peak and high consumption measurement types. + value_peak_offpeak_percent_hp_vs_hc (int): The percentage value of peak and off-peak for high peak and + high consumption measurement types. value_monthly_evolution (int): The evolution value of the monthly data. value_yearly_evolution (int): The evolution value of the yearly data. 
Returns: None """ - self.config = CONFIG - self.db = DB self.usage_point_id = usage_point_id self.measurement_direction = measurement_direction - self.usage_point_id_config = self.db.get_usage_point(self.usage_point_id) - self.usage_point_id_contract = self.db.get_contract(self.usage_point_id) + self.usage_point_id_config = DatabaseUsagePoints(self.usage_point_id).get() + self.usage_point_id_contract = DatabaseContracts(self.usage_point_id).get() self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" # STAT @@ -166,15 +177,23 @@ def __init__(self, usage_point_id, measurement_direction=None): self.value_peak_offpeak_percent_hp_vs_hc = 0 self.value_monthly_evolution = 0 self.value_yearly_evolution = 0 - self.usage_point_id_contract = self.db.get_contract(self.usage_point_id) + self.usage_point_id_contract = DatabaseContracts(self.usage_point_id).get() def daily(self, index=0): + """Calculate the daily value for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the calculated value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = 0 - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + data.value return { "value": value, @@ -183,17 +202,27 @@ def daily(self, index=0): } def detail(self, index, measure_type=None): + """Calculate the detailed value for the given index and measure type. + + Args: + index (int): The index for the number of days ago. + measure_type (str, optional): The measure type (HP or HC). Defaults to None. 
+ + Returns: + dict: A dictionary containing the calculated value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = 0 - for data in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(data.date) + day_interval = data.interval if hasattr(data, "interval") and data.interval != 0 else 1 if measure_type is None or (measure_type == "HP" and day_measure_type == "HP"): - value = value + data.value / (60 / data.interval) + value = value + data.value / (60 / day_interval) elif measure_type is None or (measure_type == "HC" and day_measure_type == "HC"): - value = value + data.value / (60 / data.interval) + value = value + data.value / (60 / day_interval) return { "value": value, "begin": begin.strftime(self.date_format), @@ -201,6 +230,14 @@ def detail(self, index, measure_type=None): } def tempo(self, index): + """Calculate the tempo value for the given index. + + Args: + index (int): The index for the number of days ago. + + Returns: + dict: A dictionary containing the calculated value, begin date, and end date. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) @@ -213,17 +250,16 @@ def tempo(self, index): "red_hc": 0, "red_hp": 0, } - for data in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): - # print(data) + for data in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): hour = int(datetime.strftime(data.date, "%H")) - if hour < 6: - color = self.db.get_tempo_range(begin - timedelta(days=1), end - timedelta(days=1))[0].color + if hour < TEMPO_BEGIN: + color = DatabaseTempo().get_range(begin - timedelta(days=1), end - timedelta(days=1))[0].color color = f"{color.lower()}_hc" - elif hour >= 22: - color = self.db.get_tempo_range(begin + timedelta(days=1), end + timedelta(days=1))[0].color + elif hour >= TEMPO_END: + color = DatabaseTempo().get_range(begin + timedelta(days=1), end + timedelta(days=1))[0].color color = f"{color.lower()}_hc" else: - color = self.db.get_tempo_range(begin, end)[0].color + color = DatabaseTempo().get_range(begin, end)[0].color color = f"{color.lower()}_hp" value[color] += data.value / (60 / data.interval) return { @@ -233,12 +269,20 @@ def tempo(self, index): } def tempo_color(self, index=0): + """Calculate the tempo color for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the tempo color value, begin date, and end date. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = "" - for data in self.db.get_tempo_range(begin, end): + for data in DatabaseTempo().get_range(begin, end): logging.debug(f"tempo data: {data}") value = value + data.color return { @@ -248,14 +292,20 @@ def tempo_color(self, index=0): } def max_power(self, index=0): + """Calculate the maximum power for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the maximum power value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = 0 - # print(self.db.get_daily_max_power_range(self.usage_point_id, begin, end)) - for data in self.db.get_daily_max_power_range(self.usage_point_id, begin, end): - # print(data) + for data in DatabaseMaxPower(self.usage_point_id).get_range(begin, end): value = value + data.value return { "value": value, @@ -264,6 +314,14 @@ def max_power(self, index=0): } def max_power_over(self, index=0): + """Calculate if the maximum power is exceeded for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary indicating if the maximum power is exceeded, begin date, and end date. 
+ """ max_power = 0 now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) @@ -276,7 +334,7 @@ def max_power_over(self, index=0): end = datetime.combine(begin, datetime.max.time()) value = 0 boolv = "true" - for data in self.db.get_daily_max_power_range(self.usage_point_id, begin, end): + for data in DatabaseMaxPower(self.usage_point_id).get_range(begin, end): value = value + data.value if (value / 1000) < max_power: boolv = "false" @@ -287,14 +345,20 @@ def max_power_over(self, index=0): } def max_power_time(self, index=0): + """Calculate the maximum power time for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the maximum power time value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) max_power_time = "" - # print(self.db.get_daily_max_power_range(self.usage_point_id, begin, end)) - for data in self.db.get_daily_max_power_range(self.usage_point_id, begin, end): - # print(data) + for data in DatabaseMaxPower(self.usage_point_id).get_range(begin, end): if data.event_date is None or data.event_date == "": max_power_time = data.date else: @@ -311,6 +375,11 @@ def max_power_time(self, index=0): return data def current_week_array(self): + """Calculate the array of values for the current week. + + Returns: + list: A list containing the values for each day of the current week. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date, datetime.min.time()) @@ -318,8 +387,9 @@ def current_week_array(self): end = datetime.combine(yesterday_date, datetime.max.time()) day_idx = 0 daily_obj = [] - while day_idx < 7: - day = self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction) + id_max = 7 + while day_idx < id_max: + day = DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end) if day: daily_obj.append({"date": day[0].date, "value": day[0].value}) else: @@ -330,11 +400,16 @@ def current_week_array(self): return {"value": daily_obj, "begin": begin_return, "end": end} def current_week(self): + """Calculate the total value for the current week. + + Returns: + dict: A dictionary containing the total value, begin date, and end date of the current week. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date - relativedelta(weeks=1), datetime.min.time()) end = datetime.combine(yesterday_date, datetime.max.time()) - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_week = self.value_current_week + data.value logging.debug(f" current_week => {self.value_current_week}") return { @@ -343,33 +418,17 @@ def current_week(self): "end": end.strftime(self.date_format), } - # def get_week(self, year): - # logging.debug(f"[{year}] current_week") - # begin = datetime.combine(now_date - relativedelta(weeks=1), datetime.min.time()) - # end = datetime.combine(yesterday_date, datetime.max.time()) - # for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): - # 
self.value_current_week = self.value_current_week + data.value - # logging.debug(f" {self.value_current_week}") - # return { - # "value": self.value_current_week, - # "begin": begin.strftime(self.date_format), - # "end": end.strftime(self.date_format) - # } - def last_week(self): + """Calculate the total value for the last week. + + Returns: + dict: A dictionary containing the total value, begin date, and end date of the last week. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date - relativedelta(weeks=2), datetime.min.time()) end = datetime.combine(yesterday_date - relativedelta(weeks=1), datetime.max.time()) - # while day_idx < 7: - # day = self.db.get_daily_range(self.usage_point_id, begin, end, self.self.measurement_direction) - # if day: - # for data in day: - # last_week = last_week + data.value - # begin = begin - timedelta(days=1) - # end = end - timedelta(days=1) - # day_idx = day_idx + 1 - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_week = self.value_last_week + data.value logging.debug(f" last_week => {self.value_last_week}") return { @@ -379,17 +438,27 @@ def last_week(self): } def current_week_evolution(self): + """Calculate the evolution of the current week's value compared to the previous week. + + Returns: + float: The percentage change in value between the current week and the previous week. + """ if self.value_last_week != 0: self.value_current_week_evolution = ((self.value_current_week * 100) / self.value_last_week) - 100 logging.debug(f" current_week_evolution => {self.value_current_week_evolution}") return self.value_current_week_evolution def yesterday(self): + """Calculate the value for yesterday. 
+ + Returns: + dict: A dictionary containing the value, begin date, and end date of yesterday. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date, datetime.min.time()) end = datetime.combine(yesterday_date, datetime.max.time()) - data = self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction) + data = DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end) if data: self.value_yesterday = data[0].value else: @@ -402,11 +471,16 @@ def yesterday(self): } def yesterday_1(self): + """Calculate the value for the day before yesterday. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the day before yesterday. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=1), datetime.min.time()) end = datetime.combine(yesterday_date - timedelta(days=1), datetime.max.time()) - data = self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction) + data = DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end) if data: self.value_yesterday_1 = data[0].value else: @@ -419,6 +493,11 @@ def yesterday_1(self): } def yesterday_evolution(self): + """Calculate the evolution of the value for yesterday compared to the day before yesterday. + + Returns: + float: The percentage change in value between yesterday and the day before yesterday. 
+ """ self.yesterday() self.yesterday_1() if self.value_yesterday_1 != 0: @@ -427,17 +506,11 @@ def yesterday_evolution(self): return self.value_yesterday_evolution def current_week_last_year(self): - # begin = datetime.combine(yesterday - relativedelta(years=1), datetime.min.time()) - # end = datetime.combine(yesterday - relativedelta(years=1), datetime.max.time()) - # day_idx = 0 - # while day_idx < 7: - # day = self.db.get_daily_range(self.usage_point_id, begin, end, self.self.measurement_direction) - # if day: - # for data in day: - # current_week_last_year = current_week_last_year + data.value - # begin = begin - timedelta(days=1) - # end = end - timedelta(days=1) - # day_idx = day_idx + 1 + """Calculate the value for the current week of the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current week of the last year. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -445,7 +518,7 @@ def current_week_last_year(self): datetime.min.time(), ) end = datetime.combine(yesterday_date - relativedelta(years=1), datetime.max.time()) - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_week_last_year = self.value_current_week_last_year + data.value logging.debug(f" current_week_last_year => {self.value_current_week_last_year}") return { @@ -455,6 +528,11 @@ def current_week_last_year(self): } def last_month(self): + """Calculate the value for the last month. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the last month. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -462,7 +540,7 @@ def last_month(self): datetime.min.time(), ) end = datetime.combine(yesterday_date.replace(day=1) - timedelta(days=1), datetime.max.time()) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_month = self.value_last_month + day.value logging.debug(f" last_month => {self.value_last_month}") return { @@ -472,11 +550,16 @@ def last_month(self): } def current_month(self): + """Calculate the value for the current month. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current month. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date.replace(day=1), datetime.min.time()) end = yesterday_date - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_month = self.value_current_month + day.value logging.debug(f" current_month => {self.value_current_month}") return { @@ -486,11 +569,16 @@ def current_month(self): } def current_month_last_year(self): + """Calculate the value for the current month of the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current month of the last year. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date.replace(day=1), datetime.min.time()) - relativedelta(years=1) end = yesterday_date - relativedelta(years=1) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_month_last_year = self.value_current_month_last_year + day.value logging.debug(f" current_month_last_year => {self.value_current_month_last_year}") return { @@ -500,6 +588,11 @@ def current_month_last_year(self): } def current_month_evolution(self): + """Calculate the evolution of the current month compared to the same month of the previous year. + + Returns: + float: The percentage evolution of the current month. + """ if self.value_current_month_last_year != 0: self.value_current_month_evolution = ( (100 * self.value_current_month) / self.value_current_month_last_year @@ -508,6 +601,11 @@ def current_month_evolution(self): return self.value_current_month_evolution def last_month_last_year(self): + """Calculate the value for the last month of the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the last month of the last year. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -517,7 +615,7 @@ def last_month_last_year(self): end = datetime.combine(yesterday_date.replace(day=1) - timedelta(days=1), datetime.max.time()) - relativedelta( years=1 ) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_month_last_year = self.value_last_month_last_year + day.value logging.debug(f" last_month_last_year => {self.value_last_month_last_year}") return { @@ -527,6 +625,11 @@ def last_month_last_year(self): } def monthly_evolution(self): + """Calculate the monthly evolution based on the last month and the last month of the previous year. + + Returns: + float: The percentage monthly evolution. + """ self.last_month() self.last_month_last_year() if self.value_last_month_last_year != 0: @@ -535,11 +638,16 @@ def monthly_evolution(self): return self.value_monthly_evolution def current_year(self): + """Calculate the value for the current year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current year. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date.replace(month=1, day=1), datetime.min.time()) end = yesterday_date - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_year = self.value_current_year + day.value logging.debug(f" current_year => {self.value_current_year}") return { @@ -549,6 +657,11 @@ def current_year(self): } def current_year_last_year(self): + """Calculate the value for the current year of the last year. 
+ + Returns: + dict: A dictionary containing the value, begin date, and end date of the current year of the last year. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -556,7 +669,7 @@ def current_year_last_year(self): datetime.min.time(), ) end = yesterday_date - relativedelta(years=1) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_year_last_year = self.value_current_year_last_year + day.value logging.debug(f" current_year_last_year => {self.value_current_year_last_year}") return { @@ -566,6 +679,11 @@ def current_year_last_year(self): } def last_year(self): + """Calculate the value for the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the last year. + """ now_date = datetime.now(timezone.utc) begin = datetime.combine( now_date.replace(month=1, day=1) - relativedelta(years=1), @@ -573,7 +691,7 @@ def last_year(self): ) last_day_of_month = calendar.monthrange(int(begin.strftime("%Y")), 12)[1] end = datetime.combine(begin.replace(month=1, day=last_day_of_month), datetime.max.time()) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_year = self.value_last_year + day.value logging.debug(f" last_year => {self.value_last_year}") return { @@ -583,6 +701,11 @@ def last_year(self): } def yearly_evolution(self): + """Calculate the yearly evolution based on the current year and the last year. + + Returns: + float: The percentage yearly evolution. 
+ """ self.current_year() self.current_year_last_year() if self.value_last_month_last_year != 0: @@ -591,16 +714,22 @@ def yearly_evolution(self): return self.value_yearly_evolution def yesterday_hc_hp(self): + """Calculate the value for yesterday's HC and HP. + + Returns: + dict: A dictionary containing the values for HC and HP, along with the begin and end dates. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date, datetime.min.time()) end = datetime.combine(now_date, datetime.max.time()) - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): measure_type = self.get_mesure_type(day.date) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 if measure_type == "HP": - self.value_yesterday_hp = self.value_yesterday_hp + (day.value / (60 / day.interval)) + self.value_yesterday_hp = self.value_yesterday_hp + (day.value / (60 / day_interval)) if measure_type == "HC": - self.value_yesterday_hc = self.value_yesterday_hc + (day.value / (60 / day.interval)) + self.value_yesterday_hc = self.value_yesterday_hc + (day.value / (60 / day_interval)) logging.debug(f" yesterday_hc => HC : {self.value_yesterday_hc}") logging.debug(f" yesterday_hp => HP : {self.value_yesterday_hp}") return { @@ -610,6 +739,11 @@ def yesterday_hc_hp(self): } def peak_offpeak_percent(self): + """Calculate the percentage difference between peak and off-peak values. + + Returns: + float: The percentage difference between peak and off-peak values. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = yesterday_date - relativedelta(years=1) @@ -617,7 +751,7 @@ def peak_offpeak_percent(self): value_peak_offpeak_percent_hp = 0 value_peak_offpeak_percent_hc = 0 value_peak_offpeak_percent_hp_vs_hc = 0 - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): measure_type = self.get_mesure_type(day.date) if measure_type == "HP": value_peak_offpeak_percent_hp = value_peak_offpeak_percent_hp + day.value @@ -632,6 +766,15 @@ def peak_offpeak_percent(self): # STAT V2 def get_year(self, year, measure_type=None): + """Retrieve the data for a specific year. + + Args: + year (int): The year for which to retrieve the data. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) begin = datetime.combine(now_date.replace(year=year, month=1, day=1), datetime.min.time()) last_day_of_month = calendar.monthrange(year, 12)[1] @@ -641,13 +784,14 @@ def get_year(self, year, measure_type=None): ) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -655,19 +799,29 @@ def get_year(self, year, measure_type=None): } def get_year_linear(self, idx, measure_type=None): + """Retrieve the linear data for a specific year. + + Args: + idx (int): The index of the year. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) end = datetime.combine(yesterday_date - relativedelta(years=idx), datetime.max.time()) begin = datetime.combine(end - relativedelta(years=1), datetime.min.time()) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -675,6 +829,16 @@ def get_year_linear(self, idx, measure_type=None): } def get_month(self, year, month=None, measure_type=None): + """Retrieve the data for a specific month. + + Args: + year (int): The year for which to retrieve the data. + month (int, optional): The month for which to retrieve the data. Defaults to None. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) if month is None: month = int(datetime.now().strftime("%m")) @@ -686,13 +850,14 @@ def get_month(self, year, month=None, measure_type=None): ) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -700,19 +865,29 @@ def get_month(self, year, month=None, measure_type=None): } def get_month_linear(self, idx, measure_type=None): + """Retrieve the linear data for a specific month. + + Args: + idx (int): The index of the month. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) end = datetime.combine(yesterday_date - relativedelta(years=idx), datetime.max.time()) begin = datetime.combine(end - relativedelta(months=1), datetime.min.time()) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -720,6 +895,16 @@ def get_month_linear(self, idx, measure_type=None): } def get_week(self, year, month=None, measure_type=None): + """Retrieve the data for a specific week. + + Args: + year (int): The year for which to retrieve the data. + month (int, optional): The month for which to retrieve the data. Defaults to None. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) if month is None: month = int(datetime.now().strftime("%m")) @@ -740,13 +925,14 @@ def get_week(self, year, month=None, measure_type=None): ) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -754,19 +940,29 @@ def get_week(self, year, month=None, measure_type=None): } def get_week_linear(self, idx, measure_type=None): + """Retrieve the linear data for a specific week. + + Args: + idx (int): The index of the week. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) end = datetime.combine(yesterday_date - relativedelta(years=idx), datetime.max.time()) begin = datetime.combine(end - timedelta(days=7), datetime.min.time()) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: - value = value + (day.value / (60 / day.interval)) + day_interval = day.interval if hasattr(day, "interval") and day.interval != 0 else 1 + value = value + (day.value / (60 / day_interval)) return { "value": value, "begin": begin.strftime(self.date_format), @@ -774,9 +970,15 @@ def get_week_linear(self, idx, measure_type=None): } def get_price(self): - data = self.db.get_stat(self.usage_point_id, f"price_{self.measurement_direction}") + """Retrieve the price data for the measurement direction. + + Returns: + dict: A dictionary containing the price data. + """ + data = DatabaseStatistique(self.usage_point_id).get(f"price_{self.measurement_direction}") + if len(data) == 0: + return {} return json.loads(data[0].value) - # return ast.literal_eval() def get_mesure_type(self, measurement_date): """Determine the measurement type (HP or HC) based on the given date and off-peak hours. 
@@ -806,42 +1008,23 @@ def get_mesure_type(self, measurement_date): # FORMAT HOUR WITH 2 DIGIT offpeak_stop = datetime.strptime(offpeak_stop, "%H:%M") offpeak_stop = datetime.strftime(offpeak_stop, "%H:%M") - result = self.is_between(date_hour_minute, (offpeak_begin, offpeak_stop)) + result = is_between(date_hour_minute, (offpeak_begin, offpeak_stop)) if result: measure_type = "HC" return measure_type - def is_between(self, time, time_range): - """Check if a given time is between a specified time range. - - Args: - time (datetime): The time to check. - time_range (tuple): The time range represented by a tuple of two datetime objects. - - Returns: - bool: True if the time is between the time range, False otherwise. - """ - time = time.replace(":", "") - start = time_range[0].replace(":", "") - end = time_range[1].replace(":", "") - if end < start: - return time >= start or time < end - return start <= time < end - - def generate_price(self): - """Generates the price for the usage point based on the measurement data. + def generate_price(self): # noqa: C901, PLR0912, PLR0915 + """Generate the price for the usage point based on the measurement data. Returns: str: JSON string representing the calculated price. 
""" - data = self.db.get_detail_all( - usage_point_id=self.usage_point_id, measurement_direction=self.measurement_direction - ) + data = DatabaseDetail(self.usage_point_id, self.measurement_direction).get_all() result = {} last_month = "" if data: - tempo_config = self.db.get_tempo_config("price") - tempo_data = self.db.get_tempo_range(data[0].date, data[-1].date) + tempo_config = DatabaseTempo().get_config("price") + tempo_data = DatabaseTempo().get_range(data[0].date, data[-1].date) for item in data: year = item.date.strftime("%Y") month = item.date.strftime("%m") @@ -851,7 +1034,7 @@ def generate_price(self): measure_type = self.get_mesure_type(item.date) tempo_date = datetime.combine(item.date, datetime.min.time()) - interval = item.interval + interval = item.interval if hasattr(item, "interval") and item.interval != 0 else 1 if year not in result: result[year] = { "BASE": {"euro": 0, "kWh": 0, "Wh": 0}, @@ -910,7 +1093,7 @@ def generate_price(self): # TEMPO if tempo_config: hour = int(item.date.strftime("%H")) - if 6 <= hour < 22: + if TEMPO_BEGIN <= hour < TEMPO_END: measure_type = "HP" else: measure_type = "HC" @@ -927,11 +1110,12 @@ def generate_price(self): result[year]["month"][month]["TEMPO"][f"{color}_{measure_type}"]["kWh"] += kwh result[year]["month"][month]["TEMPO"][f"{color}_{measure_type}"]["euro"] += kwh * tempo_price last_month = month - self.db.set_stat( - self.usage_point_id, + DatabaseStatistique(self.usage_point_id).set( f"price_{self.measurement_direction}", json.dumps(result), ) + else: + logging.error(" => Aucune donnée en cache.") return json.dumps(result) def get_daily(self, specific_date, mesure_type): @@ -947,7 +1131,12 @@ def get_daily(self, specific_date, mesure_type): begin = datetime.combine(specific_date, datetime.min.time()) end = datetime.combine(specific_date, datetime.max.time()) value = 0 - for item in self.db.get_detail_range(self.usage_point_id, begin, end): + for item in DatabaseDetail(self.usage_point_id, 
self.measurement_direction).get_range(begin, end): if self.get_mesure_type(item.date).upper() == mesure_type.upper(): - value += item.value / (60 / item.interval) + day_interval = item.interval if hasattr(item, "interval") and item.interval != 0 else 1 + value += item.value / (60 / day_interval) return value + + def delete(self): + """Delete the data from the database.""" + DatabaseStatistique(self.usage_point_id).delete() diff --git a/src/requirements.txt b/src/requirements.txt index 4b17ed85..95a5b77a 100755 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -1,47 +1,93 @@ -aiohttp==3.9.1 ; python_version >= "3.10" and python_version < "4.0" -aiosignal==1.3.1 ; python_version >= "3.10" and python_version < "4.0" -alembic==1.13.1 ; python_version >= "3.10" and python_version < "4.0" -anyio==4.2.0 ; python_version >= "3.10" and python_version < "4.0" -art==6.1 ; python_version >= "3.10" and python_version < "4.0" -asyncio==3.4.3 ; python_version >= "3.10" and python_version < "4.0" -attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0" -certifi==2023.11.17 ; python_version >= "3.10" and python_version < "4.0" -charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" -click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" -colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" -fastapi-utils==0.2.1 ; python_version >= "3.10" and python_version < "4.0" -fastapi==0.109.0 ; python_version >= "3.10" and python_version < "4.0" -frozenlist==1.4.1 ; python_version >= "3.10" and python_version < "4.0" -greenlet==3.0.3 ; python_version >= "3.10" and (platform_machine == "aarch64" or platform_machine == "ppc64le" or platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "win32" or platform_machine == "WIN32") and python_version < "4.0" -h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" -idna==3.6 ; 
python_version >= "3.10" and python_version < "4.0" -influxdb-client==1.39.0 ; python_version >= "3.10" and python_version < "4.0" -jinja2==3.1.3 ; python_version >= "3.10" and python_version < "4.0" -mako==1.3.0 ; python_version >= "3.10" and python_version < "4.0" -markdown==3.5.2 ; python_version >= "3.10" and python_version < "4.0" -markupsafe==2.1.4 ; python_version >= "3.10" and python_version < "4.0" -mergedeep==1.3.4 ; python_version >= "3.10" and python_version < "4.0" -multidict==6.0.4 ; python_version >= "3.10" and python_version < "4.0" -paho-mqtt==1.6.1 ; python_version >= "3.10" and python_version < "4.0" -psycopg2-binary==2.9.9 ; python_version >= "3.10" and python_version < "4.0" -pydantic==1.10.14 ; python_version >= "3.10" and python_version < "4.0" -pymysql==1.1.0 ; python_version >= "3.10" and python_version < "4.0" -pypdf==3.17.4 ; python_version >= "3.10" and python_version < "4.0" -python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" -python-multipart==0.0.6 ; python_version >= "3.10" and python_version < "4.0" -pytz==2023.3.post1 ; python_version >= "3.10" and python_version < "4.0" -pyyaml==6.0.1 ; python_version >= "3.10" and python_version < "4.0" -rauth==0.7.3 ; python_version >= "3.10" and python_version < "4.0" -reactivex==4.0.4 ; python_version >= "3.10" and python_version < "4.0" -requests==2.31.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==69.0.3 ; python_version >= "3.10" and python_version < "4.0" -six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" -sniffio==1.3.0 ; python_version >= "3.10" and python_version < "4.0" -sqlalchemy==1.4.51 ; python_version >= "3.10" and python_version < "4.0" -starlette==0.35.1 ; python_version >= "3.10" and python_version < "4.0" -typing-extensions==4.9.0 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.1.0 ; python_version >= "3.10" and python_version < "4.0" -uvicorn==0.25.0 ; python_version >= "3.10" and python_version 
< "4.0" -waitress==2.1.2 ; python_version >= "3.10" and python_version < "4.0" -websocket-client==1.7.0 ; python_version >= "3.10" and python_version < "4.0" -yarl==1.9.4 ; python_version >= "3.10" and python_version < "4.0" +aiohttp==3.9.5 ; python_full_version == "3.12.3" +aiosignal==1.3.1 ; python_full_version == "3.12.3" +alembic==1.13.2 ; python_full_version == "3.12.3" +anyio==3.7.1 ; python_full_version == "3.12.3" +art==6.2 ; python_full_version == "3.12.3" +asgiref==3.8.1 ; python_full_version == "3.12.3" +asyncio==3.4.3 ; python_full_version == "3.12.3" +attrs==23.2.0 ; python_full_version == "3.12.3" +certifi==2024.7.4 ; python_full_version == "3.12.3" +charset-normalizer==3.3.2 ; python_full_version == "3.12.3" +click==8.1.7 ; python_full_version == "3.12.3" +colorama==0.4.6 ; python_full_version == "3.12.3" and (sys_platform == "win32" or platform_system == "Windows") +deepdiff==7.0.1 ; python_full_version == "3.12.3" +deprecated==1.2.14 ; python_full_version == "3.12.3" +dnspython==2.6.1 ; python_full_version == "3.12.3" +email-validator==2.2.0 ; python_full_version == "3.12.3" +fastapi-cli==0.0.4 ; python_full_version == "3.12.3" +fastapi-utils==0.2.1 ; python_full_version == "3.12.3" +fastapi==0.111.1 ; python_full_version == "3.12.3" +frozenlist==1.4.1 ; python_full_version == "3.12.3" +googleapis-common-protos==1.63.2 ; python_full_version == "3.12.3" +greenlet==3.0.3 ; python_full_version == "3.12.3" and (platform_machine == "aarch64" or platform_machine == "ppc64le" or platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "win32" or platform_machine == "WIN32") +grpcio==1.64.1 ; python_full_version == "3.12.3" +h11==0.14.0 ; python_full_version == "3.12.3" +httpcore==1.0.5 ; python_full_version == "3.12.3" +httptools==0.6.1 ; python_full_version == "3.12.3" +httpx==0.27.0 ; python_full_version == "3.12.3" +idna==3.7 ; python_full_version == "3.12.3" +importlib-metadata==7.1.0 ; 
python_full_version == "3.12.3" +influxdb-client==1.44.0 ; python_full_version == "3.12.3" +jinja2==3.1.4 ; python_full_version == "3.12.3" +mako==1.3.5 ; python_full_version == "3.12.3" +markdown-it-py==3.0.0 ; python_full_version == "3.12.3" +markdown==3.6 ; python_full_version == "3.12.3" +markupsafe==2.1.5 ; python_full_version == "3.12.3" +mdurl==0.1.2 ; python_full_version == "3.12.3" +mergedeep==1.3.4 ; python_full_version == "3.12.3" +multidict==6.0.5 ; python_full_version == "3.12.3" +opentelemetry-api==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-distro==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-exporter-otlp-proto-common==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-exporter-otlp-proto-grpc==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-exporter-otlp-proto-http==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-exporter-otlp==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-instrumentation-asgi==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-instrumentation-fastapi==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-instrumentation-requests==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-instrumentation-sqlalchemy==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-instrumentation==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-proto==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-sdk==1.25.0 ; python_full_version == "3.12.3" +opentelemetry-semantic-conventions==0.46b0 ; python_full_version == "3.12.3" +opentelemetry-util-http==0.46b0 ; python_full_version == "3.12.3" +ordered-set==4.1.0 ; python_full_version == "3.12.3" +packaging==24.1 ; python_full_version == "3.12.3" +paho-mqtt==1.6.1 ; python_full_version == "3.12.3" +protobuf==4.25.3 ; python_full_version == "3.12.3" +psycopg2-binary==2.9.9 ; python_full_version == "3.12.3" +pydantic==1.10.17 ; python_full_version == "3.12.3" +pygments==2.18.0 ; python_full_version == "3.12.3" +pymysql==1.1.1 ; 
python_full_version == "3.12.3" +pypdf==3.17.4 ; python_full_version == "3.12.3" +python-dateutil==2.9.0.post0 ; python_full_version == "3.12.3" +python-dotenv==1.0.1 ; python_full_version == "3.12.3" +python-multipart==0.0.7 ; python_full_version == "3.12.3" +pytz==2023.4 ; python_full_version == "3.12.3" +pyyaml==6.0.1 ; python_full_version == "3.12.3" +rauth==0.7.3 ; python_full_version == "3.12.3" +reactivex==4.0.4 ; python_full_version == "3.12.3" +requests==2.32.3 ; python_full_version == "3.12.3" +rich==13.7.1 ; python_full_version == "3.12.3" +ruamel-yaml-clib==0.2.8 ; platform_python_implementation == "CPython" and python_full_version == "3.12.3" +ruamel-yaml==0.18.6 ; python_full_version == "3.12.3" +setuptools==71.1.0 ; python_full_version == "3.12.3" +shellingham==1.5.4 ; python_full_version == "3.12.3" +six==1.16.0 ; python_full_version == "3.12.3" +sniffio==1.3.1 ; python_full_version == "3.12.3" +sqlalchemy==1.4.52 ; python_full_version == "3.12.3" +starlette==0.37.2 ; python_full_version == "3.12.3" +typer==0.12.3 ; python_full_version == "3.12.3" +typing-extensions==4.12.2 ; python_full_version == "3.12.3" +unidecode==1.3.8 ; python_full_version == "3.12.3" +urllib3==2.2.2 ; python_full_version == "3.12.3" +uvicorn==0.25.0 ; python_full_version == "3.12.3" +uvicorn[standard]==0.25.0 ; python_full_version == "3.12.3" +uvloop==0.19.0 ; (sys_platform != "win32" and sys_platform != "cygwin") and platform_python_implementation != "PyPy" and python_full_version == "3.12.3" +waitress==2.1.2 ; python_full_version == "3.12.3" +watchfiles==0.22.0 ; python_full_version == "3.12.3" +websocket-client==1.8.0 ; python_full_version == "3.12.3" +websockets==12.0 ; python_full_version == "3.12.3" +wrapt==1.16.0 ; python_full_version == "3.12.3" +yarl==1.9.4 ; python_full_version == "3.12.3" +zipp==3.19.2 ; python_full_version == "3.12.3" diff --git a/src/routers/account.py b/src/routers/account.py index 457c2eeb..04c64c7b 100644 --- a/src/routers/account.py +++ 
b/src/routers/account.py @@ -1,5 +1,11 @@ +"""Account routes.""" + +import inspect + from fastapi import APIRouter, Request +from opentelemetry import trace +from config.main import APP_CONFIG from models.ajax import Ajax ROUTER = APIRouter(tags=["Account"], include_in_schema=False) @@ -8,18 +14,25 @@ @ROUTER.post("/configuration/{usage_point_id}") @ROUTER.post("/configuration/{usage_point_id}/", include_in_schema=False) async def configuration(request: Request, usage_point_id): - form = await request.form() - return Ajax(usage_point_id).configuration(form) + """Account configuration.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + form = await request.form() + return Ajax(usage_point_id).configuration(form) @ROUTER.post("/new_account") @ROUTER.post("/new_account/", include_in_schema=False) async def new_account(request: Request): - form = await request.form() - return Ajax().new_account(form) + """Create account.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + form = await request.form() + return Ajax().new_account(form) @ROUTER.get("/account_status/{usage_point_id}") @ROUTER.get("/account_status/{usage_point_id}/", include_in_schema=False) def account_status(usage_point_id): - return Ajax(usage_point_id).account_status() + """Get account status.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax(usage_point_id).account_status() diff --git a/src/routers/action.py b/src/routers/action.py index 7d2efddf..2d96a2c7 100644 --- a/src/routers/action.py +++ b/src/routers/action.py @@ -1,5 +1,10 @@ +"""Ajax call.""" +import inspect + from fastapi import APIRouter, Path +from opentelemetry import trace +from config.main import APP_CONFIG from doc import DOCUMENTATION from models.ajax import Ajax @@ -15,7 +20,9 @@ 
@ROUTER.get("/import/{usage_point_id}/", include_in_schema=False) def import_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Force l'importation des données depuis la passerelle.""" - return Ajax(usage_point_id).import_data() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).import_data() @ROUTER.get( @@ -45,21 +52,26 @@ def import_data( - home_assistant - influxdb """ - return Ajax(usage_point_id).import_data(target) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax(usage_point_id).import_data(target) @ROUTER.get("/reset/{usage_point_id}", summary="Efface les données du point de livraison.") @ROUTER.get("/reset/{usage_point_id}/", include_in_schema=False) def reset_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Efface les données du point de livraison.""" - return Ajax(usage_point_id).reset_all_data() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).reset_all_data() @ROUTER.get("/delete/{usage_point_id}", summary="Supprime le point de livraison.") @ROUTER.get("/delete/{usage_point_id}/", include_in_schema=False) def delete_all_data(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Supprime le point de livraison.""" - return Ajax(usage_point_id).delete_all_data() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).delete_all_data() @ROUTER.get( @@ -69,7 +81,9 @@ def delete_all_data(usage_point_id: str = Path(..., 
description=DOCUMENTATION["u @ROUTER.get("/reset_gateway/{usage_point_id}/", include_in_schema=False) def reset_gateway(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Efface le cache du point de livraison sur la passerelle.""" - return Ajax(usage_point_id).reset_gateway() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return Ajax(usage_point_id).reset_gateway() @ROUTER.get( @@ -91,7 +105,11 @@ def reset_data( - production_detail - consumption_max_power """ - return Ajax(usage_point_id).reset_data(target, date) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("target", target) + trace.get_current_span().set_attribute("date", date) + return Ajax(usage_point_id).reset_data(target, date) @ROUTER.get( @@ -116,7 +134,11 @@ def blacklist_data( - production_detail - consumption_max_power """ - return Ajax(usage_point_id).blacklist(target, date) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("target", target) + trace.get_current_span().set_attribute("date", date) + return Ajax(usage_point_id).blacklist(target, date) @ROUTER.get( @@ -163,4 +185,8 @@ def fetch_data( - production_detail - consumption_max_power """ - return Ajax(usage_point_id).fetch(target, date) + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("target", target) + trace.get_current_span().set_attribute("date", date) + return 
Ajax(usage_point_id).fetch(target, date) diff --git a/src/routers/data.py b/src/routers/data.py index ae3493e0..67da62ce 100644 --- a/src/routers/data.py +++ b/src/routers/data.py @@ -1,41 +1,98 @@ +"""Return data from cache.""" + import ast +import inspect from datetime import datetime from fastapi import APIRouter, HTTPException, Path, Request from fastapi.responses import HTMLResponse +from opentelemetry import trace +from config.main import APP_CONFIG +from database.addresses import DatabaseAddresses +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.max_power import DatabaseMaxPower +from database.usage_points import DatabaseUsagePoints from doc import DOCUMENTATION -from init import DB from models.ajax import Ajax ROUTER = APIRouter(tags=["Données"]) +@ROUTER.get("/contract/{usage_point_id}") +@ROUTER.get("/contract/{usage_point_id}/", include_in_schema=False) +def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): + """Renvoie les information du contrat remonter par Enedis.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + if DatabaseUsagePoints(usage_point_id).get() is not None: + data = DatabaseContracts(usage_point_id).get() + if data is None: + msg = ( + f"Aucune information de contrat disponible en cache pour Le point de livraison '{usage_point_id}'" + ) + raise HTTPException( + status_code=404, + detail=msg, + ) + return dict(sorted(data.__dict__.items())) + raise HTTPException( + status_code=404, + detail=f"Le point de livraison '{usage_point_id}' est inconnu!", + ) + + +@ROUTER.get("/addresse/{usage_point_id}") +@ROUTER.get("/addresse/{usage_point_id}/", include_in_schema=False) +def get_addresse(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): + 
"""Renvoie les information postal remonter par Enedis.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + if DatabaseUsagePoints(usage_point_id).get() is not None: + data = DatabaseAddresses(usage_point_id).get() + if data is None: + msg = ( + f"Aucune information postale disponible en cache pour Le point de livraison '{usage_point_id}'", + ) + raise HTTPException(status_code=404, detail=msg) + return dict(sorted(data.__dict__.items())) + raise HTTPException( + status_code=404, + detail=f"Le point de livraison '{usage_point_id}' est inconnu!", + ) + + @ROUTER.put("/tempo", include_in_schema=False) @ROUTER.put("/tempo/", include_in_schema=False) def put_tempo(): """Force la récupération des données Tempo.""" - return Ajax().fetch_tempo() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().fetch_tempo() @ROUTER.get("/tempo", summary="Retourne les données Tempo du cache local.") @ROUTER.get("/tempo/", include_in_schema=False) def tempo(): """Retourne les données Tempo du cache local.""" - return Ajax().get_tempo() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().get_tempo() @ROUTER.put("/ecowatt", include_in_schema=False) @ROUTER.put("/ecowatt/", include_in_schema=False) -def ecowatt(): - return Ajax().fetch_ecowatt() +def put_ecowatt(): + """Update ecowatt.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().fetch_ecowatt() @ROUTER.get("/ecowatt", summary="Retourne les données Ecowatt du cache local.") @ROUTER.get("/ecowatt/", include_in_schema=False) def ecowatt(): """Retourne les données Ecowatt du cache local.""" - return Ajax().get_ecowatt() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().get_ecowatt() @ROUTER.put( @@ -46,10 +103,11 
@@ def ecowatt(): @ROUTER.put("/price/{usage_point_id}/", include_in_schema=False) def fetch_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Mise à jour le cache local du comparateur d'abonnement.""" - usage_point_id = usage_point_id.strip() - if DB.get_usage_point(usage_point_id) is not None: - return ast.literal_eval(Ajax(usage_point_id).generate_price()) - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + usage_point_id = usage_point_id.strip() + if DatabaseUsagePoints(usage_point_id).get() is not None: + return ast.literal_eval(Ajax(usage_point_id).generate_price()) raise HTTPException( status_code=404, detail=f"Le point de livraison '{usage_point_id}' est inconnu!", @@ -63,10 +121,11 @@ def fetch_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage @ROUTER.get("/price/{usage_point_id}/", include_in_schema=False) def get_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Retourne les données du cache local du comparateur d'abonnement.""" - usage_point_id = usage_point_id.strip() - if DB.get_usage_point(usage_point_id) is not None: - return Ajax(usage_point_id).get_price() - else: + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + usage_point_id = usage_point_id.strip() + if DatabaseUsagePoints(usage_point_id).get() is not None: + return Ajax(usage_point_id).get_price() raise HTTPException( status_code=404, detail=f"Le point de livraison '{usage_point_id}' est inconnu!", @@ -88,20 +147,23 @@ def get_data_daily( end: str = Path(..., description=DOCUMENTATION["end"]), ): """Retourne les données du cache local de consommation journalière.""" - usage_point_id = usage_point_id.strip() - begin = 
datetime.strptime(begin, "%Y-%m-%d") - end = datetime.strptime(end, "%Y-%m-%d") - if measurement_direction not in ["consumption", "production"]: - raise HTTPException( - status_code=404, - detail=f"'measurement_direction' inconnu, valeur possible consumption/production", - ) - data = DB.get_daily_range(usage_point_id, begin, end, measurement_direction) - output = {"unit": "w", "data": {}} - if data is not None: - for d in data: - output["data"][d.date] = d.value - return output + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + if measurement_direction not in ["consumption", "production"]: + raise HTTPException( + status_code=404, + detail="'measurement_direction' inconnu, valeur possible consumption/production", + ) + data = DatabaseDaily(usage_point_id, measurement_direction).get_range(begin=begin, end=end) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.date] = d.value + return output @ROUTER.get( @@ -119,20 +181,50 @@ def get_data_detail( end: str = Path(..., description=DOCUMENTATION["end"]), ): """Retourne les données du cache local de consommation détaillée.""" - usage_point_id = usage_point_id.strip() - begin = datetime.strptime(begin, "%Y-%m-%d") - end = datetime.strptime(end, "%Y-%m-%d") - if measurement_direction not in ["consumption", "production"]: - raise HTTPException( - status_code=404, - detail=f"'measurement_direction' inconnu, valeur possible consumption/production", - ) - data = DB.get_detail_range(usage_point_id, begin, end, measurement_direction) - output = {"unit": "w", "data": {}} - if data is not None: - for d in data: - output["data"][d.date] = 
d.value - return output + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + if measurement_direction not in ["consumption", "production"]: + raise HTTPException( + status_code=404, + detail="'measurement_direction' inconnu, valeur possible consumption/production", + ) + data = DatabaseDetail(usage_point_id, measurement_direction).get_range(begin=begin, end=end) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.date] = d.value + return output + + +@ROUTER.get( + "/max_power/{usage_point_id}/{begin}/{end}", + summary="Retourne la puissance maximun.", +) +@ROUTER.get( + "/max_power/{usage_point_id}/{begin}/{end}/", + include_in_schema=False, +) +def get_max_power( + usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"]), + begin: str = Path(..., description=DOCUMENTATION["begin"]), + end: str = Path(..., description=DOCUMENTATION["end"]), +): + """Retourne les données du cache local de puissance maximal.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + data = DatabaseMaxPower(usage_point_id).get_range(begin=begin, end=end) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.event_date] = d.value + return output @ROUTER.get( @@ -150,11 +242,15 @@ def get_data( usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"]), 
measurement_direction: str = Path(..., description=DOCUMENTATION["measurement_direction"]), ): - usage_point_id = usage_point_id.strip() - if DB.get_usage_point(usage_point_id) is not None: - return Ajax(usage_point_id).datatable(measurement_direction, request) - else: - raise HTTPException( - status_code=404, - detail=f"Le point de livraison '{usage_point_id}' est inconnu!", - ) + """Retourne les données du cache local de consommation journalière.""" + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + usage_point_id = usage_point_id.strip() + if DatabaseUsagePoints(usage_point_id).get() is not None: + return Ajax(usage_point_id).datatable(measurement_direction, request) + else: + raise HTTPException( + status_code=404, + detail=f"Le point de livraison '{usage_point_id}' est inconnu!", + ) diff --git a/src/routers/html.py b/src/routers/html.py index de81f33d..adb67ffa 100644 --- a/src/routers/html.py +++ b/src/routers/html.py @@ -1,8 +1,13 @@ +"""HTML.""" + +import inspect + from fastapi import APIRouter, Request from fastapi.responses import FileResponse, HTMLResponse +from opentelemetry import trace -from dependencies import APPLICATION_PATH -from init import CONFIG, DB +from config.main import APP_CONFIG +from database import DB from models.ajax import Ajax from templates.index import Index from templates.usage_point import UsagePoint @@ -12,29 +17,29 @@ @ROUTER.get("/favicon.ico") async def favicon(): - """ - This function handles the endpoint '/favicon.ico' and returns the favicon.ico file as a response. + """This function handles the endpoint '/favicon.ico' and returns the favicon.ico file as a response. Returns: - FileResponse: The favicon.ico file as a response. 
""" - return FileResponse(f"{APPLICATION_PATH}/static/favicon.ico") + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return FileResponse(f"{APP_CONFIG.application_path}/static/favicon.ico") @ROUTER.get("/", response_class=HTMLResponse) def main(): - """This function handles the root endpoint '/' and returns the HTML response generated by the 'display' method of the 'Index' class. + """Handle the root endpoint '/' and return the HTML generated by the 'display' method of the 'Index' class. Returns: - HTMLResponse: The HTML response generated by the 'display' method of the 'Index' class. """ - return Index(CONFIG, DB).display() + return Index(DB).display() @ROUTER.get("/usage_point_id/{usage_point_id}", response_class=HTMLResponse) @ROUTER.get("/usage_point_id/{usage_point_id}/", response_class=HTMLResponse) def usage_point_id(usage_point_id): - """This function handles the endpoint '/usage_point_id/{usage_point_id}' and '/usage_point_id/{usage_point_id}/' and returns the HTML response generated by the 'display' method of the 'UsagePoint' class. + """Handle the endpoint '/usage_point_id/{usage_point_id}' and '/usage_point_id/{usage_point_id}/'. Parameters: - usage_point_id (str): The ID of the usage point. @@ -42,7 +47,9 @@ def usage_point_id(usage_point_id): Returns: - HTMLResponse: The HTML response generated by the 'display' method of the 'UsagePoint' class. 
""" - return UsagePoint(usage_point_id).display() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + return UsagePoint(usage_point_id).display() @ROUTER.get("/datatable/{usage_point_id}/{measurement_direction}") @@ -61,31 +68,7 @@ def datatable(request: Request, usage_point_id, measurement_direction): Example: datatable(request, "usage_point_id", "measurement_direction") """ - return Ajax(usage_point_id).datatable(measurement_direction, request) - - -# ######################################################################################################################## -# # SWAGGER -# @ROUTER.get(f"/swagger", response_class=HTMLResponse, include_in_schema=False) -# def swagger(): -# data = '' -# html_content = html_return_fullscreen(body=data, footer_type="consent") -# return html_content -# -# -# ######################################################################################################################## -# # REDOC -# @ROUTER.get(f"/redocs", response_class=HTMLResponse, include_in_schema=False) -# def swagger(): -# data = '' -# html_content = html_return_fullscreen(body=data, footer_type="consent") -# return html_content -# -# from jinja2 import Template -# def html_return_fullscreen(body, footer_type="donation"): -# with open(f'/app/templates/html/index.html') as file_: -# index_template = Template(file_.read()) -# html = index_template.render( -# body=body, -# ) -# return html + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + trace.get_current_span().set_attribute("usage_point_id", usage_point_id) + trace.get_current_span().set_attribute("measurement_direction", measurement_direction) + return Ajax(usage_point_id).datatable(measurement_direction, request) diff --git a/src/routers/info.py b/src/routers/info.py index 2bcffe00..92e86099 100644 --- a/src/routers/info.py 
+++ b/src/routers/info.py @@ -1,10 +1,14 @@ +"""Routers pour les informations générales.""" + +import inspect from typing import Optional from fastapi import APIRouter from fastapi.responses import HTMLResponse from pydantic import BaseModel -from init import CONFIG, DB +from config.main import APP_CONFIG +from database import DB from models.ajax import Ajax ROUTER = APIRouter(tags=["Infos"]) @@ -53,4 +57,5 @@ class GatewayStatus(BaseModel): @ROUTER.get("/gateway_status/", response_model=GatewayStatus, include_in_schema=False) def gateway_status(): """Remonte l'état de la passerelle MyElectricalData.""" - return Ajax().gateway_status() + with APP_CONFIG.tracer.start_as_current_span(f"{__name__}.{inspect.currentframe().f_code.co_name}"): + return Ajax().gateway_status() diff --git a/src/templates/config.example.yaml b/src/templates/config.example.yaml new file mode 100755 index 00000000..9baf9de1 --- /dev/null +++ b/src/templates/config.example.yaml @@ -0,0 +1,97 @@ +backend: + uri: sqlite:////data/myelectricaldata.db +gateway: + url: myelectricaldata.fr + ssl: true +home_assistant: + enable: false + discovery_prefix: homeassistant +home_assistant_ws: + enable: false + ssl: false + token: '' + url: ws://localhost:8123 + purge: false + batch_size: 1000 + max_date: +influxdb: + enable: false + scheme: http + hostname: localhost + port: 8086 + token: my-token + org: myorg + bucket: mybucket + method: synchronous + timezone: UTC + wipe: false + batching_options: + batch_size: 1000 + flush_interval: 1000 + jitter_interval: 0 + retry_interval: 5000 + max_retry_time: '180_000' + max_retries: 5 + max_retry_delay: '125_000' + exponential_base: 2 +logging: + log_format: '%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s' + log_format_date: '%Y-%m-%d %H:%M:%S' + log2file: false + log_level: 20 + debug: false + log_http: false +mqtt: + enable: false + hostname: localhost + port: 1883 + username: '' + password: '' + prefix: myelectricaldata + client_id: myelectricaldata + 
retain: true + qos: 0 + cert: false +myelectricaldata: + MON_POINT_DE_LIVRAISON: + enable: true + name: MON_POINT_DE_LIVRAISON + token: VOTRE_TOKEN_MYELECTRICALDATA + cache: true + plan: BASE + consumption: true + consumption_detail: true + consumption_max_power: true + consumption_price_hc: 0 + consumption_price_hp: 0 + consumption_price_base: 0 + consumption_max_date: '' + consumption_detail_max_date: '' + production: false + production_detail: false + production_max_date: '' + production_detail_max_date: '' + production_price: 0 + offpeak_hours_0: '' + offpeak_hours_1: '' + offpeak_hours_2: '' + offpeak_hours_3: '' + offpeak_hours_4: '' + offpeak_hours_5: '' + offpeak_hours_6: '' + refresh_addresse: false + refresh_contract: false +opentelemetry: + enable: false + service_name: myelectricaldata + endpoint: http://localhost:4317 + environment: production + extension: + - sqlalchemy + - fastapi +server: # Configuration du serveur web. + cidr: 0.0.0.0 + port: 5000 + certfile: '' + keyfile: '' + cycle: 14400 diff --git a/src/templates/index.py b/src/templates/index.py index a5f9306b..eacec219 100644 --- a/src/templates/index.py +++ b/src/templates/index.py @@ -1,8 +1,10 @@ +"""Index HTML.""" +from pathlib import Path + import markdown from jinja2 import Template -from dependencies import APPLICATION_PATH -from templates.loading import Loading +from config.main import APP_CONFIG from templates.models.configuration import Configuration from templates.models.menu import Menu from templates.models.sidemenu import SideMenu @@ -10,10 +12,11 @@ class Index: - def __init__(self, config, db): - self.config = config + """Index HTML.""" + + def __init__(self, db): + self.config = APP_CONFIG self.db = db - self.application_path = APPLICATION_PATH self.usage_point_select = UsagePointSelect(self.config, self.db, choice=True) self.side_menu = SideMenu() self.menu = Menu( @@ -24,33 +27,38 @@ def __init__(self, config, db): } } ) - self.configuration_div = Configuration(self.db, 
"Ajout d'un point de livraison", display_usage_point_id=True) + self.configuration_div = Configuration("Ajout d'un point de livraison", display_usage_point_id=True) def display(self): - # if DB.lock_status(): - # return Loading().display() - # else: - with open(f"{self.application_path}/templates/md/index.md") as file_: + """Display Index.""" + with Path(f"{APP_CONFIG.application_path}/templates/md/index.md").open(encoding="UTF-8") as file_: homepage_template = Template(file_.read()) body = homepage_template.render() body = markdown.markdown(body, extensions=["fenced_code", "codehilite"]) - with open(f"{self.application_path}/templates/html/index.html") as file_: + with Path(f"{APP_CONFIG.application_path}/templates/html/index.html").open(encoding="UTF-8") as file_: index_template = Template(file_.read()) - html = index_template.render( - select_usage_points=self.usage_point_select.html(), - head=open(f"{self.application_path}/templates/html/head.html").read(), - body=body, - side_menu=self.side_menu.html(), - javascript=( - self.configuration_div.javascript() - + self.side_menu.javascript() - + self.usage_point_select.javascript() - + open(f"{self.application_path}/templates/js/notif.js").read() - + open(f"{self.application_path}/templates/js/loading.js").read() - + open(f"{self.application_path}/templates/js/gateway_status.js").read() - ), - configuration=self.configuration_div.html().strip(), - menu=self.menu.html(), - ) + + with Path(f"{APP_CONFIG.application_path}/templates/html/head.html").open(encoding="UTF-8") as head: + with Path(f"{APP_CONFIG.application_path}/templates/js/notif.js").open(encoding="UTF-8") as notif: + with Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8") as loading: + with Path(f"{APP_CONFIG.application_path}/templates/js/gateway_status.js").open( + encoding="UTF-8" + ) as gateway_status: + html = index_template.render( + select_usage_points=self.usage_point_select.html(), + head=head.read(), + 
body=body, + side_menu=self.side_menu.html(), + javascript=( + self.configuration_div.javascript() + + self.side_menu.javascript() + + self.usage_point_select.javascript() + + notif.read() + + loading.read() + + gateway_status.read() + ), + configuration=self.configuration_div.html().strip(), + menu=self.menu.html(), + ) return html diff --git a/src/templates/js/datatable.js b/src/templates/js/datatable.js index f79f96c0..6c122b35 100644 --- a/src/templates/js/datatable.js +++ b/src/templates/js/datatable.js @@ -16,6 +16,7 @@ $(document.body).on('click', '.datatable_button', function () { }) .done(function (data) { data = $.parseJSON(JSON.stringify(data)) + console.log(data); if (tag.includes("detail") && type != "reset") { setTimeout(function () { $('#dataTableConsommationDetail').DataTable(datatable_consumption_detail).ajax.reload(); diff --git a/src/templates/js/gateway_status.js b/src/templates/js/gateway_status.js index dd284580..2fc1ed2a 100644 --- a/src/templates/js/gateway_status.js +++ b/src/templates/js/gateway_status.js @@ -44,7 +44,7 @@ if (document.URL.indexOf("/usage_point_id/") >= 0) { var last_call = data["last_call"] } if(information === undefined) { - information = "Soucis sur le compte
Vérifier les logs ou votre configuration" + information = "Erreur détectée." } content = "" + "" + diff --git a/src/templates/js/usage_point_configuration.js b/src/templates/js/usage_point_configuration.js index 990e9995..b1bcec63 100644 --- a/src/templates/js/usage_point_configuration.js +++ b/src/templates/js/usage_point_configuration.js @@ -121,11 +121,12 @@ function sendForm() { if ($('#formConfiguration').valid()) { $.LoadingOverlay("show", loading); var formData = { {{configurationInput}} }; - var usage_poind_id = $('#usage_point_id').val() + var usage_poind_id = $('#configuration_usage_point_id').val() if (usage_poind_id == undefined) { - var url = "/new_account" - }else{ + var usage_poind_id = $('#usage_point_id').val() var url = "/configuration/"+$('#usage_point_id').val() + }else{ + var url = "/new_account" } $.ajax({ type: "POST", @@ -163,4 +164,4 @@ var $dialog = $('
').dialog({ $(".help").click(function () { $dialog.dialog('open'); $dialog.html($(this).attr("alt")); -}); \ No newline at end of file +}); diff --git a/src/templates/loading.py b/src/templates/loading.py index a8482a8e..1fe65172 100644 --- a/src/templates/loading.py +++ b/src/templates/loading.py @@ -1,22 +1,26 @@ -import __main__ as app +"""Loading Screen.""" + +from pathlib import Path + from jinja2 import Template -from dependencies import APPLICATION_PATH -from init import DB +from config.main import APP_CONFIG from templates.models.configuration import Configuration class Loading: + """Loading Screen.""" + def __init__(self): - self.application_path = APPLICATION_PATH - self.configuration_div = Configuration(DB, "Page de chargement", display_usage_point_id=True) + self.configuration_div = Configuration("Page de chargement", display_usage_point_id=True) def display(self): - with open(f"{self.application_path}/templates/html/loading.html") as file_: + """Display Loading Screen.""" + with Path(f"{APP_CONFIG.application_path}/templates/html/loading.html").open(encoding="UTF-8") as file_: index_template = Template(file_.read()) html = index_template.render( - head=open(f"{self.application_path}/templates/html/head.html").read(), - javascript=(open(f"{self.application_path}/templates/js/loading.js").read()), + head=Path(f"{APP_CONFIG.application_path}/templates/html/head.html").open(encoding="UTF-8").read(), + javascript=(Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8").read()), configuration=self.configuration_div.html().strip(), ) return html diff --git a/src/templates/models/configuration.py b/src/templates/models/configuration.py index 38af4d1d..acbeaa2b 100644 --- a/src/templates/models/configuration.py +++ b/src/templates/models/configuration.py @@ -1,15 +1,24 @@ +"""Configation of usage point.""" + import datetime +from pathlib import Path +import pytz from jinja2 import Template from mergedeep import Strategy, merge 
-from dependencies import APPLICATION_PATH, str2bool +from config.main import APP_CONFIG +from database.contracts import DatabaseContracts +from database.usage_points import DatabaseUsagePoints +from utils import str2bool + +TIMEZONE = pytz.timezone("Europe/Paris") class Configuration: - def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False): - self.db = db - self.application_path = APPLICATION_PATH + """Represents the configuration settings for the application.""" + + def __init__(self, title="", usage_point_id=0, display_usage_point_id=False): self.title = title self.usage_point_id = usage_point_id self.display_usage_point_id = display_usage_point_id @@ -83,13 +92,13 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement du quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "consumption_detail": { "title": "Consommation détaillée", - "help": "Active/Désactive la récupération de la consommation détaillée.

ATTENTION, pour " - "fonctionner il vous faut activer le relevé de consommation horaire sur le site d'Enedis" + "help": "Active/Désactive la récupération de la consommation détaillée.

ATTENTION, " + "pour fonctionner il vous faut activer le relevé de consommation horaire sur le site d'Enedis" "Plus d'informations sont disponibles ici", "type": True, "default": True, @@ -101,7 +110,7 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement du quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "consumption_price_hc": { @@ -159,14 +168,14 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement de quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "production_detail": { "title": "Production détaillée", "help": "Active/Désactive la récupération de la production détaillée via vos panneaux solaires." - "

ATTENTION, pour fonctionner il vous faut activer le relevé de consommation horaire" - "sur le site d'Enedis
Plus d'informations sont disponibles " + "

ATTENTION, pour fonctionner il vous faut activer le relevé de consommation " + "horaire sur le site d'Enedis
Plus d'informations sont disponibles " "ici", "type": True, "default": False, @@ -178,7 +187,7 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement de quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "production_price": { @@ -206,16 +215,17 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) strategy=Strategy.ADDITIVE, ) - def html(self): + def html(self): # noqa: PLR0912, PLR0912, PLR0915, C901 + """Generate the HTML representation of the configuration.""" current_cat = "" if self.usage_point_id != 0: configuration = f"""
+ +
""" - config = self.db.get_usage_point(self.usage_point_id) - contract = self.db.get_contract(self.usage_point_id) + config = DatabaseUsagePoints(self.usage_point_id).get() + contract = DatabaseContracts(self.usage_point_id).get() current_cat = "" for cat, cat_data in self.config.items(): for key, data in cat_data.items(): @@ -238,7 +248,7 @@ def html(self): configuration += f""" """ configuration += "" - elif type(var_type) == bool: + elif isinstance(var_type, bool): checked = "" value = str2bool(value) if value: @@ -246,22 +256,27 @@ def html(self): configuration += f""" - """ + """ if "help" in data: configuration += f""" """ configuration += "" - elif type(var_type) == str or type(var_type) == float: + elif isinstance(var_type, (str, float)): + if value is None: + value = "" configuration += f""" - """ + """ if "help" in data: configuration += f""" """ configuration += "" - elif type(var_type) == list: - configuration += f' + - """ + """ if "help" in data: configuration += f""" """ @@ -288,8 +304,8 @@ def html(self): else: configuration = f"""
help_outline
{title}help_outline
{title} + help_outline
{title}
{title}
{title}help_outline
+ +
""" for cat, cat_data in self.config.items(): for key, data in cat_data.items(): @@ -298,6 +314,7 @@ def html(self): current_cat = cat title = data["title"] var_type = data["type"] + default = False if "default" in data: default = data["default"] if var_type is None: @@ -308,44 +325,50 @@ def html(self): configuration += f""" """ configuration += "" - elif type(var_type) == bool: + elif isinstance(var_type, bool): checked = "" if default: checked = "checked" configuration += f""" - """ + """ if "help" in data: configuration += f""" """ - configuration += f"" - elif type(var_type) == str or type(var_type) == float: + configuration += "" + elif isinstance(var_type, (str, float)): configuration += f""" - """ + """ if "help" in data: configuration += f""" """ - configuration += f"" - elif type(var_type) == list: - configuration += f'" + elif isinstance(var_type, list): + configuration += f""" + + " + configuration += "" if "help" in data: configuration += f""" """ - configuration += f"" + configuration += "" elif isinstance(var_type, datetime.datetime): configuration += f""" - """ + + """ if "help" in data: configuration += f""" """ @@ -354,19 +377,21 @@ def html(self): return configuration def javascript(self): + """Generate JavaScript code based on the configuration input.""" configuration_input = "" - for cat, cat_data in self.config.items(): + for _, cat_data in self.config.items(): for key, data in cat_data.items(): var_type = data["type"] - if type(var_type) == bool: + if isinstance(var_type, bool): configuration_input += f'{key}: $("#configuration_{key}").prop("checked"),' - elif type(var_type) == str or type(var_type) == float: + elif isinstance(var_type, (str, float)): configuration_input += f'{key}: $("#configuration_{key}").val(),' - elif type(var_type) == list: + elif isinstance(var_type, list): configuration_input += f'{key}: $("#configuration_{key}").val(),' elif isinstance(var_type, datetime.datetime): configuration_input += f'{key}: 
$("#configuration_{key}").val(),' - - with open(f"{self.application_path}/templates/js/usage_point_configuration.js") as file_: + with Path(f"{APP_CONFIG.application_path}/templates/js/usage_point_configuration.js").open( + encoding="UTF-8" + ) as file_: usage_point_configuration = Template(file_.read()) return usage_point_configuration.render(configurationInput=configuration_input) diff --git a/src/templates/models/datatable.py b/src/templates/models/datatable.py index d72060d7..c9ee6de1 100644 --- a/src/templates/models/datatable.py +++ b/src/templates/models/datatable.py @@ -1,13 +1,15 @@ +"""Generate Datatable.""" from datetime import datetime, timezone import pytz -from dependencies import daterange +from utils import daterange utc = pytz.UTC class Datatable: + """Datatable.""" def __init__(self, usage_point_id): self.usage_point_id = usage_point_id @@ -65,6 +67,10 @@ def html(self, title, tag, daily_data, cache_last_date, option=None): "blacklist": data.blacklist, "fail_count": data.fail_count, } + print("-" * 200) + print(cache_last_date) + print("-" * 200) + start_date = utc.localize(cache_last_date) end_date = datetime.now(timezone.utc) if start_date: diff --git a/src/templates/models/menu.py b/src/templates/models/menu.py index 428f72ab..b26082d9 100644 --- a/src/templates/models/menu.py +++ b/src/templates/models/menu.py @@ -1,22 +1,24 @@ -from dependencies import APPLICATION_PATH +"""Menu.""" class Menu: + """HTML Menu.""" + def __init__(self, items): - self.application_path = APPLICATION_PATH self.items = items def html(self): + """Return HTML Code.""" html = """
menu
    """ - for id, items in self.items.items(): + for idx, items in self.items.items(): html += f"""
  • - + {items["icon"]}
  • @@ -25,11 +27,12 @@ def html(self): return html def javascript(self): + """Return Javascript Code.""" javascript = "" - for id, items in self.items.items(): + for idx, items in self.items.items(): if "ajax" in items: javascript += f""" -$("#{id}").click(function () {{ +$("#{idx}").click(function () {{ $("#bottom_menu").removeClass("active") """ if "loading_page" in items: @@ -47,22 +50,23 @@ def javascript(self): data = JSON.parse(JSON.stringify(data)) let status = data["result"]["status"]; if (status == false) {{ - + }}else{{ location.reload(); }} }}) -}}); +}}); """ return javascript def css(self): + """Return CSS Code.""" css = "" - for id, items in self.items.items(): + for idx, items in self.items.items(): if "css" in items: css += f""" -#{id} {{ - {items["css"]} +#{idx} {{ + {items["css"]} }} """ return css diff --git a/src/templates/models/sidemenu.py b/src/templates/models/sidemenu.py index a61926c2..178281ab 100644 --- a/src/templates/models/sidemenu.py +++ b/src/templates/models/sidemenu.py @@ -1,19 +1,23 @@ +"""Sidemenu.""" +from pathlib import Path + from jinja2 import Template -from config import URL -from dependencies import APPLICATION_PATH +from config.main import APP_CONFIG +from const import URL class SideMenu: - def __init__(self): - self.application_path = APPLICATION_PATH + """Sidemenu.""" def html(self): - with open(f"{self.application_path}/templates/html/sidemenu.html") as file_: + """Open HTML.""" + with Path(f"{APP_CONFIG.application_path}/templates/html/sidemenu.html").open(encoding="UTF-8") as file_: side_menu = Template(file_.read()) return side_menu.render(myelectricaldata=f"{URL}") def javascript(self): - with open(f"{self.application_path}/templates/js/sidemenu.js") as file_: + """Open JS.""" + with Path(f"{APP_CONFIG.application_path}/templates/js/sidemenu.js").open(encoding="UTF-8") as file_: side_menu = Template(file_.read()) return side_menu.render() diff --git a/src/templates/models/usage_point_select.py 
b/src/templates/models/usage_point_select.py index 937b7d18..55280f59 100644 --- a/src/templates/models/usage_point_select.py +++ b/src/templates/models/usage_point_select.py @@ -1,25 +1,31 @@ +"""Usage Point selector.""" + import json +from pathlib import Path from jinja2 import Template -from dependencies import APPLICATION_PATH +from config.main import APP_CONFIG +from database.addresses import DatabaseAddresses +from database.usage_points import DatabaseUsagePoints class UsagePointSelect: - def __init__(self, config, db, selected_usage_point=None, choice=False): + """Class representing a usage point select.""" + + def __init__(self, config, selected_usage_point=None, choice=False): self.config = config - self.db = db - self.application_path = APPLICATION_PATH self.selected_usage_point = selected_usage_point self.choice = choice def html(self): + """Return HTML Code.""" list_usage_points_id = '
help_outline
{title} + help_outline
{title} + help_outline
{title}
{title}help_outline
{title}help_outline
- - - - - """ +
Aujourd'hui
{today.strftime("%d-%m-%Y")}
Demain
{tomorow.strftime("%d-%m-%Y")}
+ + + + + """ tempo_template = { "?": { "color": "background-color: #000000", @@ -272,12 +283,14 @@ def display(self): color = tempo[0].color else: color = "?" - body += f"""""" + body += f"""""" if len(tempo) > 1: color = tempo[1].color else: color = "?" - body += f"""""" + body += f"""""" body += """
Aujourd'hui
{today.strftime("%d-%m-%Y")}
Demain
{tomorow.strftime("%d-%m-%Y")}
{tempo_template[color]["text"]}{tempo_template[color]["text"]}{tempo_template[color]["text"]}{tempo_template[color]["text"]}
""" @@ -287,7 +300,6 @@ def display(self): if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: self.generate_data("consumption") self.consumption() - # recap_consumption = self.recap(data=self.recap_consumption_data) recap_consumption = self.recapv2() body += "

Consommation

" body += str(recap_consumption) @@ -345,8 +357,6 @@ def display(self): body += "

Consommation VS Production

" for year, data in self.recap_consumption_data.items(): if data["value"] != 0: - # body += f'

{year}

' - # body += f'
{self.consumption_vs_production(year)}
' self.consumption_vs_production(year) body += f'
' @@ -389,7 +399,10 @@ def display(self): """ if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: body += "

Horaires

" - body += "
  • Quand vous videz le cache d'une tranche horaire, vous supprimez la totalité du cache de la journée.
" + body += ( + "
  • Quand vous videz le cache d'une tranche horaire, vous supprimez la totalité " + "du cache de la journée.
" + ) body += """ @@ -442,15 +455,16 @@ def display(self): - - +
ATTENTION, Un dépassement d'abonnement ne veut pas forcement dire + ATTENTION, Un dépassement d'abonnement ne veut pas forcement dire qu'il est nécessaire de basculer sur un abonnement supérieur. Le compteur Linky vous autorise à dépasser un certain seuil pendant un certain temps afin - d'absorber un pic de consommation anormal sans pour autant disjoncter. + d'absorber un pic de consommation anormal sans pour autant disjoncter.
Lien vers la documentation officielle d’Enedis. (cf. chapitre 7)Lien vers la + documentation officielle d'Enedis. (cf. chapitre 7)
""" body += """ @@ -516,7 +530,10 @@ def display(self): """ if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: body += "

Horaires

" - body += "
  • Quand vous videz le cache d'une tranche horaire, vous supprimez la totalité du cache de la journée.
" + body += ( + "
  • Quand vous videz le cache d'une tranche horaire, vous supprimez la totalité du cache" + "de la journée.
" + ) body += """ @@ -546,11 +563,15 @@ def display(self):
""" - with open(f"{self.application_path}/templates/html/usage_point_id.html") as file_: + with Path(f"{APP_CONFIG.application_path}/templates/html/usage_point_id.html").open( + encoding="UTF-8" + ) as file_: index_template = Template(file_.read()) html = index_template.render( select_usage_points=self.usage_point_select.html(), - javascript_loader=open(f"{self.application_path}/templates/html/head.html").read(), + javascript_loader=Path(f"{APP_CONFIG.application_path}/templates/html/head.html") + .open(encoding="UTF-8") + .read(), body=body, side_menu=self.side_menu.html(), javascript=( @@ -558,11 +579,13 @@ def display(self): + self.side_menu.javascript() + self.usage_point_select.javascript() + self.menu.javascript() - + open(f"{self.application_path}/templates/js/loading.js").read() - + open(f"{self.application_path}/templates/js/notif.js").read() - + open(f"{self.application_path}/templates/js/gateway_status.js").read() - + open(f"{self.application_path}/templates/js/datatable.js").read() - + open(f"{self.application_path}/templates/js/loading.js").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/notif.js").open(encoding="UTF-8").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/gateway_status.js") + .open(encoding="UTF-8") + .read() + + Path(f"{APP_CONFIG.application_path}/templates/js/datatable.js").open(encoding="UTF-8").read() + + Path(f"{APP_CONFIG.application_path}/templates/js/loading.js").open(encoding="UTF-8").read() + self.javascript ), configuration=self.configuration_div.html().strip(), @@ -572,6 +595,7 @@ def display(self): return html def contract_data(self): + """Return the contract data for the usage point.""" contract_data = {} if self.contract is not None: last_activation_date = self.contract.last_activation_date @@ -596,6 +620,8 @@ def contract_data(self): return contract_data def offpeak_hours_table(self): + """Return the 
offpeak hours table for the usage point.""" + def split(data): result = "" if data is not None: @@ -618,7 +644,8 @@ def split(data): """ day = 0 - while day <= 6: + max_day_offpeak = 6 + while day <= max_day_offpeak: week_day = f"offpeak_hours_{day}" if ( hasattr(self.contract, week_day) @@ -629,22 +656,26 @@ def split(data): contract_offpeak_hours = split(getattr(self.contract, week_day)) config_offpeak_hours = split(getattr(self.usage_point_config, week_day)) if getattr(self.usage_point_config, week_day) != getattr(self.contract, week_day): - offpeak_hours += f"{contract_offpeak_hours}
{config_offpeak_hours}" + offpeak_hours += ( + f"{contract_offpeak_hours}" + f"
{config_offpeak_hours}" + ) else: offpeak_hours += f"{contract_offpeak_hours}" else: - offpeak_hours += f"Pas de données." + offpeak_hours += "Pas de données." day = day + 1 offpeak_hours += "" return offpeak_hours def get_address(self): + """Return the address of the usage point.""" if self.address is not None: return f"{self.address.street}, " f"{self.address.postal_code} " f"{self.address.city}" - else: - return None + return None - def consumption(self): + def consumption(self): # noqa: C901 + """Return the consumption data for the usage point.""" if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: if self.recap_consumption_data: self.javascript += """ @@ -676,20 +707,21 @@ def consumption(self): table_value += ", " self.javascript += f"['{month}', {table_value}]," self.javascript += """]); - var options = { - title : '', - vAxis: {title: 'Consommation (kWh)'}, - hAxis: {title: 'Mois'}, - seriesType: 'bars', - series: {5: {type: 'line'}} - }; + var options = { + title : '', + vAxis: {title: 'Consommation (kWh)'}, + hAxis: {title: 'Mois'}, + seriesType: 'bars', + series: {5: {type: 'line'}} + }; - var chart = new google.visualization.ComboChart(document.getElementById('chart_daily_consumption')); - chart.draw(data, options); - } + var chart = new google.visualization.ComboChart(document.getElementById('chart_daily_consumption')); + chart.draw(data, options); + } """ - def production(self): + def production(self): # noqa: C901 + """Return the production data for the usage point.""" if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: if self.recap_production_data: self.javascript += """ @@ -721,19 +753,20 @@ def production(self): table_value += ", " self.javascript += f"['{month}', {table_value}]," self.javascript += """]); - var options = { - vAxis: {title: 'Production (kWh)'}, - hAxis: {title: 'Mois'}, - seriesType: 'bars', - series: {5: {type: 'line'}} - }; + var options = { + vAxis: 
{title: 'Production (kWh)'}, + hAxis: {title: 'Mois'}, + seriesType: 'bars', + series: {5: {type: 'line'}} + }; - var chart = new google.visualization.ComboChart(document.getElementById('chart_daily_production')); - chart.draw(data, options); - } + var chart = new google.visualization.ComboChart(document.getElementById('chart_daily_production')); + chart.draw(data, options); + } """ def consumption_vs_production(self, year): + """Return the consumption vs production data for the usage point.""" if self.recap_production_data != {} and self.usage_point_config.production != {}: # For a given year, we want to return the union of all months where # energy was either consumed or produced. @@ -756,7 +789,7 @@ def consumption_vs_production(self, year): production = self.recap_production_data[year]["month"][month] if month in production_months else 0 compare_comsuption_production[month] = [float(consumption) / 1000, float(production) / 1000] self.javascript += ( - """ + """ google.charts.load("current", {packages:["corechart"]}); google.charts.setOnLoadCallback(drawChartProductionVsConsumption""" + year @@ -771,9 +804,10 @@ def consumption_vs_production(self, year): for month, data in compare_comsuption_production.items(): table_value = "" for idx, value in enumerate(data): + new_value = None if value == "": - value = 0 - table_value += f"{value}" + new_value = 0 + table_value += f"{new_value}" if idx + 1 < len(data): table_value += ", " self.javascript += f"['{month}', {table_value}]," @@ -782,16 +816,17 @@ def consumption_vs_production(self, year): ]) data.sort([{column: 0}]); var options = { - title : '""" + title : '""" + year + """', - vAxis: {title: 'Consommation (kWh)'}, - hAxis: {title: 'Mois'}, - seriesType: 'bars', - series: {5: {type: 'line'}} + vAxis: {title: 'Consommation (kWh)'}, + hAxis: {title: 'Mois'}, + seriesType: 'bars', + series: {5: {type: 'line'}} }; - var chart = new 
google.visualization.ComboChart(document.getElementById('chart_daily_production_compare_""" + var chart = new google.visualization.ComboChart( + document.getElementById('chart_daily_production_compare_""" + year + """')); chart.draw(data, options); @@ -802,7 +837,10 @@ def consumption_vs_production(self, year): return "Pas de données." def generate_chart_hc_hp(self): - price_consumption = self.db.get_stat(self.usage_point_id, "price_consumption") + """Generate the chart for the usage point.""" + price_consumption = DatabaseStatistique( + self.usage_point_id, + ).get("price_consumption") if price_consumption and hasattr(price_consumption[0], "value"): recap = ast.literal_eval(price_consumption[0].value) for year, data in sorted(recap.items(), reverse=True): @@ -817,7 +855,6 @@ def generate_chart_hc_hp(self): self.javascript += " var data = google.visualization.arrayToDataTable([['Type', 'Valeur']," self.javascript += f"['HC', {data['HC']['Wh']}]," self.javascript += f"['HP', {data['HP']['Wh']}]," - # self.javascript += f"['BASE', {data['BASE']['Wh']}]," self.javascript += ( """ ]); @@ -837,7 +874,8 @@ def generate_chart_hc_hp(self): logging.error("Pas de données.") def generate_data(self, measurement_direction): - data = self.db.get_daily_all(self.usage_point_id, measurement_direction) + """Generate the data for the usage point.""" + data = DatabaseDaily(self.usage_point_id, measurement_direction).get_all() result = {} for item in data: year = item.date.strftime("%Y") @@ -854,6 +892,8 @@ def generate_data(self, measurement_direction): self.recap_production_data = result def get_price(self, measurement_direction): + """Return the price for the usage point.""" + def generate_price_compare(data): evolution_1 = round(data["price_2"] - data["price_1"], 2) evolution_2 = round(data["price_3"] - data["price_1"], 2) @@ -869,14 +909,18 @@ def generate_price_compare(data): if color_1 == "red" and color_2 == "red": text_color = "rgb(16, 150, 24);" return ( - f"" - f"
{data['price_1']} €
" - f"
{data['lib_1']} : {evolution_1}€
" - f"{data['lib_2']} : {evolution_2}€
" - f"" + "" + "
" + f"{data['price_1']} €" + "
" + "
" + f"{data['lib_1']} : {evolution_1}€
" + f"{data['lib_2']} : {evolution_2}€" + "
" + "" ) - data = self.db.get_stat(self.usage_point_id, f"price_{measurement_direction}") + data = DatabaseStatistique(self.usage_point_id).get(f"price_{measurement_direction}") html = "" if len(data) > 0: data = data[0] @@ -888,7 +932,6 @@ def generate_price_compare(data): Base HP/HC """ - tempo_config = self.config.tempo_config() html += "Tempo" html += "" if data: @@ -896,10 +939,9 @@ def generate_price_compare(data): for years, value in data_value.items(): price_base = round(value["BASE"]["euro"], 2) price_hchp = round(value["HC"]["euro"] + value["HP"]["euro"], 2) - tempo_config = self.config.tempo_config() price_tempo = None value_tempo = 0 - for color, tempo in value["TEMPO"].items(): + for _, tempo in value["TEMPO"].items(): value_tempo = value_tempo + tempo["euro"] price_tempo = round(value_tempo, 2) html += "" @@ -935,16 +977,18 @@ def generate_price_compare(data): html += "" return html - def recap(self, data): + def recap(self, data): # noqa: PLR0915, PLR0912, C901 + """Return the recap for the usage point.""" if data: - current_years = int(datetime.now().strftime("%Y")) - current_month = datetime.now().strftime("%m") + current_years = int(datetime.now(tz=pytz.utc).strftime("%Y")) + current_month = datetime.now(tz=pytz.utc).strftime("%m") max_history = current_years - self.max_history linear_years = {} mount_count = 0 first_occurance = False - for linear_year, linear_data in reversed(sorted(data.items())): - for linear_month, linear_value in reversed(sorted(linear_data["month"].items())): + nb_month = 12 + for _linear_year, linear_data in sorted(data.items(), reverse=True): + for _linear_month, linear_value in sorted(linear_data["month"].items(), reverse=True): key = f"{current_month}/{current_years} => {current_month}/{current_years - 1}" if not first_occurance and linear_value != 0: first_occurance = True @@ -953,26 +997,26 @@ def recap(self, data): linear_years[key] = 0 linear_years[key] = linear_years[key] + linear_value mount_count = mount_count + 1 
- if mount_count >= 12: + if mount_count >= nb_month: current_years = current_years - 1 mount_count = 0 body = '' body += '' - current_years = int(datetime.now().strftime("%Y")) - for year, data in reversed(sorted(data.items())): + current_years = int(datetime.now(tz=pytz.utc).strftime("%Y")) + for year, year_data in sorted(data.items(), reverse=True): if int(year) > max_history: body += f""" - +
{round(year_data['value'] / 1000)} kWh
+ """ current_years = current_years - 1 body += "" body += "" body += '' - current_years = int(datetime.now().strftime("%Y")) - for year, data in linear_years.items(): + current_years = int(datetime.now(tz=pytz.utc).strftime("%Y")) + for year, year_data in linear_years.items(): if current_years > max_history: data_last_years_class = "" data_last_years = 0 @@ -980,7 +1024,7 @@ def recap(self, data): if str(key) in linear_years: data_last_years = linear_years[str(key)] if data_last_years != 0: - data_last_years = round((100 * int(data)) / int(data_last_years) - 100, 2) + data_last_years = round((100 * int(year_data)) / int(data_last_years) - 100, 2) current_years = current_years - 1 if data_last_years >= 0: if data_last_years == 0: @@ -991,11 +1035,11 @@ def recap(self, data): else: data_last_years_class = "green" body += f""" - + """ body += "" body += "
Annuel +
{year}
-
{round(data['value'] / 1000)} kWh
-
Annuel linéaire +
{year}
-
{round(data / 1000)} kWh
+
{round(year_data / 1000)} kWh
{data_last_years}%
-
" @@ -1004,6 +1048,7 @@ def recap(self, data): return body def recapv2(self, measurement_direction="consumption"): + """Return the recap for the usage point.""" idx = 0 finish = False output_data = {"years": {}, "linear": {}} diff --git a/src/utils.py b/src/utils.py new file mode 100644 index 00000000..d8a4bbad --- /dev/null +++ b/src/utils.py @@ -0,0 +1,516 @@ +"""Generic utils.""" +import decimal +import json +import logging +import re +import shutil +import sys +from datetime import datetime, timedelta +from math import floor +from os import getenv +from pathlib import Path +from typing import ClassVar, Union + +import pytz +import yaml +from art import decor, text2art +from dateutil.parser import parse +from mergedeep import Strategy, merge +from ruamel.yaml import YAML +from ruamel.yaml import comments as com + +from __version__ import VERSION +from const import URL_CONFIG_FILE + + +def daterange(start_date, end_date): + """Generate a range of dates between the start_date and end_date. + + Args: + start_date (datetime.date): The start date of the range. + end_date (datetime.date): The end date of the range. + + Yields: + datetime.date: The dates in the range. + + """ + for n in range(int((end_date - start_date).days)): + yield start_date + timedelta(n) + + +def is_bool(v): + """Check if a value is a boolean. + + Args: + v (any): The value to check. + + Returns: + bool: True if the value is a boolean, False otherwise. + + """ + if v in ["true", "false", "yes, no", "t, f", "y, n", 1, 0]: + return True + return False + + +def str2bool(v): + """Convert a string representation of a boolean value to a boolean. + + Args: + v (str): The string representation of the boolean value. + + Returns: + bool: The boolean value. + + """ + if not isinstance(v, bool): + return v and v.lower() in ("yes", "true", "t", "1") + return v + + +def is_float(element): + """Check if a value can be converted to a float. + + Args: + element (any): The value to check. 
+ + Returns: + bool: True if the value can be converted to a float, False otherwise. + + """ + try: + float(element) + return True + except ValueError: + return False + + +def is_datetime(element, fuzzy=False): + """Check if a value can be parsed as a datetime. + + Args: + element (str): The value to check. + fuzzy (bool, optional): Whether to allow fuzzy parsing. Defaults to False. + + Returns: + bool: True if the value can be parsed as a datetime, False otherwise. + + """ + try: + parse(element, fuzzy=fuzzy) + return True + except ValueError: + return False + + +def is_integer(element): + """Check if a value can be converted to an integer. + + Args: + element (any): The value to check. + + Returns: + bool: True if the value can be converted to an integer, False otherwise. + + """ + try: + return float(element).is_integer() + except ValueError: + return False + + +def reformat_json(entry): + """Reformat a JSON object. + + Args: + entry (dict): The JSON object to reformat. + + Returns: + dict: The reformatted JSON object. + + """ + result = {} + for key, value in entry.items(): + if value in ["true", "false"]: + result[key] = str2bool(value) + elif isinstance(value, dict): + result[key] = value + elif not isinstance(value, bool) and is_float(value): + result[key] = float(value) + else: + result[key] = value + return result + + +def truncate(f, n=2): + """Truncate a float number to a specified number of decimal places. + + Args: + f (float): The float number to truncate. + n (int, optional): The number of decimal places to keep. Defaults to 2. + + Returns: + float: The truncated float number. + + """ + return floor(f * 10**n) / 10**n + + +def convert_kw(value): + """Convert a value from kilowatts to watts. + + Args: + value (float): The value in kilowatts. + + Returns: + float: The value in watts. + """ + return truncate(value / 1000, 2) + + +def convert_kw_to_euro(value, price): + """Convert a value from kilowatts to euros. 
+ + Args: + value (float): The value in kilowatts. + price (float): The price per kilowatt-hour. + + Returns: + float: The value in euros. + """ + if isinstance(price, str): + price = float(price.replace(",", ".")) + return round(value / 1000 * price, 1) + + +def convert_price(price): + """Convert a price from string to float. + + Args: + price (str): The price as a string. + + Returns: + float: The price as a float. + """ + if isinstance(price, str): + price = price.replace(",", ".") + return float(price) + + +def force_round(x, n): + """Round a number to a specified number of decimal places. + + Args: + x (float): The number to be rounded. + n (int): The number of decimal places to round to. + + Returns: + float: The rounded number. + """ + d = decimal.Decimal(repr(x)) + targetdigit = decimal.Decimal("1e%d" % -n) + chopped = d.quantize(targetdigit, decimal.ROUND_DOWN) + return float(chopped) + + +def object_to_dict(obj): + """Convert an object to a dictionary. + + Args: + obj (object): The object to convert. + + Returns: + dict: The dictionary representation of the object. + """ + return json.loads(json.dumps(obj, default=lambda o: getattr(o, "__dict__", str(o)))) + + +def title(message): + """Print a title message. + + Args: + message (str or list): The message or list of messages to print as a title. + + """ + separator() + if isinstance(message, list): + for msg in message: + logging.info(f"{msg.upper()}") + else: + logging.info(f"{message.upper()}") + separator() + + +def title_warning(message): + """Print a warning message with a title format. + + Args: + message (str): The warning message to print. + + """ + separator_warning() + logging.warning(f" {message.upper()}") + separator_warning() + + +def title_critical(message): + """Print a critical message with a title format. + + Args: + message (str): The warning message to print. 
+ + """ + separator_critical() + logging.critical(f" {message.upper()}") + separator_critical() + + +def separator(): + """Print a separator line.""" + logging.info( + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ ❖ ◦ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + ) + + +def separator_warning(): + """Print a warning separator line.""" + logging.warning( + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ▲ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + ) + + +def separator_critical(): + """Print a critical separator line.""" + logging.critical( + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ▲ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + ) + + +def export_finish(): + """Finish the export process.""" + logging.info( + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ TERMINE ◦ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + ) + + +def log_usage_point_id(usage_point_id): + """Log the usage point ID. + + Args: + usage_point_id (str): The usage point ID to log. + """ + text = f"Point de livraison : {usage_point_id}" + separator() + logging.info(f'{decor("barcode1")}{text: ^93}{decor("barcode1", reverse=True)}') + separator() + + +def finish(): + """Finish the import process.""" + separator() + for line in text2art("Import Finish!!!").splitlines(): + logging.info(f'{decor("barcode1")}{line: ^93}{decor("barcode1", reverse=True)}') + separator() + + +def get_version(): + """Return the version of the module.""" + return VERSION + + +def barcode_message(message): + """Barcode message.""" + art = text2art(message) + for line in art.splitlines(): + logging.info(f'{decor("barcode1")}{line: ^93}{decor("barcode1", reverse=True)}') + + +def logo(version): + """Print the logo of MyElectricalData with the version number. + + Args: + version (str): The version number of MyElectricalData. 
+ + """ + art = text2art("MyElectricalData") + separator() + for line in art.splitlines(): + logging.info(f'{decor("barcode1")}{line: ^93}{decor("barcode1", reverse=True)}') + separator() + version = f"VERSION : {version}" + logging.info(f'{decor("barcode1")}{version: ^93}{decor("barcode1", reverse=True)}') + separator() + + +def check_format(value): + """Check the format of a value and convert it if necessary. + + Args: + value (any): The value to check and convert. + + Returns: + any: The checked and converted value. + + """ + if is_bool(value): + new_value = str2bool(value) + elif value is None or value == "None" or not value: + new_value = None + elif isinstance(value, int): + new_value = int(value) + elif is_float(value): + new_value = float(value) + elif is_datetime(value): + new_value = datetime.strptime(value, "%Y-%m-%d").replace(tzinfo=pytz.utc) + else: + new_value = str(value) + return new_value + + +def is_between(time, time_range): + """Check if a given time is between a specified time range. + + Args: + time (datetime): The time to check. + time_range (tuple): The time range represented by a tuple of two datetime objects. + + Returns: + bool: True if the time is between the time range, False otherwise. 
+ """ + time = time.replace(":", "") + start = time_range[0].replace(":", "") + end = time_range[1].replace(":", "") + if end < start: + return time >= start or time < end + return start <= time < end + + +def chunks_list(lst, n): + """Yield successive n-sized chunks from lst.""" + for i in range(0, len(lst), n): + yield lst[i : i + n] + + +def is_json(myjson): + """Check if a string is a valid JSON object.""" + try: + json.loads(myjson) + except ValueError: + return False + return True + + +class ConfigOutput: + """Return object.""" + + application_path: str = None + application_path_data: str = None + application_path_log: str = None + config_file: str = None + config: ClassVar[dict] = {} + + +def load_config() -> ConfigOutput: + """Load config.yaml file.""" + output = ConfigOutput() + output.application_path = getenv("APPLICATION_PATH", "/app") + error = False + if not Path(output.application_path).is_dir(): + error = True + logging.error( + ( + "\n\nLe dossier contenant les sources n'existe pas.\n" + " Variable d'environnement : APPLICATION_PATH\n => %s\n" + ), + getenv("APPLICATION_PATH"), + ) + output.application_path_data = getenv("APPLICATION_PATH_DATA", "/data") + if not Path(output.application_path_data).is_dir(): + error = True + logging.error( + ( + "\n\nLe dossier contenant les données n'existe pas.\n" + " Variable d'environnement : APPLICATION_PATH_DATA\n => %s\n" + ), + getenv("APPLICATION_PATH_DATA"), + ) + output.application_path_log = getenv("APPLICATION_PATH_LOG", "/log") + if not Path(output.application_path_log).is_dir(): + error = True + logging.error( + ( + "\n\nLe dossier contenant les logs n'existe pas.\n" + " Variable d'environnement : APPLICATION_PATH_LOG\n => %s\n" + ), + getenv("APPLICATION_PATH_LOG"), + ) + if error: + sys.exit(1) + output.config_file = f"{output.application_path_data}/config.yaml" + if not Path(output.config_file).exists() or Path(output.config_file).stat().st_size == 0: + 
shutil.copyfile(f"{output.application_path}/templates/config.example.yaml", output.config_file) + try: + # Check Usage Point Id single quote + with Path(output.config_file) as file: + content_new = re.sub(r" ([0-9]*)\:", r" '\1':", file.read_text(encoding="UTF-8"), flags=re.M) + file.write_text(content_new, encoding="UTF-8") + with Path(output.config_file).open(encoding="utf-8") as file: + output.config = yaml.safe_load(file) + except yaml.YAMLError: + logging.critical( + f""" + Impossible de charger le fichier de configuration. + + Vous pouvez récupérer un exemple de configuration ici: + {URL_CONFIG_FILE} +""" + ) + sys.exit(1) + return output + + +def edit_config(data, file=None, comments=None, wipe=False): # noqa: C901 + """Edit a value in a YAML file.""" + if file is None: + file = load_config().config_file + with Path(file) as config_file: + yaml_obj = YAML() + yaml_obj.indent(mapping=2, sequence=4, offset=2) + code = yaml_obj.load(config_file.read_text(encoding="UTF-8")) if not wipe else {} + if code is None: + code = {} + # CLEAN OLD CONFIGURATION + if "wipe_influxdb" in code: + del code["wipe_influxdb"] + if "debug" in code: + del code["debug"] + if "log2file" in code: + del code["log2file"] + if "port" in code: + del code["port"] + if "ssl" in code: + del code["ssl"] + new_config = merge(code, data, strategy=Strategy.ADDITIVE) + new_config = dict(sorted(new_config.items())) + if comments is not None: + comments_obj = com.CommentedMap() + for key, value in comments.items(): + comments_obj.yaml_add_eol_comment(value, key, column=1) + new_config = merge(comments_obj, code, strategy=Strategy.ADDITIVE) + for key, value in new_config.items(): + currant_value = value + if isinstance(currant_value, list): + currant_value = list(set(currant_value)) + new_config[key] = currant_value + if isinstance(currant_value, Union[dict, list]): + for sub_key, sub_value in currant_value.items(): + current_sub_value = sub_value + if isinstance(current_sub_value, list): + 
current_sub_value = list(set(current_sub_value)) + new_config[key][sub_key] = current_sub_value + + yaml_obj.dump(new_config, config_file) diff --git a/tests/test_ajax_ecowatt.py b/tests/test_ajax_ecowatt.py index 1000fd19..0d47d923 100644 --- a/tests/test_ajax_ecowatt.py +++ b/tests/test_ajax_ecowatt.py @@ -6,17 +6,20 @@ import pytest from db_schema import Ecowatt -from tests.conftest import contains_logline +from conftest import contains_logline -@pytest.mark.parametrize("response, status_code, expect_exception, expect_success", [ - (None, 200, False, False), - (None, 500, True, False), - ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200, False, True) -]) +@pytest.mark.parametrize( + "response, status_code, expect_exception, expect_success", + [ + (None, 200, False, False), + (None, 500, True, False), + ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200, False, True), + ], +) def test_fetch_ecowatt_empty(mocker, caplog, requests_mock, response, status_code, expect_exception, expect_success): from models.ajax import Ajax - from config import URL + from const import URL start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=3)).strftime("%Y-%m-%d") @@ -41,22 +44,35 @@ def test_fetch_ecowatt_empty(mocker, caplog, requests_mock, response, status_cod assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 1 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) else: assert res == "OK" assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération 
des données Ecowatt.'}", logging.ERROR) - - -@pytest.mark.parametrize("response, expect_exception, expect_success", [ - (None, True, False), - ([Ecowatt(date="2099-01-01", value=9000, message="mock message", detail="{'detail': 'mock detail'}")], False, True) -]) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) + + +@pytest.mark.parametrize( + "response, expect_exception, expect_success", + [ + (None, True, False), + ( + [Ecowatt(date="2099-01-01", value=9000, message="mock message", detail="{'detail': 'mock detail'}")], + False, + True, + ), + ], +) def test_get_ecowatt(mocker, caplog, response, expect_exception, expect_success): from models.ajax import Ajax @@ -72,11 +88,15 @@ def test_get_ecowatt(mocker, caplog, response, expect_exception, expect_success) ajax.get_ecowatt() else: res = ajax.get_ecowatt() - assert res == {r.date: {"value": r.value, "message": r.message, "detail": ast.literal_eval(r.detail)} for r in - response} + assert res == { + r.date: {"value": r.value, "message": r.message, "detail": ast.literal_eval(r.detail)} for r in response + } assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 0 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) diff --git a/tests/test_ajax_get_account_status.py b/tests/test_ajax_get_account_status.py index 5ed10d59..e0492365 100644 --- a/tests/test_ajax_get_account_status.py +++ b/tests/test_ajax_get_account_status.py @@ -2,7 +2,7 @@ import pytest -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("usage_point_id", ["pdl1"]) @@ -12,31 +12,31 @@ ({"detail": "truthy response"}, 300), 
({"detail": "falsy response"}, 500), ( - { - "consent_expiration_date": "2099-01-01T00:00:00", - "call_number": 42, - "quota_limit": 42, - "quota_reached": 42, - "quota_reset_at": "2099-01-01T00:00:00.000000", - "ban": False, - }, - 200, + { + "consent_expiration_date": "2099-01-01T00:00:00", + "call_number": 42, + "quota_limit": 42, + "quota_reached": 42, + "quota_reset_at": "2099-01-01T00:00:00.000000", + "ban": False, + }, + 200, ), ], ) def test_get_account_status(mocker, usage_point_id, caplog, status_response, status_code, requests_mock): from models.ajax import Ajax - from config import URL + from const import URL - default_error_message = 'Erreur lors de la récupération du statut du compte.' + default_error_message = "Erreur lors de la récupération du statut du compte." m_usage_point_update = mocker.patch("models.database.Database.usage_point_update") m_set_error_log = mocker.patch("models.database.Database.set_error_log") requests_mocks = list() - requests_mocks.append(requests_mock.get( - f"{URL}/valid_access/{usage_point_id}/cache", json=status_response, status_code=status_code - )) + requests_mocks.append( + requests_mock.get(f"{URL}/valid_access/{usage_point_id}/cache", json=status_response, status_code=status_code) + ) ajax = Ajax(usage_point_id=usage_point_id) if usage_point_id else Ajax() @@ -54,8 +54,12 @@ def test_get_account_status(mocker, usage_point_id, caplog, status_response, sta if is_truthy_response: if status_code != 200 or not is_complete: - assert contains_logline(caplog, status_response.get('detail', default_error_message), logging.ERROR) - assert res == {'description': status_response.get('detail', default_error_message), 'error': True, 'last_call': None} + assert contains_logline(caplog, status_response.get("detail", default_error_message), logging.ERROR) + assert res == { + "description": status_response.get("detail", default_error_message), + "error": True, + "last_call": None, + } # db.usage_point_update is not called assert 0 == 
m_usage_point_update.call_count @@ -74,10 +78,7 @@ def test_get_account_status(mocker, usage_point_id, caplog, status_response, sta assert not contains_logline(caplog, "Erreur lors de la récupération des informations du compte", logging.ERROR) # FIXME: Ajax does not use set_error_log while Job does assert 0 == m_set_error_log.call_count - assert res == {'description': status_response, - 'error': True, - 'last_call': None, - 'status_code': status_code} + assert res == {"description": status_response, "error": True, "last_call": None, "status_code": status_code} # Ensuring {URL}/valid_access/{usage_point_id} is called exactly as many times as enabled usage_points # and only once per enabled usage_point diff --git a/tests/test_ajax_get_gateway_status.py b/tests/test_ajax_get_gateway_status.py index 737c50b1..650326ce 100644 --- a/tests/test_ajax_get_gateway_status.py +++ b/tests/test_ajax_get_gateway_status.py @@ -3,15 +3,15 @@ import pytest -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("usage_point_id", [None, "pdl1"]) @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"mock": "response"}, 200)]) def test_get_gateway_status(caplog, requests_mock, response, status_code, usage_point_id): + from const import URL from models.ajax import Ajax - from config import URL - from dependencies import get_version + from utils import get_version requests_mock.get(f"{URL}/ping", json=response, status_code=status_code) @@ -24,18 +24,16 @@ def test_get_gateway_status(caplog, requests_mock, response, status_code, usage_ else: res = ajax.gateway_status() if status_code != 200: - assert res == {'information': 'MyElectricalData injoignable.', - 'status': False, - 'version': get_version()} + assert res == {"information": "MyElectricalData injoignable.", "status": False, "version": get_version()} # FIXME: No error is logged assert ( - "ERROR root:jobs.py:170 Erreur lors de la récupération du statut 
de la passerelle :\n" - not in caplog.text + "ERROR root:jobs.py:170 Erreur lors de la récupération du statut de la passerelle :\n" + not in caplog.text ) else: - assert res == {'mock': 'response', 'version': get_version()} + assert res == {"mock": "response", "version": get_version()} if usage_point_id: assert contains_logline(caplog, f"[{usage_point_id.upper()}] CHECK DE L'ÉTAT DE LA PASSERELLE.", logging.INFO) else: - assert contains_logline(caplog, f"CHECK DE L'ÉTAT DE LA PASSERELLE.", logging.INFO) + assert contains_logline(caplog, "CHECK DE L'ÉTAT DE LA PASSERELLE.", logging.INFO) diff --git a/tests/test_ajax_tempo.py b/tests/test_ajax_tempo.py index f7d585e2..3f71252d 100644 --- a/tests/test_ajax_tempo.py +++ b/tests/test_ajax_tempo.py @@ -5,14 +5,16 @@ import pytest from db_schema import Tempo -from tests.conftest import contains_logline +from conftest import contains_logline -@pytest.mark.parametrize("response, status_code", - [(None, 200), (None, 500), ({"mock": "response"}, 200), ({"2099-01-01": "turquoise"}, 200)]) +@pytest.mark.parametrize( + "response, status_code", + [(None, 200), (None, 500), ({"mock": "response"}, 200), ({"2099-01-01": "turquoise"}, 200)], +) def test_fetch_tempo(mocker, caplog, requests_mock, response, status_code): from models.ajax import Ajax - from config import URL + from const import URL start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=2)).strftime("%Y-%m-%d") @@ -40,8 +42,11 @@ def test_fetch_tempo(mocker, caplog, requests_mock, response, status_code): assert m_db_set_tempo.call_count == 1 assert m_db_set_tempo_config.call_count == 0 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) else: assert res == "OK" @@ -49,8 
+54,11 @@ def test_fetch_tempo(mocker, caplog, requests_mock, response, status_code): assert m_db_set_tempo.call_count == 0 assert m_db_set_tempo_config.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) @pytest.mark.parametrize("response", [None, [Tempo(date="2099-01-01", color="turquoise")]]) @@ -76,5 +84,8 @@ def test_get_tempo(mocker, caplog, response): assert m_db_set_tempo.call_count == 0 assert m_db_set_tempo_config.call_count == 0 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) diff --git a/tests/test_job_get_account_status.py b/tests/test_job_get_account_status.py index 510db42e..beb49a1e 100644 --- a/tests/test_job_get_account_status.py +++ b/tests/test_job_get_account_status.py @@ -1,8 +1,7 @@ import pytest -from test_jobs import job from db_schema import UsagePoints -from tests.conftest import contains_logline +from conftest import contains_logline import logging @@ -26,7 +25,7 @@ ], ) def test_get_account_status(mocker, job, caplog, status_response, status_code, requests_mock): - from config import URL + from const import URL m_set_error_log = mocker.patch("models.database.Database.set_error_log") m_usage_point_update = mocker.patch("models.database.Database.usage_point_update") diff --git a/tests/test_job_get_contract.py b/tests/test_job_get_contract.py index 55ae0c0c..5213cf26 100644 --- a/tests/test_job_get_contract.py +++ b/tests/test_job_get_contract.py @@ -4,7 +4,7 @@ from db_schema import UsagePoints from test_jobs import job -from tests.conftest 
import contains_logline +from conftest import contains_logline @pytest.mark.parametrize( @@ -15,19 +15,22 @@ ({"detail": "falsy response"}, 500), ( { - "customer": {"usage_points": [ - {"usage_point": { - "usage_point_status": "mock_value", - "meter_type": "mock meter type" - }, - "contracts": { - "offpeak_hours": None, "last_activation_date": "2099-01-01+00:00", - "last_distribution_tariff_change_date": "2099-01-01+00:00", - "segment": "mock_segment", - "subscribed_power": "10000000kVA", - "distribution_tariff": "mock tariff", - "contract_status": "mock status" - }}]}, + "customer": { + "usage_points": [ + { + "usage_point": {"usage_point_status": "mock_value", "meter_type": "mock meter type"}, + "contracts": { + "offpeak_hours": None, + "last_activation_date": "2099-01-01+00:00", + "last_distribution_tariff_change_date": "2099-01-01+00:00", + "segment": "mock_segment", + "subscribed_power": "10000000kVA", + "distribution_tariff": "mock tariff", + "contract_status": "mock status", + }, + } + ] + }, "call_number": 42, "quota_limit": 42, "quota_reached": 42, @@ -39,7 +42,7 @@ ], ) def test_get_contract(mocker, job, caplog, status_response, status_code, requests_mock): - from config import URL + from const import URL m_set_error_log = mocker.patch("models.database.Database.set_error_log") m_get_contract = mocker.patch("models.database.Database.get_contract") @@ -48,9 +51,7 @@ def test_get_contract(mocker, job, caplog, status_response, status_code, request requests_mocks = list() if job.usage_point_id: - rm = requests_mock.get( - f"{URL}/contracts/{job.usage_point_id}", json=status_response, status_code=status_code - ) + rm = requests_mock.get(f"{URL}/contracts/{job.usage_point_id}", json=status_response, status_code=status_code) requests_mocks.append(rm) # FIXME: If job has usage_point_id, get_contract() expects @@ -76,8 +77,7 @@ def test_get_contract(mocker, job, caplog, status_response, status_code, request if is_truthy_response: if status_code != 200 and 
status_response: # If the status code is truthy, but not 200, the contents of response['detail'] are logged - assert contains_logline(caplog, "{'error': True, 'description': 'truthy " - "response'}", logging.ERROR) + assert contains_logline(caplog, "{'error': True, 'description': 'truthy " "response'}", logging.ERROR) elif status_response and status_response.get("customer"): # Successful case: db is updated & set_error_log is called with None diff --git a/tests/test_job_get_ecowatt.py b/tests/test_job_get_ecowatt.py index 79b85263..c4233551 100644 --- a/tests/test_job_get_ecowatt.py +++ b/tests/test_job_get_ecowatt.py @@ -3,12 +3,20 @@ from dateutil.relativedelta import relativedelta import pytest from test_jobs import job -from tests.conftest import contains_logline +from conftest import contains_logline -@pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200)]) +@pytest.mark.parametrize( + "response, status_code", + [ + (None, 200), + (None, 500), + ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200), + ], +) def test_get_ecowatt(mocker, job, caplog, requests_mock, response, status_code): - from config import URL + from const import URL + start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=3)).strftime("%Y-%m-%d") @@ -34,12 +42,18 @@ def test_get_ecowatt(mocker, job, caplog, requests_mock, response, status_code): assert m_db_get_ecowatt.call_count == 1 assert m_db_set_ecowatt.call_count == 1 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) else: assert m_db_get_ecowatt.call_count == 1 assert 
m_db_set_ecowatt.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur " - "lors de la récupération des données Ecowatt.'}", logging.ERROR) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur " "lors de la récupération des données Ecowatt.'}", + logging.ERROR, + ) diff --git a/tests/test_job_get_gateway_status.py b/tests/test_job_get_gateway_status.py index 9f5f3ab8..d17a132c 100644 --- a/tests/test_job_get_gateway_status.py +++ b/tests/test_job_get_gateway_status.py @@ -2,12 +2,12 @@ import pytest from test_jobs import job -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"mock": "response"}, 200)]) def test_get_gateway_status(job, caplog, requests_mock, response, status_code): - from config import URL + from const import URL requests_mock.get(f"{URL}/ping", json=response, status_code=status_code) @@ -21,6 +21,10 @@ def test_get_gateway_status(job, caplog, requests_mock, response, status_code): if status_code == 200: if response: - assert not contains_logline(caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR) + assert not contains_logline( + caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR + ) else: - assert contains_logline(caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR) + assert contains_logline( + caplog, "Erreur lors de la récupération du statut de la passerelle :", logging.ERROR + ) diff --git a/tests/test_job_get_tempo.py b/tests/test_job_get_tempo.py index 59f706ba..a07d1c0d 100644 --- a/tests/test_job_get_tempo.py +++ b/tests/test_job_get_tempo.py @@ -3,12 +3,13 @@ from dateutil.relativedelta import relativedelta import pytest from test_jobs import job -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, 
status_code", [(None, 200), (None, 500), ({"2099-01-01": "turquoise"}, 200)]) def test_get_tempo(mocker, job, caplog, requests_mock, response, status_code): - from config import URL + from const import URL + start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") end = (datetime.now() + relativedelta(days=2)).strftime("%Y-%m-%d") @@ -38,13 +39,19 @@ def test_get_tempo(mocker, job, caplog, requests_mock, response, status_code): assert m_db_set_tempo.call_count == 1 assert m_db_set_tempo_config.call_count == 2 - assert not contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert not contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) else: assert m_db_get_tempo.call_count == 1 assert m_db_set_tempo.call_count == 0 # FIXME: set_tempo_config shouldn't be called when status_code != 200 # assert m_db_set_tempo_config.call_count == 0 - assert contains_logline(caplog, "{'error': True, 'description': 'Erreur lors " - "de la récupération de données Tempo.'}", logging.ERROR) + assert contains_logline( + caplog, + "{'error': True, 'description': 'Erreur lors " "de la récupération de données Tempo.'}", + logging.ERROR, + ) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 871236cb..58528065 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,8 +1,8 @@ import logging import pytest -from conftest import setenv, contains_logline +from conftest import contains_logline, setenv from db_schema import UsagePoints EXPORT_METHODS = ["export_influxdb", "export_home_assistant_ws", "export_home_assistant", "export_mqtt"] @@ -71,7 +71,7 @@ def test_job_import_data(mocker, job, caplog): def test_header_generate(job, caplog): - from dependencies import get_version + from utils import get_version expected_logs = "" # FIXME: header_generate() assumes job.usage_point_config is populated from a 
side effect @@ -112,12 +112,10 @@ def test_header_generate(job, caplog): ) @pytest.mark.parametrize("side_effect", [None, Exception("Mocker: call failed")]) def test_get_no_return_check(mocker, job, caplog, side_effect, return_value, method, patch, details): - """ - This test covers all methods that call "get" methods from query objects: + """This test covers all methods that call "get" methods from query objects: - without checking for their return value - without calling set_error_log on failure """ - m = mocker.patch(patch) m_set_error_log = mocker.patch("models.database.Database.set_error_log") mocker.patch("models.jobs.Job.header_generate") diff --git a/tests/test_query_detail.py b/tests/test_query_detail.py index 35f0ab97..6d05e441 100644 --- a/tests/test_query_detail.py +++ b/tests/test_query_detail.py @@ -13,7 +13,7 @@ class MockResponse: @pytest.mark.parametrize("measure_type", ["consumption", "production"]) def test_get(mocker, measure_type): - from models.query_detail import Detail + from external_services.myelectricaldata.detail import Detail m_get: mock.Mock = mocker.patch("models.query.Query.get") m_insert_detail: mock.Mock = mocker.patch("models.database.Database.insert_detail") diff --git a/toolbox/tools/jaeger.yaml b/toolbox/tools/jaeger.yaml new file mode 100644 index 00000000..2a0a86ad --- /dev/null +++ b/toolbox/tools/jaeger.yaml @@ -0,0 +1,32 @@ +version: "3.4" + +services: + jaeger: + image: jaegertracing/all-in-one:latest + user: root + hostname: jaeger + ports: + - 6831:6831/udp # accept jaeger.thrift in compact Thrift protocol used by most current Jaeger clients + - 6832:6832/udp # accept jaeger.thrift in binary Thrift protocol used by Node.js Jaeger client (because thriftrw npm package does not support compact protocol) + - 5775:5775/udp # accept zipkin.thrift in compact Thrift protocol (deprecated; only used by very old Jaeger clients, circa 2016) + - 5778:5778 # serve configs, sampling strategies + - 4317:4317 # OpenTelemetry Protocol 
(OTLP) over gRPC + - 4318:4318 # OpenTelemetry Protocol (OTLP) over HTTP + - 16686:16686 # UI port + - 14269:14269 # collector admin port: health check at / and metrics at /metrics + - 9411:9411 # Zipkin compatible endpoint + environment: + - MEMORY_MAX_TRACES=100000 + - COLLECTOR_ZIPKIN_HOST_PORT=:9411 + - COLLECTOR_OTLP_ENABLED=true + - SPAN_STORAGE_TYPE=badger + - BADGER_EPHEMERAL=false + - BADGER_DIRECTORY_VALUE=/badger/data + - BADGER_DIRECTORY_KEY=/badger/key + volumes: + - /tmp/jaeger:/badger + networks: + - backend_network + +networks: + backend_network: diff --git a/zscaler.crt b/zscaler.crt new file mode 100644 index 00000000..e69de29b