From 2959b0576f3a605d6f5f8b727abbf425b0679f9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Fayard-Le=20Barzic?= Date: Tue, 23 Apr 2024 16:12:12 +0200 Subject: [PATCH 01/44] fix export to influxdb for production --- src/models/export_influxdb.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/models/export_influxdb.py b/src/models/export_influxdb.py index c6e0115b..57d82124 100755 --- a/src/models/export_influxdb.py +++ b/src/models/export_influxdb.py @@ -129,6 +129,7 @@ def detail(self, measurement_direction="consumption"): else: euro = kwatth * self.usage_point_config.consumption_price_hc else: + measure_type = "BASE" euro = kwatth * self.usage_point_config.production_price INFLUXDB.write( measurement=measurement, From b7a856ac735070bf2fd11e29f58f77457d14ee8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Wed, 22 May 2024 13:54:09 +0200 Subject: [PATCH 02/44] chore: add devcontainer --- .devcontainer/Dockerfile | 36 ++++++++++++++++++++++++++++++++- .devcontainer/boot.sh | 6 ++++++ .devcontainer/devcontainer.json | 5 +++-- .devcontainer/post-install.sh | 3 ++- 4 files changed, 46 insertions(+), 4 deletions(-) create mode 100755 .devcontainer/boot.sh diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 957b3c67..4bd938ce 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -19,4 +19,38 @@ ENV LANG ${LOCAL} RUN docker context use default RUN docker buildx use default -WORKDIR /workspace \ No newline at end of file +ARG home=/home/vscode + +USER vscode + +COPY ./ /workspace + +# ZSH +RUN mkdir -p ${home}/.local/share/fonts +RUN curl -fL https://raw.githubusercontent.com/ryanoasis/nerd-fonts/master/patched-fonts/DroidSansMono/DroidSansMNerdFontMono-Regular.otf --output ${home}/.local/share/fonts/DroidSansMNerdFontMono-Regular.otf +RUN git clone https://github.com/tarjoilija/zgen.git "${home}/.zgen" +RUN git clone --depth=1 https://github.com/romkatv/powerlevel10k.git 
${ZSH_CUSTOM:-${home}/.oh-my-zsh/custom}/themes/powerlevel10k +RUN cp /workspace/.devcontainer/zshrc ${home}/.zshrc + +# ASDF +RUN git clone https://github.com/asdf-vm/asdf.git ${home}/.asdf +RUN echo '. "$HOME/.asdf/asdf.sh"' >> ${home}/.bashrc +RUN echo '. "$HOME/.asdf/completions/asdf.bash"' >> ${home}/.bashrc +RUN echo '. "$HOME/.asdf/asdf.sh"' >> ${home}/.zshrc +RUN echo '. "$HOME/.asdf/completions/asdf.bash"' >> ${home}/.zshrc +RUN export PATH=~/.asdf/bin:~/.asdf/shims:$PATH + +# CURL SSL DISABLE +RUN sudo update-ca-certificates --fresh +RUN echo "insecure" >> ${home}/.curlrc + +# RUN POETRY_VERSION=$(cat .tool-versions|grep 'poetry' | cut -d " " -f 2) && export PATH=${home}/.asdf/installs/poetry/$POETRY_VERSION/bin:$PATH + +RUN sudo chown -Rf vscode:vscode ${home} +RUN sudo chown -Rf vscode:vscode /workspace + +WORKDIR /workspace + +SHELL ["/bin/bash", "-c"] +RUN source ${home}/.asdf/asdf.sh && make install +RUN sudo rm -rf /workspace diff --git a/.devcontainer/boot.sh b/.devcontainer/boot.sh new file mode 100755 index 00000000..3ec54bba --- /dev/null +++ b/.devcontainer/boot.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -x +export PATH=~/.asdf/bin:~/.asdf/shims:$PATH +export PATH=~/.asdf/installs/poetry/$POETRY_VERSION/bin:$PATH +make configure-poetry +make dev diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 159614d7..555370d4 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,9 +2,9 @@ "name": "MyElectricalData development", "dockerComposeFile": ["docker-compose.yaml"], "shutdownAction": "stopCompose", - "postCreateCommand": "/bin/bash -lc ./.devcontainer/post-install.sh", + // "postCreateCommand": "/bin/zsh -lc ./.devcontainer/post-install.sh", // "postCreateCommand": "make init-devcontainer", - "postStartCommand": "make dev", + "postStartCommand": "/bin/zsh -lc ./.devcontainer/boot.sh", "service": "workspace", "workspaceFolder": "/workspace", "forwardPorts": [], @@ -56,6 +56,7 @@ 
"git.autofetch": true, "window.title": "${rootName}${dirty}${activeEditorShort}${separator}${separator}${profileName}${separator}${appName}", "http.proxyStrictSSL": false, + "terminal.integrated.defaultProfile.linux": "zsh", // PYTHON "python.testing.pytestEnabled": true, "python.analysis.autoImportCompletions": false, diff --git a/.devcontainer/post-install.sh b/.devcontainer/post-install.sh index 3a8ef1c8..e37e151c 100755 --- a/.devcontainer/post-install.sh +++ b/.devcontainer/post-install.sh @@ -22,4 +22,5 @@ echo "insecure" >> ${HOME}/.curlrc sudo update-ca-certificates --fresh echo "" echo "Install environment" -make install \ No newline at end of file +make install +chmod +x /workspace/.devcontainer/boot.sh \ No newline at end of file From a1157195d4aa3c9b968503e547a9284fa142b499 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Wed, 22 May 2024 13:54:21 +0200 Subject: [PATCH 03/44] chore: vsconfig --- .vscode/settings.json | 41 +++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 40e2b38a..3a50b6f0 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,17 +1,26 @@ { - "sqltools.connections": [ - { - "previewLimit": 50, - "server": "127.0.0.1", - "port": 5432, - "driver": "PostgreSQL", - "name": "MED Import", - "group": "MED", - "database": "myelectricaldata", - "username": "myelectricaldata", - "password": "myelectricaldata" - } - ], - "github-actions.workflows.pinned.workflows": [], - "GitHooks.hooksDirectory": "/home/cvalentin/git/myelectricaldata/myelectricaldata_import/.git/hooks" -} \ No newline at end of file + "sqltools.connections": [ + { + "previewLimit": 50, + "server": "127.0.0.1", + "port": 5432, + "driver": "PostgreSQL", + "name": "MED Import", + "group": "MED", + "database": "myelectricaldata", + "username": "myelectricaldata", + "password": "myelectricaldata" + } + ], + 
"github-actions.workflows.pinned.workflows": [], + "GitHooks.hooksDirectory": "/home/cvalentin/git/myelectricaldata/myelectricaldata_import/.git/hooks", + "files.exclude": { + "**/.git": true, + "**/.svn": true, + "**/.hg": true, + "**/CVS": true, + "**/.DS_Store": true, + "**/Thumbs.db": true + }, + "hide-files.files": [] +} From cbb3352bf93c27e38f83f9ba0358febba9b85730 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Wed, 22 May 2024 13:54:35 +0200 Subject: [PATCH 04/44] chore: tnr --- tests/test_ajax_ecowatt.py | 2 +- tests/test_ajax_get_account_status.py | 2 +- tests/test_ajax_get_gateway_status.py | 2 +- tests/test_ajax_tempo.py | 2 +- tests/test_job_get_account_status.py | 3 +-- tests/test_job_get_contract.py | 2 +- tests/test_job_get_ecowatt.py | 2 +- tests/test_job_get_gateway_status.py | 2 +- tests/test_job_get_tempo.py | 2 +- 9 files changed, 9 insertions(+), 10 deletions(-) diff --git a/tests/test_ajax_ecowatt.py b/tests/test_ajax_ecowatt.py index 1000fd19..3951e744 100644 --- a/tests/test_ajax_ecowatt.py +++ b/tests/test_ajax_ecowatt.py @@ -6,7 +6,7 @@ import pytest from db_schema import Ecowatt -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, status_code, expect_exception, expect_success", [ diff --git a/tests/test_ajax_get_account_status.py b/tests/test_ajax_get_account_status.py index 5ed10d59..924710f5 100644 --- a/tests/test_ajax_get_account_status.py +++ b/tests/test_ajax_get_account_status.py @@ -2,7 +2,7 @@ import pytest -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("usage_point_id", ["pdl1"]) diff --git a/tests/test_ajax_get_gateway_status.py b/tests/test_ajax_get_gateway_status.py index 737c50b1..24e17437 100644 --- a/tests/test_ajax_get_gateway_status.py +++ b/tests/test_ajax_get_gateway_status.py @@ -3,7 +3,7 @@ import pytest -from tests.conftest import contains_logline +from conftest 
import contains_logline @pytest.mark.parametrize("usage_point_id", [None, "pdl1"]) diff --git a/tests/test_ajax_tempo.py b/tests/test_ajax_tempo.py index f7d585e2..744afc3e 100644 --- a/tests/test_ajax_tempo.py +++ b/tests/test_ajax_tempo.py @@ -5,7 +5,7 @@ import pytest from db_schema import Tempo -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, status_code", diff --git a/tests/test_job_get_account_status.py b/tests/test_job_get_account_status.py index 510db42e..1f5b5c18 100644 --- a/tests/test_job_get_account_status.py +++ b/tests/test_job_get_account_status.py @@ -1,8 +1,7 @@ import pytest -from test_jobs import job from db_schema import UsagePoints -from tests.conftest import contains_logline +from conftest import contains_logline import logging diff --git a/tests/test_job_get_contract.py b/tests/test_job_get_contract.py index 55ae0c0c..42bd78a5 100644 --- a/tests/test_job_get_contract.py +++ b/tests/test_job_get_contract.py @@ -4,7 +4,7 @@ from db_schema import UsagePoints from test_jobs import job -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize( diff --git a/tests/test_job_get_ecowatt.py b/tests/test_job_get_ecowatt.py index 79b85263..b413e00b 100644 --- a/tests/test_job_get_ecowatt.py +++ b/tests/test_job_get_ecowatt.py @@ -3,7 +3,7 @@ from dateutil.relativedelta import relativedelta import pytest from test_jobs import job -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"2099-01-01": {"value": 9000, "message": "mock message", "detail": "mock detail"}}, 200)]) diff --git a/tests/test_job_get_gateway_status.py b/tests/test_job_get_gateway_status.py index 9f5f3ab8..94b0e950 100644 --- a/tests/test_job_get_gateway_status.py +++ b/tests/test_job_get_gateway_status.py @@ -2,7 +2,7 @@ import pytest from test_jobs import 
job -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"mock": "response"}, 200)]) diff --git a/tests/test_job_get_tempo.py b/tests/test_job_get_tempo.py index 59f706ba..740abfbb 100644 --- a/tests/test_job_get_tempo.py +++ b/tests/test_job_get_tempo.py @@ -3,7 +3,7 @@ from dateutil.relativedelta import relativedelta import pytest from test_jobs import job -from tests.conftest import contains_logline +from conftest import contains_logline @pytest.mark.parametrize("response, status_code", [(None, 200), (None, 500), ({"2099-01-01": "turquoise"}, 200)]) From 7ac2a649449ec977545026b9dc1da265996f5bd8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20VALENTIN?= Date: Wed, 22 May 2024 13:55:49 +0200 Subject: [PATCH 05/44] refactor: rework database --- src/__init__.py | 1 + src/config.py | 32 + src/database/__init__.py | 6 + src/database/addresses.py | 80 + src/database/config.py | 78 + src/database/contracts.py | 110 ++ src/database/daily.py | 425 +++++ src/database/detail.py | 473 +++++ src/database/ecowatt.py | 71 + src/database/main.py | 150 ++ src/database/max_power.py | 347 ++++ src/database/statistique.py | 44 + src/database/tempo.py | 109 ++ src/database/usage_points.py | 194 ++ src/db_schema.py | 58 +- src/dependencies.py | 96 +- src/init.py | 31 +- src/main.py | 47 +- src/models/__init__.py | 1 + src/models/ajax.py | 344 ++-- src/models/config.py | 19 +- src/models/database.py | 1871 -------------------- src/models/export_home_assistant.py | 68 +- src/models/export_home_assistant_ws.py | 36 +- src/models/export_influxdb.py | 93 +- src/models/export_mqtt.py | 232 +-- src/models/export_mqttv1.py | 441 ----- src/models/influxdb.py | 257 ++- src/models/jobs.py | 150 +- src/models/query.py | 4 +- src/models/query_address.py | 50 +- src/models/query_contract.py | 47 +- src/models/query_daily.py | 122 +- src/models/query_detail.py | 153 +- 
src/models/query_ecowatt.py | 32 +- src/models/query_power.py | 130 +- src/models/query_status.py | 34 +- src/models/query_tempo.py | 94 +- src/models/stat.py | 430 +++-- src/routers/data.py | 79 +- src/routers/html.py | 37 +- src/routers/info.py | 4 +- src/templates/loading.py | 1 - src/templates/models/configuration.py | 102 +- src/templates/models/usage_point_select.py | 11 +- src/templates/usage_point.py | 256 +-- 46 files changed, 4003 insertions(+), 3447 deletions(-) create mode 100644 src/database/addresses.py create mode 100644 src/database/config.py create mode 100644 src/database/contracts.py create mode 100644 src/database/daily.py create mode 100644 src/database/detail.py create mode 100644 src/database/ecowatt.py create mode 100644 src/database/main.py create mode 100644 src/database/max_power.py create mode 100644 src/database/statistique.py create mode 100644 src/database/tempo.py create mode 100644 src/database/usage_points.py delete mode 100644 src/models/database.py delete mode 100644 src/models/export_mqttv1.py diff --git a/src/__init__.py b/src/__init__.py index e69de29b..ded90369 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -0,0 +1 @@ +"""Init file for MyElectricalData.""" diff --git a/src/config.py b/src/config.py index 99e3d6d1..b3704a3a 100755 --- a/src/config.py +++ b/src/config.py @@ -1,3 +1,12 @@ +"""Configuration file for myelectricaldata.""" + +from pathlib import Path + +import pytz +import yaml + +from dependencies import APPLICATION_PATH_DATA + LOG_FORMAT = "%(asctime)s.%(msecs)03d - %(levelname)8s : %(message)s" LOG_FORMAT_DATE = "%Y-%m-%d %H:%M:%S" @@ -8,3 +17,26 @@ DAILY_MAX_DAYS = 1094 DETAIL_MAX_DAYS = 728 + +TEMPO_BEGIN = 6 +TEMPO_END = 22 + +# Return code +CODE_200_SUCCESS = 200 +CODE_204_NO_CONTENT = 204 +CODE_400_BAD_REQUEST = 400 +CODE_404_NOT_FOUND = 404 +CODE_409_CONFLICT = 409 +CODE_403_FORBIDDEN = 403 +CODE_422_UNPROCESSABLE_ENTITY = 422 +CODE_429_TOO_MANY_REQUEST = 429 +CODE_500_INTERNAL_SERVER_ERROR = 500 + 
+TIMEZONE = pytz.timezone("Europe/Paris") +TIMEZONE_UTC = pytz.timezone("UTC") + +CONFIG_PATH_FILE = f"{APPLICATION_PATH_DATA}/config.yaml" +CONFIG_FILENAME = f"{CONFIG_PATH_FILE}" +if Path(CONFIG_FILENAME).exists(): + with Path(CONFIG_FILENAME).open(encoding="utf-8") as file: + CONFIG_FILE = yaml.safe_load(file) diff --git a/src/database/__init__.py b/src/database/__init__.py index e69de29b..b99b2b65 100644 --- a/src/database/__init__.py +++ b/src/database/__init__.py @@ -0,0 +1,6 @@ +"""Module to manage database data.""" +from database.main import Database + +DB = Database() +DB.init_database() +DB.unlock() diff --git a/src/database/addresses.py b/src/database/addresses.py new file mode 100644 index 00000000..ae1e7379 --- /dev/null +++ b/src/database/addresses.py @@ -0,0 +1,80 @@ +"""Manage Addresses table in database.""" +from sqlalchemy import delete, select + +from database import DB +from db_schema import ( + Addresses, + UsagePoints, +) + + +class DatabaseAddresses: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.usage_point_id = usage_point_id + + def get( + self, + ): + """Retrieve the address associated with the given usage point ID.""" + query = ( + select(Addresses) + .join(UsagePoints.relation_addressess) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def set(self, data, count=0): + """Set the address associated with the given usage point ID. + + Args: + data (dict): The address data. + count (int, optional): The count value. Defaults to 0. 
+ """ + query = ( + select(Addresses) + .join(UsagePoints.relation_addressess) + .where(Addresses.usage_point_id == self.usage_point_id) + ) + addresses = self.session.scalars(query).one_or_none() + if addresses is not None: + addresses.street = data["street"] + addresses.locality = data["locality"] + addresses.postal_code = data["postal_code"] + addresses.insee_code = data["insee_code"] + addresses.city = data["city"] + addresses.country = data["country"] + addresses.geo_points = data["geo_points"] + addresses.count = count + else: + self.session.add( + Addresses( + usage_point_id=self.usage_point_id, + street=data["street"], + locality=data["locality"], + postal_code=data["postal_code"], + insee_code=data["insee_code"], + city=data["city"], + country=data["country"], + geo_points=data["geo_points"], + count=count, + ) + ) + self.session.flush() + self.session.close() + + def delete(self): + """Delete the address associated with the given usage point ID. + + Returns: + bool: True if the address is successfully deleted, False otherwise. 
+ """ + self.session.execute(delete(Addresses).where(Addresses.usage_point_id == self.usage_point_id)) + self.session.flush() + self.session.close() + return True diff --git a/src/database/config.py b/src/database/config.py new file mode 100644 index 00000000..7a376059 --- /dev/null +++ b/src/database/config.py @@ -0,0 +1,78 @@ +"""Manage Config table in database.""" + +import json +import logging + +from sqlalchemy import select + +from database import DB +from database.usage_points import DatabaseUsagePoints +from db_schema import Config as ConfigTable +from dependencies import title +from models.config import Config + + +class DatabaseConfig: + """Manage configuration for the database.""" + + def __init__(self): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.config = Config() + + def load_config_file(self): + """Load the database configuration and clean the database.""" + title("Chargement du config.yaml...") + logging.info(" - Home Assistant") + if self.config.home_assistant_config() is not None: + self.set("home_assistant", self.config.home_assistant_config()) + logging.info(" => Success") + else: + logging.warning("Aucune configuration Home Assistant détectée.") + logging.info(" - Home Assistant Websocket") + if self.config.home_assistant_ws_config() is not None: + self.set("home_assistant_ws", self.config.home_assistant_ws_config()) + logging.info(" => Success") + else: + logging.warning("Aucune configuration Home Assistant Websocket détectée.") + logging.info(" - InfluxDB") + if self.config.influxdb_config() is not None: + self.set("influxdb", self.config.influxdb_config()) + logging.info(" => Success") + else: + logging.warning("Aucune configuration InfluxDB détectée.") + logging.info(" - MQTT") + if self.config.mqtt_config() is not None: + self.set("mqtt", self.config.mqtt_config()) + logging.info(" => Success") + else: + logging.warning("Aucune configuration MQTT détectée.") + logging.info(" - Point de livraison") + 
usage_point_list = [] + if self.config.list_usage_point() is not None: + for upi, upi_data in self.config.list_usage_point().items(): + logging.info(f" {upi}") + DatabaseUsagePoints(upi).set(upi_data) + usage_point_list.append(upi) + logging.info(" => Success") + else: + logging.warning("Aucun point de livraison détecté.") + + def get(self, key): + """Get data from config table.""" + query = select(ConfigTable).where(ConfigTable.key == key) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def set(self, key, value): + """Set data from config table.""" + query = select(ConfigTable).where(ConfigTable.key == key) + config = self.session.scalars(query).one_or_none() + if config: + config.value = json.dumps(value) + else: + self.session.add(ConfigTable(key=key, value=json.dumps(value))) + self.session.flush() + self.session.close() + DB.refresh_object() diff --git a/src/database/contracts.py b/src/database/contracts.py new file mode 100644 index 00000000..a6fceb6a --- /dev/null +++ b/src/database/contracts.py @@ -0,0 +1,110 @@ +"""Manage Contracts table in database.""" + +from sqlalchemy import delete, select + +from database import DB +from db_schema import ( + Contracts, + UsagePoints, +) + + +class DatabaseContracts: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.usage_point_id = usage_point_id + + def get(self): + """Retrieve the contract associated with the given usage point ID. + + Returns: + Contracts: The contract object if found, None otherwise. + """ + query = ( + select(Contracts) + .join(UsagePoints.relation_contract) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def set( + self, + data, + count=0, + ): + """Set the contract details for the given usage point ID. 
+ + Args: + usage_point_id (int): The ID of the usage point. + data (dict): A dictionary containing the contract details. + count (int, optional): The count value. Defaults to 0. + + Returns: + None + """ + query = ( + select(Contracts) + .join(UsagePoints.relation_contract) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ) + contract = self.session.scalars(query).one_or_none() + if contract is not None: + contract.usage_point_status = data["usage_point_status"] + contract.meter_type = data["meter_type"] + contract.segment = data["segment"] + contract.subscribed_power = data["subscribed_power"] + contract.last_activation_date = data["last_activation_date"] + contract.distribution_tariff = data["distribution_tariff"] + contract.offpeak_hours_0 = data["offpeak_hours_0"] + contract.offpeak_hours_1 = data["offpeak_hours_1"] + contract.offpeak_hours_2 = data["offpeak_hours_2"] + contract.offpeak_hours_3 = data["offpeak_hours_3"] + contract.offpeak_hours_4 = data["offpeak_hours_4"] + contract.offpeak_hours_5 = data["offpeak_hours_5"] + contract.offpeak_hours_6 = data["offpeak_hours_6"] + contract.contract_status = data["contract_status"] + contract.last_distribution_tariff_change_date = data["last_distribution_tariff_change_date"] + contract.count = count + else: + self.session.add( + Contracts( + usage_point_id=self.usage_point_id, + usage_point_status=data["usage_point_status"], + meter_type=data["meter_type"], + segment=data["segment"], + subscribed_power=data["subscribed_power"], + last_activation_date=data["last_activation_date"], + distribution_tariff=data["distribution_tariff"], + offpeak_hours_0=data["offpeak_hours_0"], + offpeak_hours_1=data["offpeak_hours_1"], + offpeak_hours_2=data["offpeak_hours_2"], + offpeak_hours_3=data["offpeak_hours_3"], + offpeak_hours_4=data["offpeak_hours_4"], + offpeak_hours_5=data["offpeak_hours_5"], + offpeak_hours_6=data["offpeak_hours_6"], + contract_status=data["contract_status"], + 
last_distribution_tariff_change_date=data["last_distribution_tariff_change_date"], + count=count, + ) + ) + self.session.flush() + self.session.close() + + def delete(self): + """Delete the contract associated with the given usage point ID. + + Args: + usage_point_id (int): The ID of the usage point. + + Returns: + bool: True if the address is successfully deleted, False otherwise. + """ + self.session.execute(delete(Contracts).where(Contracts.usage_point_id == self.usage_point_id)) + self.session.flush() + self.session.close() + return True diff --git a/src/database/daily.py b/src/database/daily.py new file mode 100644 index 00000000..61657164 --- /dev/null +++ b/src/database/daily.py @@ -0,0 +1,425 @@ +"""Manage Config table in database.""" + +import hashlib +import logging +from datetime import datetime, timedelta + +from sqlalchemy import asc, delete, desc, func, select, update + +from config import MAX_IMPORT_TRY, TIMEZONE_UTC +from database import DB +from db_schema import ConsumptionDaily, ProductionDaily, UsagePoints + + +class DatabaseDaily: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id, measurement_direction="consumption"): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.usage_point_id = usage_point_id + self.measurement_direction = measurement_direction + if self.measurement_direction == "consumption": + self.table = ConsumptionDaily + self.relation = UsagePoints.relation_consumption_daily + else: + self.table = ProductionDaily + self.relation = UsagePoints.relation_production_daily + + def get_all(self): + """Retrieve all daily data for a given usage point and measurement direction.""" + data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .order_by(self.table.date.desc()) + ).all() + self.session.close() + return data + + def get_datatable( + self, + order_column="date", + order_dir="asc", + search=None, + ): + 
"""Retrieve datatable for a given usage point, search term, and measurement direction. + + Args: + order_column (str, optional): The column to order the datatable by. Defaults to "date". + order_dir (str, optional): The direction to order the datatable. Defaults to "asc". + search (str, optional): The search term. Defaults to None. + + Returns: + list: The datatable. + """ + yesterday = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1), datetime.max.time()) + sort = asc(order_column) if order_dir == "desc" else desc(order_column) + if search is not None and search != "": + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where((self.table.date.like(f"%{search}%")) | (self.table.value.like(f"%{search}%"))) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + else: + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + return result.all() + + def get_count(self): + """Retrieve the count of daily data for a given usage point and measurement direction. + + Returns: + int: The count of daily data. + """ + data = self.session.scalars( + select([func.count()]) + .select_from(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ).one_or_none() + self.session.close() + return data + + def get_date(self, date): + """Retrieve the data for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + object: The data. 
+ """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + data = self.session.scalars(select(self.table).join(self.relation).where(self.table.id == unique_id)).first() + self.session.flush() + self.session.close() + return data + + def get_state(self, date): + """Check the state of daily data for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + bool: True if the daily data exists, False otherwise. + """ + if self.get_date(date) is not None: + return True + else: + return False + + def get_last_date(self): + """Retrieve the last date for a given usage point and measurement direction. + + Returns: + str: The last date. + """ + current_data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date) + ).first() + self.session.flush() + self.session.close() + if current_data is None: + return False + else: + return current_data.date + + def get_last(self): + """Retrieve the last data point for a given usage point and measurement direction. + + Returns: + object: The last data point. + """ + current_data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .where(self.table.value != 0) + .order_by(self.table.date.desc()) + ).first() + self.session.flush() + self.session.close() + if current_data is None: + return False + else: + return current_data + + def get_first_date(self): + """Retrieve the first date for a given usage point and measurement direction. + + Returns: + str: The first date. 
+ """ + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).first() + if current_data is None: + return False + else: + return current_data.date + + def get_fail_count(self, date): + """Retrieve the fail count for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + int: The fail count. + """ + result = self.get_date(date) + if hasattr(result, "fail_count"): + return result.fail_count + else: + return 0 + + def fail_increment(self, date): + """Increment the fail count for a given usage point, date, and measurement direction. + + Args: + date (str): The date. + + Returns: + int: The updated fail count. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + daily = self.session.scalars(query).one_or_none() + if daily is not None: + fail_count = int(daily.fail_count) + 1 + if fail_count >= MAX_IMPORT_TRY: + blacklist = 1 + fail_count = 0 + else: + blacklist = 0 + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.value = 0 + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + fail_count = 0 + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=0, + fail_count=0, + ) + ) + self.session.flush() + return fail_count + + def get_range(self, begin, end): + """Retrieve the range of data for a given usage point, begin date, end date, and measurement direction. + + Args: + begin (str): The begin date. + end (str): The end date. 
+ + Returns: + list: The list of data within the specified range. + """ + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .where(self.table.date >= begin) + .where(self.table.date <= end) + .order_by(self.table.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).all() + if current_data is None: + return False + else: + return current_data + + def get(self, begin, end): + """Retrieve the data for a given usage point, begin date, end date, and measurement direction. + + Args: + begin (str): The begin date. + end (str): The end date. + + Returns: + dict: A dictionary containing the retrieved data. + """ + delta = end - begin + result = {"missing_data": False, "date": {}, "count": 0} + for i in range(delta.days + 1): + check_date = begin + timedelta(days=i) + check_date = datetime.combine(check_date, datetime.min.time()) + query_result = self.get_date(check_date) + check_date = check_date.strftime("%Y-%m-%d") + if query_result is None: + # NEVER QUERY + result["date"][check_date] = { + "status": False, + "blacklist": 0, + "value": 0, + } + result["missing_data"] = True + else: + consumption = query_result.value + blacklist = query_result.blacklist + if consumption == 0: + # ENEDIS RETURN NO DATA + result["date"][check_date] = { + "status": False, + "blacklist": blacklist, + "value": consumption, + } + result["missing_data"] = True + else: + # SUCCESS or BLACKLIST + result["date"][check_date] = { + "status": True, + "blacklist": blacklist, + "value": consumption, + } + return result + + def insert( + self, + date, + value, + blacklist=0, + fail_count=0, + ): + """Insert daily data for a given usage point, date, value, blacklist, fail count, and measurement direction. + + Args: + date (str): The date of the data. + value (float): The value of the data. + blacklist (int, optional): The blacklist status. Defaults to 0. 
+ fail_count (int, optional): The fail count. Defaults to 0. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + daily = self.session.scalars(query).one_or_none() + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + if daily is not None: + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.value = value + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=value, + blacklist=blacklist, + fail_count=fail_count, + ) + ) + self.session.flush() + + def reset( + self, + date=None, + ): + """Reset the daily data for a given usage point, date, and measurement type. + + Args: + date (str, optional): The date of the data. Defaults to None. + + Returns: + bool: True if the data was reset, False otherwise. + """ + data = self.get_date(date) + if data is not None: + values = { + self.table.value: 0, + self.table.blacklist: 0, + self.table.fail_count: 0, + } + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(update(self.table, values=values).where(self.table.id == unique_id)) + self.session.flush() + return True + else: + return False + + def delete(self, date=None): + """Delete the daily data for a given usage point, date, and measurement direction. + + Args: + date (str, optional): The date of the data. Defaults to None. + + Returns: + bool: True if the data was deleted, False otherwise. 
+ """ + if date is not None: + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(self.table).where(self.table.id == unique_id)) + else: + self.session.execute(delete(self.table).where(self.table.usage_point_id == self.usage_point_id)) + self.session.flush() + return True + + def blacklist(self, date, action=True): + """Blacklist or unblacklist the daily data for a given usage point, date, and measurement direction. + + Args: + date (str): The date of the data. + action (bool, optional): The action to perform. True to blacklist, False to unblacklist. Defaults to True. + + Returns: + bool: True if the data was blacklisted or unblacklisted, False otherwise. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + daily = self.session.scalars(query).one_or_none() + if daily is not None: + daily.blacklist = action + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=action, + fail_count=0, + ) + ) + self.session.flush() + return True + + def get_date_range(self): + """Get the date range for a given usage point. + + Returns: + dict: A dictionary containing the begin and end dates of the date range. 
+ """ + return { + "begin": self.get_last_date(), + "end": self.get_first_date(), + } diff --git a/src/database/detail.py b/src/database/detail.py new file mode 100644 index 00000000..f092fe70 --- /dev/null +++ b/src/database/detail.py @@ -0,0 +1,473 @@ +"""Manage Config table in database.""" + +import hashlib +import logging +from datetime import datetime, timedelta + +import pytz +from sqlalchemy import asc, delete, desc, func, select + +from config import MAX_IMPORT_TRY +from database import DB +from db_schema import ConsumptionDetail, ProductionDetail, UsagePoints + + +class DatabaseDetail: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id, measurement_direction="consumption"): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.min_entry = 300 + self.usage_point_id = usage_point_id + self.measurement_direction = measurement_direction + if self.measurement_direction == "consumption": + self.table = ConsumptionDetail + self.relation = UsagePoints.relation_consumption_detail + else: + self.table = ProductionDetail + self.relation = UsagePoints.relation_production_detail + + def get_all( + self, + begin=None, + end=None, + order_dir="desc", + ): + """Retrieve all records from the database. + + Args: + begin (datetime, optional): The start date of the range. Defaults to None. + end (datetime, optional): The end date of the range. Defaults to None. + order_dir (str, optional): The order direction. Defaults to "desc". + + Returns: + list: A list of records. 
+ """ + sort = asc("date") if order_dir == "desc" else desc("date") + if begin is None and end is None: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(sort) + ).all() + elif begin is not None and end is None: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .filter(self.table.date >= begin) + .order_by(sort) + ).all() + elif end is not None and begin is None: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .filter(self.table.date <= end) + .order_by(sort) + ).all() + else: + return self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .filter(self.table.date <= end) + .filter(self.table.date >= begin) + .order_by(sort) + ).all() + + def get_datatable( + self, + order_column="date", + order_dir="asc", + search=None, + ): + """Retrieve datatable from the database. + + Args: + order_column (str, optional): The column to order the datatable by. Defaults to "date". + order_dir (str, optional): The order direction. Defaults to "asc". + search (str, optional): The search query to filter the datatable. Defaults to None. + + Returns: + list: A list of datatable records. 
+ """ + yesterday = datetime.combine(datetime.now(tz=pytz.utc) - timedelta(days=1), datetime.max.time()) + sort = asc(order_column) if order_dir == "desc" else desc(order_column) + if search is not None and search != "": + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where((self.table.date.like(f"%{search}%")) | (self.table.value.like(f"%{search}%"))) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + else: + result = self.session.scalars( + select(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where(self.table.date <= yesterday) + .order_by(sort) + ) + return result.all() + + def get_count(self): + """Retrieve the count of records for a specific usage point and measurement direction. + + Returns: + int: The count of records. + """ + return self.session.scalars( + select([func.count()]) + .select_from(self.table) + .join(self.relation) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ).one_or_none() + + def get_date(self, date): + """Retrieve the data for a specific date from the database. + + Args: + date (str): The date in the format 'YYYY-MM-DD'. + + Returns: + object: The data for the specified date. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + return self.session.scalars(select(self.table).join(self.relation).where(self.table.id == unique_id)).first() + + def get_range( + self, + begin, + end, + order="desc", + ): + """Retrieve a range of data from the database. + + Args: + begin (datetime): The start of the range. + end (datetime): The end of the range. + order (str, optional): The order direction. Defaults to "desc". + + Returns: + list: A list of data records within the specified range. 
+ """ + if order == "desc": + order = self.table.date.desc() + else: + order = self.table.date.asc() + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .where(self.table.date >= begin) + .where(self.table.date <= end) + .order_by(order) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).all() + if current_data is None: + return False + else: + return current_data + + def get(self, begin, end): + """Retrieve data for a specific range from the database. + + Args: + begin (datetime): The start of the range. + end (datetime): The end of the range. + + Returns: + dict: A dictionary containing the retrieved data. + """ + delta = begin - begin + + result = {"missing_data": False, "date": {}, "count": 0} + + for _ in range(delta.days + 1): + query_result = self.get_all( + begin=begin, + end=end, + ) + time_delta = abs(int((begin - end).total_seconds() / 60)) + total_internal = 0 + for query in query_result: + total_internal = total_internal + query.interval + total_time = abs(total_internal - time_delta) + if total_time > self.min_entry: + logging.info(f" - {total_time}m absente du relevé.") + result["missing_data"] = True + else: + for query in query_result: + result["date"][query.date] = { + "value": query.value, + "interval": query.interval, + "measure_type": query.measure_type, + "blacklist": query.blacklist, + } + return result + + def get_state(self, date): + """Get the state of a specific data record in the database. + + Args: + date (datetime): The date of the data record. + + Returns: + bool: True if the data record exists, False otherwise. 
+ """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + current_data = self.session.scalars( + select(self.table).join(self.relation).where(self.table.id == unique_id) + ).one_or_none() + if current_data is None: + return False + else: + return True + + def insert( # noqa: PLR0913 + self, + date, + value, + interval, + blacklist=0, + fail_count=0, + ): + """Insert a new record into the database for the given consumption or production detail. + + Args: + date (datetime): The date of the record. + value (float): The value of the record. + interval (int): The interval of the record. + blacklist (int, optional): The blacklist status of the record. Defaults to 0. + fail_count (int, optional): The fail count of the record. Defaults to 0. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + detail = self.get_date(date) + if detail is not None: + detail.id = unique_id + detail.usage_point_id = self.usage_point_id + detail.date = date + detail.value = value + detail.interval = interval + detail.measure_type = self.measurement_direction + detail.blacklist = blacklist + detail.fail_count = fail_count + else: + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=value, + interval=interval, + measure_type=self.measurement_direction, + blacklist=blacklist, + fail_count=fail_count, + ) + ) + self.session.flush() + + def reset(self, date=None): + """Reset the values of a consumption or production detail record. + + Args: + date (datetime, optional): The date of the record. Defaults to None. + + Returns: + bool: True if the reset was successful, False otherwise. 
+ """ + detail = self.get_date(date) + if detail is not None: + detail.value = 0 + detail.interval = 0 + detail.blacklist = 0 + detail.fail_count = 0 + self.session.flush() + return True + else: + return False + + def reset_range(self, begin, end): + """Reset the values of consumption or production detail records within a specified range. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + bool: True if the reset was successful, False otherwise. + """ + detail = self.get_range(begin, end) + if detail is not None: + for row in detail: + row.value = 0 + row.interval = 0 + row.blacklist = 0 + row.fail_count = 0 + self.session.flush() + return True + else: + return False + + def delete(self, date=None): + """Delete a consumption or production detail record. + + Args: + date (datetime, optional): The date of the record. Defaults to None. + + Returns: + bool: True if the deletion was successful, False otherwise. + """ + if date is not None: + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(self.table).where(self.table.id == unique_id)) + else: + self.session.execute(delete(self.table).where(self.table.usage_point_id == self.usage_point_id)) + self.session.flush() + return True + + def delete_range(self, date): + """Delete a range of consumption or production detail records. + + Args: + date (datetime): The date of the records to be deleted. + + Returns: + bool: True if the deletion was successful, False otherwise. 
+ """ + if date is not None: + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(self.table).where(self.table.id == unique_id)) + else: + self.session.execute(delete(self.table).where(self.table.usage_point_id == self.usage_point_id)) + self.session.flush() + return True + + def get_ratio_hc_hp(self, begin, end): + """Calculate the ratio of high consumption (HC) to high production (HP) for a given usage point and time range. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + dict: A dictionary with the ratio of HC and HP. + """ + result = { + "HC": 0, + "HP": 0, + } + detail_data = self.get_all( + begin=begin, + end=end, + ) + for data in detail_data: + result[data.measure_type] = result[data.measure_type] + data.value + return result + + def get_fail_count(self, date): + """Get the fail count for a specific usage point, date, and measurement type. + + Args: + date (datetime): The date of the record. + + Returns: + int: The fail count for the specified usage point, date, and measurement type. + """ + return self.get_detail_date(date).fail_count + + def fail_increment(self, date): + """Increment the fail count for a specific usage point, date, and measurement type. + + Args: + date (datetime): The date of the record. + + Returns: + int: The updated fail count. 
+ """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + query = select(self.table).join(self.relation).where(self.table.id == unique_id) + detail = self.session.scalars(query).one_or_none() + if detail is not None: + fail_count = int(detail.fail_count) + 1 + if fail_count >= MAX_IMPORT_TRY: + blacklist = 1 + fail_count = 0 + else: + blacklist = 0 + detail.usage_point_id = self.usage_point_id + detail.date = date + detail.value = 0 + detail.interval = 0 + detail.measure_type = "HP" + detail.blacklist = blacklist + detail.fail_count = fail_count + else: + fail_count = 0 + self.session.add( + self.table( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + interval=0, + measure_type="HP", + blacklist=0, + fail_count=0, + ) + ) + self.session.flush() + return fail_count + + def get_last_date(self): + """Get the last date for a specific usage point and measurement type. + + Returns: + datetime: The last date for the specified usage point and measurement type. + """ + current_data = self.session.scalars( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date) + ).first() + if current_data is None: + return False + else: + return current_data.date + + def get_first_date(self): + """Get the first date for a specific usage point and measurement type. + + Returns: + datetime: The first date for the specified usage point and measurement type. + """ + query = ( + select(self.table) + .join(self.relation) + .where(self.table.usage_point_id == self.usage_point_id) + .order_by(self.table.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).first() + if current_data is None: + return False + else: + return current_data.date + + def get_date_range(self): + """Get the date range (begin and end dates) for a specific usage point. 
+
+        Returns:
+            dict: A dictionary containing the begin and end dates.
+        """
+        return {
+            "begin": self.get_last_date(),
+            "end": self.get_first_date(),
+        }
diff --git a/src/database/ecowatt.py b/src/database/ecowatt.py
new file mode 100644
index 00000000..b9e26eee
--- /dev/null
+++ b/src/database/ecowatt.py
@@ -0,0 +1,71 @@
+"""Manage Config table in database."""
+
+from datetime import datetime
+
+from sqlalchemy import select
+
+from database import DB
+from db_schema import Ecowatt
+
+
+class DatabaseEcowatt:
+    """Manage configuration for the database."""
+
+    def __init__(self):
+        """Initialize DatabaseConfig."""
+        self.session = DB.session
+
+    def get(self, order="desc"):
+        """Retrieve Ecowatt data from the database.
+
+        Args:
+            order (str, optional): The order in which to retrieve the data. Defaults to "desc".
+
+        Returns:
+            list: A list of Ecowatt data.
+        """
+        if order == "desc":
+            order = Ecowatt.date.desc()
+        else:
+            order = Ecowatt.date.asc()
+        return self.session.scalars(select(Ecowatt).order_by(order)).all()
+
+    def get_range(self, begin, end, order="desc"):
+        """Retrieve a range of Ecowatt data from the database.
+
+        Args:
+            begin (datetime): The start date of the range.
+            end (datetime): The end date of the range.
+            order (str, optional): The order in which to retrieve the data. Defaults to "desc".
+
+        Returns:
+            list: A list of Ecowatt data within the specified range.
+        """
+        if order == "desc":
+            order = Ecowatt.date.desc()
+        else:
+            order = Ecowatt.date.asc()
+        return self.session.scalars(
+            select(Ecowatt).where(Ecowatt.date >= begin).where(Ecowatt.date <= end).order_by(order)
+        ).all()
+
+    def set(self, date, value, message, detail):
+        """Set the Ecowatt data in the database.
+
+        Args:
+            date (datetime): The date of the data.
+            value (float): The value of the data.
+            message (str): The message associated with the data.
+            detail (str): The detail information of the data.
+ """ + date = datetime.combine(date, datetime.min.time()) + ecowatt = self.get_range(date, date) + if ecowatt: + for item in ecowatt: + item.value = value + item.message = message + item.detail = detail + else: + self.session.add(Ecowatt(date=date, value=value, message=message, detail=detail)) + self.session.flush() + return True diff --git a/src/database/main.py b/src/database/main.py new file mode 100644 index 00000000..73ebaf48 --- /dev/null +++ b/src/database/main.py @@ -0,0 +1,150 @@ +"""Manage all database operations.""" + +import logging +import subprocess +import traceback +from datetime import datetime +from pathlib import Path + +from sqlalchemy import create_engine, inspect, select +from sqlalchemy.orm import scoped_session, sessionmaker +from sqlalchemy.pool import NullPool + +from config import TIMEZONE +from db_schema import ( + Config as ConfigSchema, +) +from dependencies import APPLICATION_PATH, APPLICATION_PATH_DATA, get_version +from models.config import Config + +available_database = ["sqlite", "postgresql"] + + +class Database: + """Represents a database connection and provides methods for database operations.""" + + def __init__(self, path=APPLICATION_PATH_DATA): + """Initialize a Database object. + + Args: + config (Config): The configuration object. + path (str, optional): The path to the database. Defaults to APPLICATION_PATH_DATA. 
+ """ + self.path = path + + # DBURI CONFIGURATION + if "storage_uri" in Config().config: + storage_uri = self.config["storage_uri"] + else: + storage_uri = False + if not storage_uri or storage_uri.startswith("sqlite"): + self.db_name = "cache.db" + self.db_path = f"{self.path}/{self.db_name}" + self.uri = f"sqlite:///{self.db_path}?check_same_thread=False" + else: + self.storage_type = self.config.storage_config().split(":")[0] + if self.storage_type in available_database: + self.uri = self.config.storage_config() + else: + logging.critical(f"Database {self.storage_type} not supported (only SQLite & PostgresSQL)") + + subprocess.run( + f"cd {APPLICATION_PATH}; DB_URL='{self.uri}' alembic upgrade head", shell=True, check=True # noqa: S602 + ) + + self.engine = create_engine( + self.uri, + echo=False, + query_cache_size=0, + isolation_level="READ UNCOMMITTED", + poolclass=NullPool, + ) + self.session = scoped_session(sessionmaker(self.engine, autocommit=True, autoflush=True)) + self.inspector = inspect(self.engine) + self.lock_file = f"{self.path}/.lock" + + def init_database(self): + """Initialize the database with default values.""" + try: + logging.info("Configure Databases") + query = select(ConfigSchema).where(ConfigSchema.key == "day") + day = self.session.scalars(query).one_or_none() + if day: + day.value = datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d") + else: + self.session.add(ConfigSchema(key="day", value=datetime.now(tz=TIMEZONE).strftime("%Y-%m-%d"))) + logging.info(" => day") + query = select(ConfigSchema).where(ConfigSchema.key == "call_number") + if not self.session.scalars(query).one_or_none(): + self.session.add(ConfigSchema(key="call_number", value="0")) + logging.info(" => call_number") + query = select(ConfigSchema).where(ConfigSchema.key == "max_call") + if not self.session.scalars(query).one_or_none(): + self.session.add(ConfigSchema(key="max_call", value="500")) + logging.info(" => max_call") + query = 
select(ConfigSchema).where(ConfigSchema.key == "version") + version = self.session.scalars(query).one_or_none() + if version: + version.value = get_version() + else: + self.session.add(ConfigSchema(key="version", value=get_version())) + logging.info(" => version") + query = select(ConfigSchema).where(ConfigSchema.key == "lock") + if not self.session.scalars(query).one_or_none(): + self.session.add(ConfigSchema(key="lock", value="0")) + logging.info(" => lock") + query = select(ConfigSchema).where(ConfigSchema.key == "lastUpdate") + if not self.session.scalars(query).one_or_none(): + self.session.add(ConfigSchema(key="lastUpdate", value=str(datetime.now(tz=TIMEZONE)))) + logging.info(" => lastUpdate") + logging.info(" Success") + except Exception as e: + traceback.print_exc() + logging.error(e) + logging.critical("Database initialize failed!") + + def purge_database(self): + """Purges the SQLite database.""" + logging.separator_warning() + logging.info("Reset SQLite Database") + if Path(f"{self.path}/cache.db").exists(): + Path(f"{self.path}/cache.db").unlink() + logging.info(" => Success") + else: + logging.info(" => No cache detected") + + def lock_status(self): + """Check the lock status of the database. + + Returns: + bool: True if the database is locked, False otherwise. + """ + if Path(self.lock_file).exists(): + return True + else: + return False + + def lock(self): + """Locks the database. + + Returns: + bool: True if the database is locked, False otherwise. + """ + with Path(self.lock_file).open("xt") as f: + f.write(str(datetime.now(tz=TIMEZONE))) + f.close() + return self.lock_status() + + def unlock(self): + """Unlocks the database. + + Returns: + bool: True if the database is unlocked, False otherwise. 
+ """ + if Path(self.lock_file).exists(): + Path(self.lock_file).unlink() + return self.lock_status() + + def refresh_object(self): + """Refresh the ORM objects.""" + self.session.expire_all() diff --git a/src/database/max_power.py b/src/database/max_power.py new file mode 100644 index 00000000..d8f5416f --- /dev/null +++ b/src/database/max_power.py @@ -0,0 +1,347 @@ +"""Manage Config table in database.""" + +import hashlib +import logging +from datetime import datetime, timedelta + +import pytz +from sqlalchemy import asc, delete, desc, func, select + +from config import MAX_IMPORT_TRY +from database import DB +from db_schema import ConsumptionDailyMaxPower, UsagePoints + + +class DatabaseMaxPower: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id, measurement_direction="consumption"): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.usage_point_id = usage_point_id + self.measurement_direction = measurement_direction + + def get_all(self, order="desc"): + """Retrieve all consumption daily max power records from the database. + + Args: + order (str, optional): The order in which the records should be sorted. Defaults to "desc". + + Returns: + list: A list of consumption daily max power records. + """ + if order == "desc": + order = ConsumptionDailyMaxPower.date.desc() + else: + order = ConsumptionDailyMaxPower.date.asc() + return self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .order_by(order) + ).all() + + def get_range(self, begin, end): + """Retrieve the range of consumption daily max power records from the database. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + list: A list of consumption daily max power records within the specified range. 
+ """ + query = ( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + .where(ConsumptionDailyMaxPower.date >= begin) + .where(ConsumptionDailyMaxPower.date <= end) + .order_by(ConsumptionDailyMaxPower.date.desc()) + ) + logging.debug(query.compile(compile_kwargs={"literal_binds": True})) + current_data = self.session.scalars(query).all() + if current_data is None: + return False + else: + return current_data + + def get_power(self, begin, end): + """Retrieve the power data for a given date range. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + + Returns: + dict: A dictionary containing the power data for each date within the range. + """ + delta = end - begin + result = {"missing_data": False, "date": {}, "count": 0} + for i in range(delta.days + 1): + check_date = begin + timedelta(days=i) + check_date = datetime.combine(check_date, datetime.min.time()) + query_result = self.get_date(check_date) + check_date = check_date.strftime("%Y-%m-%d") + if query_result is None: + # NEVER QUERY + result["date"][check_date] = { + "status": False, + "blacklist": 0, + "value": 0, + } + result["missing_data"] = True + else: + consumption = query_result.value + blacklist = query_result.blacklist + if consumption == 0: + # ENEDIS RETURN NO DATA + result["date"][check_date] = { + "status": False, + "blacklist": blacklist, + "value": consumption, + } + result["missing_data"] = True + else: + # SUCCESS or BLACKLIST + result["date"][check_date] = { + "status": True, + "blacklist": blacklist, + "value": consumption, + } + return result + + def get_last_date(self): + """Retrieve the last date of consumption daily max power record from the database. + + Returns: + datetime: The last date of consumption daily max power record. 
+ """ + current_data = self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + .order_by(ConsumptionDailyMaxPower.date) + ).first() + if current_data is None: + return False + else: + return current_data.date + + def get_date(self, date): + """Retrieve the consumption daily max power record for a given date. + + Args: + date (datetime): The date for which to retrieve the record. + + Returns: + ConsumptionDailyMaxPower: The consumption daily max power record for the given date. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + return self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(ConsumptionDailyMaxPower.id == unique_id) + ).one_or_none() + + def insert(self, date, event_date, value, blacklist=0, fail_count=0): # noqa: PLR0913, D417 + """Insert the daily max power record into the database. + + Args: + date (datetime): The date of the record. + event_date (datetime): The event date of the record. + value (float): The value of the record. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + daily = self.get_date(date) + if daily is not None: + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.event_date = event_date + daily.value = value + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + self.session.add( + ConsumptionDailyMaxPower( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + event_date=event_date, + value=value, + blacklist=blacklist, + fail_count=fail_count, + ) + ) + self.session.flush() + + def get_daily_count(self): + """Retrieve the count of consumption daily max power records from the database. 
+ + Returns: + int: The count of consumption daily max power records. + """ + return self.session.scalars( + select([func.count()]) + .select_from(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + ).one_or_none() + + def get_daily_datatable(self, order_column="date", order_dir="asc", search=None): + """Retrieve the datatable of consumption daily max power records from the database. + + Args: + order_column (str, optional): The column to order the datatable by. Defaults to "date". + order_dir (str, optional): The direction to order the datatable in. Defaults to "asc". + search (str, optional): The search term to filter the datatable. Defaults to None. + + Returns: + list: The datatable of consumption daily max power records. + """ + yesterday = datetime.combine(datetime.now(pytz.utc) - timedelta(days=1), datetime.max.time()) + sort = asc(order_column) if order_dir == "desc" else desc(order_column) + if search is not None and search != "": + result = self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where( + (ConsumptionDailyMaxPower.date.like(f"%{search}%")) + | (ConsumptionDailyMaxPower.value.like(f"%{search}%")) + ) + .where(ConsumptionDailyMaxPower.date <= yesterday) + .order_by(sort) + ) + else: + result = self.session.scalars( + select(ConsumptionDailyMaxPower) + .join(UsagePoints.relation_consumption_daily_max_power) + .where(UsagePoints.usage_point_id == self.usage_point_id) + .where(ConsumptionDailyMaxPower.date <= yesterday) + .order_by(sort) + ) + return result.all() + + def daily_fail_increment(self, date): + """Increment the fail count for a specific date in the consumption daily max power records. + + Args: + date (datetime): The date for which to increment the fail count. + + Returns: + int: The updated fail count. 
+ """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + daily = self.get_date(date) + if daily is not None: + fail_count = int(daily.fail_count) + 1 + if fail_count >= MAX_IMPORT_TRY: + blacklist = 1 + fail_count = 0 + else: + blacklist = 0 + daily.id = unique_id + daily.usage_point_id = self.usage_point_id + daily.date = date + daily.event_date = None + daily.value = 0 + daily.blacklist = blacklist + daily.fail_count = fail_count + else: + fail_count = 0 + self.session.add( + ConsumptionDailyMaxPower( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + event_date=None, + value=0, + blacklist=0, + fail_count=0, + ) + ) + self.session.flush() + return fail_count + + def reset_daily(self, date=None): + """Reset the daily max power record for a specific date. + + Args: + date (datetime, optional): The date to reset the record for. Defaults to None. + + Returns: + bool: True if the reset is successful, False otherwise. + """ + daily = self.get_date(date) + if daily is not None: + daily.event_date = None + daily.value = 0 + daily.blacklist = 0 + daily.fail_count = 0 + self.session.flush() + return True + else: + return False + + def delete_daily(self, date=None): + """Delete the daily max power record for a specific date or all records for the usage point. + + Args: + date (datetime, optional): The date to delete the record for. Defaults to None. + + Returns: + bool: True if the deletion is successful, False otherwise. 
+ """ + if date is not None: + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + self.session.execute(delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.id == unique_id)) + else: + self.session.execute( + delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + ) + self.session.flush() + return True + + def blacklist_daily(self, date, action=True): + """Blacklist or unblacklist the daily max power record for a specific date. + + Args: + date (datetime): The date to blacklist or unblacklist the record for. + action (bool, optional): True to blacklist the record, False to unblacklist it. Defaults to True. + + Returns: + bool: True if the operation is successful, False otherwise. + """ + unique_id = hashlib.md5(f"{self.usage_point_id}/{date}".encode("utf-8")).hexdigest() # noqa: S324 + daily = self.get_date(date) + if daily is not None: + daily.blacklist = action + else: + self.session.add( + ConsumptionDailyMaxPower( + id=unique_id, + usage_point_id=self.usage_point_id, + date=date, + value=0, + blacklist=action, + fail_count=0, + ) + ) + self.session.flush() + return True + + def get_fail_count(self, date): + """Get the fail count for a specific date. + + Args: + date (datetime): The date to get the fail count for. + + Returns: + int: The fail count for the specified date. 
+ """ + result = self.get_date(date) + if hasattr(result, "fail_count"): + return result.fail_count + return 0 diff --git a/src/database/statistique.py b/src/database/statistique.py new file mode 100644 index 00000000..294bdabf --- /dev/null +++ b/src/database/statistique.py @@ -0,0 +1,44 @@ +"""Manage Config table in database.""" + + +from sqlalchemy import delete, select + +from database import DB +from db_schema import Statistique, UsagePoints + + +class DatabaseStatistique: + """Manage configuration for the database.""" + + def __init__(self, usage_point_id): + """Initialize DatabaseConfig.""" + self.session = DB.session + self.usage_point_id = usage_point_id + + def get(self, key): + """Retrieve the value associated with the given key.""" + return self.session.scalars( + select(Statistique) + .join(UsagePoints.relation_stats) + .where(Statistique.usage_point_id == self.usage_point_id) + .where(Statistique.key == key) + ).all() + + def set(self, key, value): + """Set the value associated with the given key. + + If the key already exists, the value will be updated. + If the key does not exist, it will be created. 
+ """ + current_value = self.get(key) + if current_value: + for item in current_value: + item.value = value + else: + self.session.add(Statistique(usage_point_id=self.usage_point_id, key=key, value=value)) + self.session.flush() + return True + + def delete(self): + """Delete the statistics associated with the usage point.""" + self.session.execute(delete(Statistique).where(Statistique.usage_point_id == self.usage_point_id)) diff --git a/src/database/tempo.py b/src/database/tempo.py new file mode 100644 index 00000000..553114ee --- /dev/null +++ b/src/database/tempo.py @@ -0,0 +1,109 @@ +"""Manage Tempo table in database.""" + +import json +from datetime import datetime + +from sqlalchemy import select + +from database import DB +from db_schema import Tempo, TempoConfig + + +class DatabaseTempo: + """Manage configuration for the database.""" + + def __init__(self): + """Initialize DatabaseTempo.""" + self.session = DB.session + + def get(self, order="desc"): + """Retrieve Tempo data from the database. + + Args: + order (str, optional): The order in which to retrieve the data. Defaults to "desc". + + Returns: + list: List of Tempo data. + """ + if order == "desc": + order = Tempo.date.desc() + else: + order = Tempo.date.asc() + return self.session.scalars(select(Tempo).order_by(order)).all() + + def get_range(self, begin, end, order="desc"): + """Retrieve Tempo data within a specified date range from the database. + + Args: + begin (datetime): The start date of the range. + end (datetime): The end date of the range. + order (str, optional): The order in which to retrieve the data. Defaults to "desc". + + Returns: + list: List of Tempo data within the specified date range. 
+ """ + if order == "desc": + order = Tempo.date.desc() + else: + order = Tempo.date.asc() + return self.session.scalars( + select(Tempo).where(Tempo.date >= begin).where(Tempo.date <= end).order_by(order) + ).all() + + def set(self, date, color): + """Set the color for a specific date in the Tempo data. + + Args: + date (datetime): The date for which to set the color. + color (str): The color to set. + + Returns: + bool: True if the operation is successful. + """ + date = datetime.combine(date, datetime.min.time()) + tempo = self.get_range(date, date) + if tempo: + for item in tempo: + item.color = color + else: + self.session.add(Tempo(date=date, color=color)) + self.session.flush() + return True + + # ----------------------------------------------------------------------------------------------------------------- + # TEMPO CONFIG + # ----------------------------------------------------------------------------------------------------------------- + def get_config(self, key): + """Retrieve the value of a configuration key from the database. + + Args: + key (str): The key of the configuration. + + Returns: + Any: The value associated with the key, or None if the key is not found. + """ + query = select(TempoConfig).where(TempoConfig.key == key) + data = self.session.scalars(query).one_or_none() + if data is not None: + data = json.loads(data.value) + self.session.close() + return data + + def set_config(self, key, value): + """Set the value of a configuration key in the database. + + Args: + key (str): The key of the configuration. + value (Any): The value to set. 
+ + Returns: + None + """ + query = select(TempoConfig).where(TempoConfig.key == key) + config = self.session.scalars(query).one_or_none() + if config: + config.value = json.dumps(value) + else: + self.session.add(TempoConfig(key=key, value=json.dumps(value))) + self.session.flush() + self.session.close() diff --git a/src/database/usage_points.py b/src/database/usage_points.py new file mode 100644 index 00000000..ca4ea47e --- /dev/null +++ b/src/database/usage_points.py @@ -0,0 +1,194 @@ +"""Manage UsagePoints table in database.""" + +from datetime import datetime, timedelta + +from sqlalchemy import delete, select, update + +from config import TIMEZONE_UTC +from database import DB +from db_schema import ( + Addresses, + ConsumptionDaily, + ConsumptionDailyMaxPower, + ConsumptionDetail, + Contracts, + ProductionDaily, + ProductionDetail, + Statistique, + UsagePoints, +) +from dependencies import check_format + + +class DatabaseUsagePoints: + """Manage configuration for the database.""" + + class UsagePointsConfig: # pylint: disable=R0902 + """Default configuration for UsagePoints.""" + + def __init__(self) -> None: + self.usage_point_id: str = "------ SET_YOUR_USAGE_POINT_ID ------" + self.enable: bool = True + self.name: str = "Maison" + self.token: str = "------- SET_YOUR_TOKEN --------" + self.cache: bool = True + self.consumption: bool = True + self.consumption_detail: bool = True + self.consumption_price_base: float = 0 + self.consumption_price_hc: float = 0 + self.consumption_price_hp: float = 0 + self.consumption_max_power: bool = True + self.production: bool = False + self.production_detail: bool = False + self.production_price: float = 0 + self.offpeak_hours_0: str = None + self.offpeak_hours_1: str = None + self.offpeak_hours_2: str = None + self.offpeak_hours_3: str = None + self.offpeak_hours_4: str = None + self.offpeak_hours_5: str = None + self.offpeak_hours_6: str = None + self.plan: str = "BASE" + self.refresh_addresse: bool = False + 
self.refresh_contract: bool = False + self.consumption_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.consumption_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.production_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.production_detail_max_date: datetime = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1095) + self.call_number: int = 0 + self.quota_reached: bool = False + self.quota_limit: bool = False + self.quota_reset_at: datetime = None + self.ban: bool = False + self.consentement_expiration: datetime = None + self.progress: int = 0 + self.progress_status: str = "" + + def __init__(self, usage_point_id=None): + """Initialize DatabaseConfig.""" + self.usage_point_id = usage_point_id + self.session = DB.session + self.usage_point_config = self.UsagePointsConfig() + + def get_all(self): + """Get all data from usage point table.""" + query = select(UsagePoints) + data = self.session.scalars(query).all() + self.session.close() + return data + + def get(self): + """Get data from usage point table.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + data = self.session.scalars(query).one_or_none() + self.session.close() + return data + + def get_plan( + self, + ): + """Get plan from usage point table.""" + data = self.get() + if data.plan in ["HP/HC"]: + return "HC/HP" + return data.plan.upper() + + def set(self, data): + """Set data from usage point table.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.execute(query).scalar_one_or_none() + + if usage_points is not None: + self.usage_point_config = self.UsagePointsConfig() + for key in self.usage_point_config.__dict__: + if data.get(key): + setattr(usage_points, key, check_format(data[key])) + usage_points.usage_point_id = self.usage_point_id + else: + insert_value = {} + self.usage_point_config = 
self.UsagePointsConfig() + for key, value in self.usage_point_config.__dict__.items(): + if data.get(key): + insert_value[key] = check_format(data[key]) + else: + insert_value[key] = value + insert_value["usage_point_id"] = self.usage_point_id + self.session.add(UsagePoints(**insert_value)) + self.session.flush() + self.session.close() + + def progress(self, increment): + """Update progress in database.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.scalars(query).one_or_none() + usage_points.progress = usage_points.progress + increment + self.session.close() + + def last_call_update(self) -> None: + """Update last call in database.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.scalars(query).one_or_none() + usage_points.last_call = datetime.now(tz=TIMEZONE_UTC) + self.session.flush() + self.session.close() + + def update( # noqa: PLR0913 + self, + consentement_expiration=None, + call_number=None, + quota_reached=None, + quota_limit=None, + quota_reset_at=None, + last_call=None, + ban=None, + ) -> None: + """Update usage point in database.""" + query = select(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id) + usage_points = self.session.scalars(query).one_or_none() + if consentement_expiration is not None: + usage_points.consentement_expiration = consentement_expiration + if call_number is not None: + usage_points.call_number = call_number + if quota_reached is not None: + usage_points.quota_reached = quota_reached + if quota_limit is not None: + usage_points.quota_limit = quota_limit + if quota_reset_at is not None: + usage_points.quota_reset_at = quota_reset_at + if last_call is not None: + usage_points.last_call = last_call + if ban is not None: + usage_points.ban = ban + self.session.flush() + self.session.close() + + def delete(self) -> True: + """Delete usage point from database.""" + 
self.session.execute(delete(Addresses).where(Addresses.usage_point_id == self.usage_point_id)) + self.session.execute(delete(Contracts).where(Contracts.usage_point_id == self.usage_point_id)) + self.session.execute( + delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == self.usage_point_id) + ) + self.session.execute(delete(ConsumptionDetail).where(ConsumptionDetail.usage_point_id == self.usage_point_id)) + self.session.execute(delete(ConsumptionDaily).where(ConsumptionDaily.usage_point_id == self.usage_point_id)) + self.session.execute(delete(ProductionDetail).where(ProductionDetail.usage_point_id == self.usage_point_id)) + self.session.execute(delete(ProductionDaily).where(ProductionDaily.usage_point_id == self.usage_point_id)) + self.session.execute(delete(UsagePoints).where(UsagePoints.usage_point_id == self.usage_point_id)) + self.session.execute(delete(Statistique).where(Statistique.usage_point_id == self.usage_point_id)) + self.session.flush() + self.session.close() + return True + + def get_error_log(self): + """Get error log in usage point table.""" + data = self.get(self.usage_point_id) + return data.last_error + + def set_error_log(self, message): + """Set error log in usage point table.""" + values = {UsagePoints.last_error: message} + self.session.execute( + update(UsagePoints, values=values).where(UsagePoints.usage_point_id == self.usage_point_id) + ) + self.session.flush() + return True diff --git a/src/db_schema.py b/src/db_schema.py index 8bb02b69..43ea8391 100644 --- a/src/db_schema.py +++ b/src/db_schema.py @@ -1,5 +1,7 @@ """This module defines the database schema for the application.""" +import typing + from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String, Text from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship @@ -8,6 +10,8 @@ class Config(Base): + """This class represents the configuration table in the database.""" + __tablename__ = 
"config" key = Column(Text, primary_key=True, index=True, unique=True) @@ -18,14 +22,16 @@ def __init__(self, key, value): self.value = value def __str__(self): + """Return the string representation of the Config object.""" return self.value class UsagePoints(Base): + """This class represents the usage points table in the database.""" + __tablename__ = "usage_points" usage_point_id = Column(Text, primary_key=True, unique=True, nullable=False, index=True) - name = Column(Text, nullable=False) cache = Column(Boolean, nullable=False, default=False) consumption = Column(Boolean, nullable=False, default=True) @@ -58,10 +64,7 @@ class UsagePoints(Base): last_call = Column(DateTime, nullable=True) ban = Column(Boolean, nullable=True) consumption_max_date = Column(DateTime, nullable=True) - consumption_detail_max_date = Column( - DateTime, - nullable=True, - ) + consumption_detail_max_date = Column(DateTime, nullable=True) production_max_date = Column(DateTime, nullable=True) production_detail_max_date = Column(DateTime, nullable=True) consumption_max_power = Column(Boolean, nullable=False, default=True) @@ -77,6 +80,7 @@ class UsagePoints(Base): relation_consumption_daily_max_power = relationship("ConsumptionDailyMaxPower", back_populates="usage_point") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"UsagePoints(" f"usage_point_id={self.usage_point_id!r}, " @@ -122,8 +126,10 @@ def __repr__(self): class Addresses(Base): + """Represents the Addresses class.""" + __tablename__ = "addresses" - __table_args__ = {"sqlite_autoincrement": True} + __table_args__: typing.ClassVar[dict] = {"sqlite_autoincrement": True} id = Column(Integer, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -139,6 +145,7 @@ class Addresses(Base): usage_point = relationship("UsagePoints", back_populates="relation_addressess") def __repr__(self): + """Return the 
string representation of the Config object.""" return ( f"Addresses(" f"id={self.id!r}, " @@ -156,8 +163,10 @@ def __repr__(self): class Contracts(Base): + """Represents the Contracts class.""" + __tablename__ = "contracts" - __table_args__ = {"sqlite_autoincrement": True} + __table_args__: typing.ClassVar[dict] = {"sqlite_autoincrement": True} id = Column(Integer, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -181,6 +190,7 @@ class Contracts(Base): usage_point = relationship("UsagePoints", back_populates="relation_contract") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Contracts(" f"id={self.id!r}, " @@ -206,8 +216,9 @@ def __repr__(self): class ConsumptionDaily(Base): + """Represents the ConsumptionDaily class.""" + __tablename__ = "consumption_daily" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -219,6 +230,7 @@ class ConsumptionDaily(Base): usage_point = relationship("UsagePoints", back_populates="relation_consumption_daily") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ConsumptionDaily(" f"id={self.id!r}, " @@ -232,8 +244,9 @@ def __repr__(self): class ConsumptionDetail(Base): + """Represents the ConsumptionDetail class.""" + __tablename__ = "consumption_detail" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -247,6 +260,7 @@ class ConsumptionDetail(Base): usage_point = relationship("UsagePoints", back_populates="relation_consumption_detail") def __repr__(self): + """Return the string representation of the Config object.""" 
return ( f"ConsumptionDetail(" f"id={self.id!r}, " @@ -262,8 +276,9 @@ def __repr__(self): class ProductionDaily(Base): + """Represents the ProductionDaily class.""" + __tablename__ = "production_daily" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -275,6 +290,7 @@ class ProductionDaily(Base): usage_point = relationship("UsagePoints", back_populates="relation_production_daily") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ProductionDaily(" f"id={self.id!r}, " @@ -288,8 +304,9 @@ def __repr__(self): class ProductionDetail(Base): + """Represents the ProductionDetail class.""" + __tablename__ = "production_detail" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -303,6 +320,7 @@ class ProductionDetail(Base): usage_point = relationship("UsagePoints", back_populates="relation_production_detail") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ProductionDetail(" f"id={self.id!r}, " @@ -318,8 +336,10 @@ def __repr__(self): class Statistique(Base): + """Represents the Statistique class.""" + __tablename__ = "statistique" - __table_args__ = {"sqlite_autoincrement": True} + __table_args__: typing.ClassVar[dict] = {"sqlite_autoincrement": True} id = Column(Integer, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -329,6 +349,7 @@ class Statistique(Base): usage_point = relationship("UsagePoints", back_populates="relation_stats") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Statistique(" 
f"id={self.id!r}, " @@ -340,8 +361,9 @@ def __repr__(self): class ConsumptionDailyMaxPower(Base): + """Represents the ConsumptionDailyMaxPower class.""" + __tablename__ = "consumption_daily_max_power" - # __table_args__ = {'sqlite_autoincrement': True} id = Column(String, primary_key=True, index=True, unique=True) usage_point_id = Column(Text, ForeignKey("usage_points.usage_point_id"), nullable=False, index=True) @@ -354,6 +376,7 @@ class ConsumptionDailyMaxPower(Base): usage_point = relationship("UsagePoints", back_populates="relation_consumption_daily_max_power") def __repr__(self): + """Return the string representation of the Config object.""" return ( f"ConsumptionDailyMaxPower(" f"id={self.id!r}, " @@ -368,16 +391,21 @@ def __repr__(self): class Tempo(Base): + """Represents the Tempo class.""" + __tablename__ = "tempo" date = Column(DateTime, primary_key=True, index=True, unique=True) color = Column(Text, nullable=False, index=True) def __repr__(self): + """Return the string representation of the Config object.""" return f"Tempo(" f"date={self.date!r}, " f"color={self.color!r}, " f")" class TempoConfig(Base): + """Represents the TempoConfig class.""" + __tablename__ = "tempo_config" key = Column(Text, primary_key=True, index=True, unique=True) @@ -388,10 +416,13 @@ def __init__(self, key, value): self.value = value def __str__(self): + """Return the string representation of the Config object.""" return self.value class Ecowatt(Base): + """Represents the Ecowatt class.""" + __tablename__ = "ecowatt" date = Column(DateTime, primary_key=True, index=True, unique=True) @@ -400,6 +431,7 @@ class Ecowatt(Base): detail = Column(Text, nullable=False, index=True) def __repr__(self): + """Return the string representation of the Config object.""" return ( f"Ecowatt(" f"date={self.date!r}, " diff --git a/src/dependencies.py b/src/dependencies.py index 53cec4b8..80c77195 100755 --- a/src/dependencies.py +++ b/src/dependencies.py @@ -1,11 +1,12 @@ """This module contains 
dependencies for the application.""" - -import datetime import logging +from datetime import datetime, timedelta from math import floor from os import environ, getenv +import pytz from art import decor, text2art +from dateutil.parser import parse from __version__ import VERSION @@ -37,7 +38,7 @@ def daterange(start_date, end_date): """ for n in range(int((end_date - start_date).days)): - yield start_date + datetime.timedelta(n) + yield start_date + timedelta(n) def is_bool(v): @@ -65,10 +66,9 @@ def str2bool(v): bool: The boolean value. """ - if type(v) != bool: + if not isinstance(v, bool): return v and v.lower() in ("yes", "true", "t", "1") - else: - return v + return v def is_float(element): @@ -88,6 +88,24 @@ def is_float(element): return False +def is_datetime(element, fuzzy=False): + """Check if a value can be parsed as a datetime. + + Args: + element (str): The value to check. + fuzzy (bool, optional): Whether to allow fuzzy parsing. Defaults to False. + + Returns: + bool: True if the value can be parsed as a datetime, False otherwise. + + """ + try: + parse(element, fuzzy=fuzzy) + return True + except ValueError: + return False + + def is_integer(element): """Check if a value can be converted to an integer. 
@@ -118,7 +136,7 @@ def reformat_json(yaml): for key, value in yaml.items(): if value in ["true", "false"]: result[key] = str2bool(value) - elif type(value) == dict: + elif isinstance(value, dict): result[key] = value elif not isinstance(value, bool) and is_float(value): result[key] = float(value) @@ -149,7 +167,7 @@ def title(message): """ separator() - if type(message) is list: + if isinstance(message, list): for msg in message: logging.info(f"{msg.upper()}") else: @@ -172,21 +190,24 @@ def title_warning(message): def separator(): """Print a separator line.""" logging.info( - "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ ❖ ◦ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ ❖ ◦ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" ) def separator_warning(): """Print a warning separator line.""" logging.warning( - "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ▲ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ▲ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" ) def export_finish(): """Finish the export process.""" logging.info( - "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ TERMINE ◦ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ ◦ TERMINE ◦ " + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" ) @@ -216,11 +237,60 @@ def get_version(): def logo(version): - Art = text2art("MyElectricalData") + """Print the logo of MyElectricalData with the version number. + + Args: + version (str): The version number of MyElectricalData. 
+ + """ + art = text2art("MyElectricalData") separator() - for line in Art.splitlines(): + for line in art.splitlines(): logging.info(f'{decor("barcode1")}{line: ^93}{decor("barcode1", reverse=True)}') separator() version = f"VERSION : {version}" logging.info(f'{decor("barcode1")}{version: ^93}{decor("barcode1", reverse=True)}') separator() + + +def check_format(value): + """Check the format of a value and convert it if necessary. + + Args: + value (any): The value to check and convert. + + Returns: + any: The checked and converted value. + + """ + if is_bool(value): + new_value = str2bool(value) + elif value is None or value == "None" or not value: + new_value = None + elif isinstance(value, int): + new_value = int(value) + elif is_float(value): + new_value = float(value) + elif is_datetime(value): + new_value = datetime.strptime(value, "%Y-%m-%d").replace(tzinfo=pytz.utc) + else: + new_value = str(value) + return new_value + + +def is_between(time, time_range): + """Check if a given time is between a specified time range. + + Args: + time (datetime): The time to check. + time_range (tuple): The time range represented by a tuple of two datetime objects. + + Returns: + bool: True if the time is between the time range, False otherwise. 
+ """ + time = time.replace(":", "") + start = time_range[0].replace(":", "") + end = time_range[1].replace(":", "") + if end < start: + return time >= start or time < end + return start <= time < end diff --git a/src/init.py b/src/init.py index ffea92eb..a658dda0 100644 --- a/src/init.py +++ b/src/init.py @@ -11,9 +11,9 @@ import yaml from config import LOG_FORMAT, LOG_FORMAT_DATE, cycle_minimun +from database.config import DatabaseConfig from dependencies import APPLICATION_PATH_DATA, APPLICATION_PATH_LOG, str2bool from models.config import Config -from models.database import Database from models.influxdb import InfluxDB from models.mqtt import Mqtt @@ -24,6 +24,10 @@ with Path(CONFIG_PATH).open() as file: config = yaml.safe_load(file) +root_logger = logging.getLogger() +if len(root_logger.handlers) > 0: + root_logger.removeHandler(root_logger.handlers[0]) + if "DEBUG" in environ and str2bool(getenv("DEBUG")): logging_level = logging.DEBUG else: @@ -44,6 +48,12 @@ else: logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_FORMAT_DATE, level=logging_level) +# # Clear the default handler +# root_logger = logging.getLogger() +# if len(root_logger.handlers) > 0: +# # remove the first handler +# root_logger.removeHandler(root_logger.handlers[0]) + if not Path(CONFIG_PATH).exists(): logging.critical(f"Config file is not found ({CONFIG_PATH})") sys.exit() @@ -73,16 +83,12 @@ def filter(self, record: logging.LogRecord) -> bool: MINIMUN_CYCLE = cycle_minimun -CONFIG = Config(path=APPLICATION_PATH_DATA) +CONFIG = Config() CONFIG.load() CONFIG.display() CONFIG.check() -DB = Database(CONFIG) -DB.init_database() -DB.unlock() - -CONFIG.set_db(DB) +DatabaseConfig().load_config_file() INFLUXB_ENABLE = False INFLUXDB = None @@ -100,16 +106,7 @@ def filter(self, record: logging.LogRecord) -> bool: write_options = [] if "batching_options" in INFLUXDB_CONFIG: write_options = INFLUXDB_CONFIG["batching_options"] - INFLUXDB = InfluxDB( - scheme=INFLUXDB_CONFIG["scheme"], - 
hostname=INFLUXDB_CONFIG["hostname"], - port=INFLUXDB_CONFIG["port"], - token=INFLUXDB_CONFIG["token"], - org=INFLUXDB_CONFIG["org"], - bucket=INFLUXDB_CONFIG["bucket"], - method=method, - write_options=write_options, - ) + INFLUXDB = InfluxDB() if CONFIG.get("wipe_influxdb"): INFLUXDB.purge_influxdb() CONFIG.set("wipe_influxdb", False) diff --git a/src/main.py b/src/main.py index d8e2e441..7ea1233d 100755 --- a/src/main.py +++ b/src/main.py @@ -1,3 +1,4 @@ +"""Main module of the application.""" import logging from os import environ, getenv @@ -8,8 +9,10 @@ from fastapi_utils.tasks import repeat_every from config import LOG_FORMAT, LOG_FORMAT_DATE, cycle_minimun +from database.config import DatabaseConfig +from database.usage_points import DatabaseUsagePoints from dependencies import APPLICATION_PATH, get_version, logo, str2bool, title, title_warning -from init import CONFIG, DB +from init import CONFIG from models.jobs import Job from routers import account, action, data, html, info @@ -18,25 +21,19 @@ else: title("Run in production mode") -title("Chargement du config.yaml...") usage_point_list = [] if CONFIG.list_usage_point() is not None: - for upi, upi_data in CONFIG.list_usage_point().items(): - logging.info(f"{upi}") - DB.set_usage_point(upi, upi_data) + for upi, _ in CONFIG.list_usage_point().items(): usage_point_list.append(upi) - logging.info(" => Success") -else: - logging.warning("Aucun point de livraison détecté.") title("Nettoyage de la base de données...") -DB.clean_database(usage_point_list) +for usage_point in DatabaseUsagePoints().get_all(): + if usage_point.usage_point_id not in usage_point_list: + DatabaseUsagePoints(usage_point.usage_point_id).delete() swagger_configuration = { "operationsSorter": "method", - # "defaultModelRendering": "model", "tagsSorter": "alpha", - # "docExpansion": "none", "deepLinking": True, } APP = FastAPI(title="MyElectricalData", swagger_ui_parameters=swagger_configuration) @@ -83,44 +80,34 @@ CYCLE = 
CONFIG.get("cycle") if not CYCLE: CYCLE = 14400 -else: - if CYCLE < cycle_minimun: - logging.warning("Le cycle minimun est de 3600s") - CYCLE = cycle_minimun - CONFIG.set("cycle", cycle_minimun) +elif CYCLE < cycle_minimun: + logging.warning("Le cycle minimun est de 3600s") + CYCLE = cycle_minimun + CONFIG.set("cycle", cycle_minimun) @APP.on_event("startup") @repeat_every(seconds=CYCLE, wait_first=False) def import_job(): + """Perform the import job.""" Job().boot() @APP.on_event("startup") @repeat_every(seconds=3600, wait_first=True) def home_assistant_export(): + """Perform the home assistant export job.""" Job().export_home_assistant(target="ecowatt") @APP.on_event("startup") @repeat_every(seconds=600, wait_first=False) def gateway_status(): + """Perform gateway status.""" Job().get_gateway_status() if __name__ == "__main__": - # from pypdf import PdfReader - # import requests - # url = "https://particulier.edf.fr/content/dam/2-Actifs/Documents/Offres/Grille_prix_Tarif_Bleu.pdf" - # file = "/tmp/Grille_prix_Tarif_Bleu.pdf" - # r = requests.get(url, allow_redirects=True, verify=False) - # reader = PdfReader(file) - # text = reader.pages[0].extract_text() + "\n" - # for line in text.splitlines(): - # if line.startswith("6 "): - # print(line) - # exit() - logo(get_version()) log_config = uvicorn.config.LOGGING_CONFIG log_config["formatters"]["access"]["fmt"] = LOG_FORMAT @@ -128,11 +115,11 @@ def gateway_status(): log_config["formatters"]["default"]["fmt"] = LOG_FORMAT log_config["formatters"]["default"]["datefmt"] = LOG_FORMAT_DATE uvicorn_params = { - "host": "0.0.0.0", + "host": "0.0.0.0", # noqa: S104 "port": CONFIG.port(), "log_config": log_config, } - if ("DEV" in environ and str2bool(getenv("DEV"))) or ("DEBUG" in environ and str2bool(getenv("DEBUG"))): + if "DEV" in environ and str2bool(getenv("DEV")) or "DEBUG" in environ and str2bool(getenv("DEBUG")): uvicorn_params["reload"] = True uvicorn_params["reload_dirs"] = [APPLICATION_PATH] diff --git 
a/src/models/__init__.py b/src/models/__init__.py index e69de29b..a7413b77 100644 --- a/src/models/__init__.py +++ b/src/models/__init__.py @@ -0,0 +1 @@ +"""This module contains the models for the MyElectricalData application.""" \ No newline at end of file diff --git a/src/models/ajax.py b/src/models/ajax.py index 98727565..adf4fab3 100755 --- a/src/models/ajax.py +++ b/src/models/ajax.py @@ -1,10 +1,17 @@ +"""This module represents an Ajax object.""" import logging from datetime import datetime import pytz +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.max_power import DatabaseMaxPower +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints from dependencies import APPLICATION_PATH, get_version, title -from init import CONFIG, DB +from init import CONFIG from models.jobs import Job from models.query_cache import Cache from models.query_daily import Daily @@ -19,15 +26,17 @@ class Ajax: + """This class represents an Ajax object.""" + def __init__(self, usage_point_id=None): + """Initialize Ajax.""" self.config = CONFIG - self.db = DB self.application_path = APPLICATION_PATH self.usage_point_id = usage_point_id self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" if self.usage_point_id is not None: - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() if hasattr(self.usage_point_config, "token"): self.headers = { "Content-Type": "application/json", @@ -44,6 +53,7 @@ def __init__(self, usage_point_id=None): self.usage_points_id_list = "" def gateway_status(self): + """Check the status of the gateway.""" if self.usage_point_id is not None: msg = f"[{self.usage_point_id}] Check de l'état de la passerelle." 
else: @@ -52,6 +62,7 @@ def gateway_status(self): return Status().ping() def account_status(self): + """Check the status of the account.""" title(f"[{self.usage_point_id}] Check du statut du compte.") data = Status(headers=self.headers).status(self.usage_point_id) if isinstance(self.usage_point_config.last_call, datetime): @@ -61,30 +72,35 @@ def account_status(self): return data def fetch_tempo(self): - title(f"Récupération des jours Tempo.") + title("Récupération des jours Tempo.") return Tempo().fetch() def get_tempo(self): - title(f"Affichage des jours Tempo.") + title("Affichage des jours Tempo.") return Tempo().get() def fetch_ecowatt(self): - title(f"Récupération des jours Ecowatt.") + """Fetch the days of Ecowatt.""" + title("Récupération des jours Ecowatt.") return Ecowatt().fetch() def get_ecowatt(self): - title(f"Affichage des jours Ecowatt.") + """Get the days of Ecowatt.""" + title("Affichage des jours Ecowatt.") return Ecowatt().get() def generate_price(self): + """Generate the costs by subscription type.""" title(f"[{self.usage_point_id}] Calcul des coûts par type d'abonnements.") return Stat(self.usage_point_id, "consumption").generate_price() def get_price(self): + """Get the result of the subscription comparator.""" title(f"[{self.usage_point_id}] Retourne le résultat du comparateur d'abonnements.") return Stat(self.usage_point_id, "consumption").get_price() def reset_all_data(self): + """Reset all the data.""" title(f"[{self.usage_point_id}] Reset de la consommation journalière.") Daily( headers=self.headers, @@ -118,6 +134,7 @@ def reset_all_data(self): } def delete_all_data(self): + """Delete all the data.""" title(f"[{self.usage_point_id}] Suppression de la consommation journalière.") Daily( headers=self.headers, @@ -153,17 +170,25 @@ def delete_all_data(self): } def reset_gateway(self): + """Reset the gateway cache.""" title(f"[{self.usage_point_id}] Reset du cache de la passerelle.") return Cache(headers=self.headers, 
usage_point_id=self.usage_point_id).reset() def reset_data(self, target, date): + """Reset the specified data for the given target and date. + + Args: + target (str): The target to reset. + date (str): The date to reset. + + Returns: + dict: The result of the reset. + """ result = {} if target == "consumption": title(f"[{self.usage_point_id}] Reset de la consommation journalière du {date}:") result["consumption"] = Daily(headers=self.headers, usage_point_id=self.usage_point_id).reset(date) elif target == "consumption_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") title(f"[{self.usage_point_id}] Reset de la consommation détaillée du {date}:") result["consumption_detail"] = Detail( headers=self.headers, usage_point_id=self.usage_point_id @@ -181,8 +206,6 @@ def reset_data(self, target, date): measure_type="production", ).reset(date) elif target == "production_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") title(f"[{self.usage_point_id}] Reset de la production détaillée du {date}:") result["production_detail"] = Detail( headers=self.headers, @@ -204,51 +227,72 @@ def reset_data(self, target, date): "result": result[target], } - def fetch(self, target, date): + def fetch(self, target, date): # noqa: C901, PLR0912 + """Fetch the specified data for the given target and date. + + Args: + target (str): The target to fetch. + date (str): The date to fetch. + + Returns: + dict: The fetched data. 
+ """ result = {} - if target == "consumption": - if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: - title(f"[{self.usage_point_id}] Importation de la consommation journalière du {date}:") - result["consumption"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).fetch(date) - elif target == "consumption_max_power": - if hasattr(self.usage_point_config, "consumption_max_power") and self.usage_point_config.consumption: - title(f"[{self.usage_point_id}] Importation de la puissance maximum journalière du {date}:") - result["consumption_max_power"] = Power( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).fetch(date) - elif target == "consumption_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") - if hasattr(self.usage_point_config, "consumption_detail") and self.usage_point_config.consumption_detail: - title(f"[{self.usage_point_id}] Importation de la consommation détaillée du {date}:") - result["consumption_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - ).fetch(date) - elif target == "production": - if hasattr(self.usage_point_config, "production") and self.usage_point_config.production: - title(f"[{self.usage_point_id}] Importation de la production journalière du {date}:") - result["production"] = Daily( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).fetch(date) - elif target == "production_detail": - # date = date.replace("---", " ") - # date = date.replace("--", ":") - if hasattr(self.usage_point_config, "production_detail") and self.usage_point_config.production_detail: - title(f"[{self.usage_point_id}] Importation de la production détaillée du {date}:") - result["production_detail"] = Detail( - headers=self.headers, - usage_point_id=self.usage_point_id, - measure_type="production", - ).fetch(date) + if ( + target == "consumption" + and hasattr(self.usage_point_config, 
"consumption") + and self.usage_point_config.consumption + ): + title(f"[{self.usage_point_id}] Importation de la consommation journalière du {date}:") + result["consumption"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).fetch(date) + elif ( + target == "consumption_max_power" + and hasattr(self.usage_point_config, "consumption_max_power") + and self.usage_point_config.consumption_max_power + ): + title(f"[{self.usage_point_id}] Importation de la puissance maximum journalière du {date}:") + result["consumption_max_power"] = Power( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).fetch(date) + elif ( + target == "consumption_detail" + and hasattr(self.usage_point_config, "consumption_detail") + and self.usage_point_config.consumption_detail + ): + title(f"[{self.usage_point_id}] Importation de la consommation détaillée du {date}:") + result["consumption_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + ).fetch(date) + elif ( + target == "production" + and hasattr(self.usage_point_config, "production") + and self.usage_point_config.production + ): + title(f"[{self.usage_point_id}] Importation de la production journalière du {date}:") + result["production"] = Daily( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).fetch(date) + elif ( + target == "production_detail" + and hasattr(self.usage_point_config, "production_detail") + and self.usage_point_config.production_detail + ): + title(f"[{self.usage_point_id}] Importation de la production détaillée du {date}:") + result["production_detail"] = Detail( + headers=self.headers, + usage_point_id=self.usage_point_id, + measure_type="production", + ).fetch(date) else: return {"error": "true", "notif": "Target inconnue.", "result": ""} + if "error" in result[target] and result[target]["error"]: data = { "error": "true", @@ -289,7 +333,16 @@ def fetch(self, target, date): } return data - def 
blacklist(self, target, date): + def blacklist(self, target, date): # noqa: C901, PLR0912 + """Blacklist the specified target for the given date. + + Args: + target (str): The target to blacklist. + date (str): The date to blacklist. + + Returns: + dict: A dictionary containing the result of the blacklist operation. + """ result = {} if target == "consumption": if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: @@ -346,7 +399,16 @@ def blacklist(self, target, date): "result": result[target], } - def whitelist(self, target, date): + def whitelist(self, target, date): # noqa: C901, PLR0912 + """Whitelist the specified target for the given date. + + Args: + target (str): The target to whitelist. + date (str): The date to whitelist. + + Returns: + dict: A dictionary containing the result of the whitelist operation. + """ result = {} if target == "consumption": if hasattr(self.usage_point_config, "consumption") and self.usage_point_config.consumption: @@ -404,6 +466,14 @@ def whitelist(self, target, date): } def import_data(self, target=None): + """Import data for the specified target. + + Args: + target (str, optional): The target to import data for. Defaults to None. + + Returns: + dict: A dictionary containing the result of the import data operation. + """ result = Job(self.usage_point_id).job_import_data(wait=False, target=target) if not result: return { @@ -419,34 +489,60 @@ def import_data(self, target=None): } def new_account(self, configs): - print(vars(configs)) + """Add a new account. + + Args: + configs (dict): A dictionary containing the configuration for the new account. + + Returns: + dict: A dictionary containing the output of the new account operation. 
+ """ self.usage_point_id = configs["usage_point_id"] title(f"[{self.usage_point_id}] Ajout d'un nouveau point de livraison:") output = {} for key, value in configs.items(): if key != "usage_point_id": + new_value = value if value is None or value == "None": - value = "" - logging.info(f"{str(key)} => {str(value)}") - output[key] = value - self.config.set_usage_point_config(self.usage_point_id, key, value) - self.db.set_usage_point(self.usage_point_id, output) + new_value = "" + logging.info("%s => %s", str(key), str(new_value)) + output[key] = new_value + self.config.set_usage_point_config(self.usage_point_id, key, new_value) + DatabaseUsagePoints(self.usage_point_id).set(output) return output def configuration(self, configs): + """Change the configuration for the specified usage point. + + Args: + configs (dict): A dictionary containing the new configuration values. + + Returns: + dict: A dictionary containing the updated configuration values. + """ title(f"[{self.usage_point_id}] Changement de configuration:") output = {} for key, value in configs.items(): + new_value = value if value is None or value == "None": - value = "" - logging.info(f"{str(key)} => {str(value)}") - output[key] = value - self.config.set_usage_point_config(self.usage_point_id, key, value) - self.db.set_usage_point(self.usage_point_id, output) + new_value = "" + logging.info("%s => %s", str(key), str(new_value)) + output[key] = new_value + self.config.set_usage_point_config(self.usage_point_id, key, new_value) + DatabaseUsagePoints(self.usage_point_id).set(output) return output def datatable(self, measurement_direction, args): - recordsTotal = 0 + """Retrieve datatable for the specified measurement direction. + + Args: + measurement_direction (str): The measurement direction. + args (object): The arguments. + + Returns: + dict: A dictionary containing the datatable result. 
+ """ + records_total = 0 args = args._query_params draw = int(args.get("draw")) length = int(args.get("length")) @@ -458,9 +554,7 @@ def datatable(self, measurement_direction, args): all_data = [] data = [] if measurement_direction == "consumption": - recordsTotal = self.db.get_daily_count( - usage_point_id=self.usage_point_id, measurement_direction="consumption" - ) + records_total = DatabaseDaily(self.usage_point_id, "consumption").get_count() col_spec = { 0: "date", 1: "value", @@ -472,19 +566,15 @@ def datatable(self, measurement_direction, args): 7: "import_clean", 8: "blacklist", } - all_data = self.db.get_daily_datatable( - usage_point_id=self.usage_point_id, + all_data = DatabaseDaily(self.usage_point_id, "consumption").get_datatable( order_column=col_spec[order_column], order_dir=order_dir, search=search, - measurement_direction="consumption", ) data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) elif measurement_direction == "consumption_detail": - recordsTotal = self.db.get_detail_count( - usage_point_id=self.usage_point_id, measurement_direction="consumption" - ) + records_total = DatabaseDetail(self.usage_point_id, "consumption").get_count() col_spec = { 0: "date", 1: "date", @@ -495,19 +585,15 @@ def datatable(self, measurement_direction, args): 6: "import_clean", 7: "blacklist", } - all_data = self.db.get_detail_datatable( - usage_point_id=self.usage_point_id, + all_data = DatabaseDetail(self.usage_point_id, "consumption").get_datatable( order_column=col_spec[order_column], order_dir=order_dir, search=search, - measurement_direction="consumption", ) data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) elif measurement_direction == "production": - recordsTotal = self.db.get_daily_count( - usage_point_id=self.usage_point_id, measurement_direction="production" - ) + records_total = DatabaseDaily(self.usage_point_id, "production").get_count() col_spec = { 0: "date", 1: "value", @@ -517,18 
+603,14 @@ def datatable(self, measurement_direction, args): 5: "import_clean", 6: "blacklist", } - all_data = self.db.get_daily_datatable( - usage_point_id=self.usage_point_id, + all_data = DatabaseDaily(self.usage_point_id, "production").get_datatable( order_column=col_spec[order_column], order_dir=order_dir, search=search, - measurement_direction="production", ) data = self.datatable_daily(all_data, start_index, end_index, measurement_direction) elif measurement_direction == "production_detail": - recordsTotal = self.db.get_detail_count( - usage_point_id=self.usage_point_id, measurement_direction="production" - ) + records_total = DatabaseDetail(self.usage_point_id, "production").get_count() col_spec = { 0: "date", 1: "date", @@ -539,16 +621,14 @@ def datatable(self, measurement_direction, args): 6: "import_clean", 7: "blacklist", } - all_data = self.db.get_detail_datatable( - usage_point_id=self.usage_point_id, + all_data = DatabaseDetail(self.usage_point_id, "production").get_datatable( order_column=col_spec[order_column], order_dir=order_dir, search=search, - measurement_direction="production", ) data = self.datatable_detail(all_data, start_index, end_index, measurement_direction) elif measurement_direction == "consumption_max_power": - recordsTotal = self.db.get_daily_max_power_count(usage_point_id=self.usage_point_id) + records_total = DatabaseMaxPower(self.usage_point_id).get_daily_count() col_spec = { 0: "date", 1: "date", @@ -560,8 +640,7 @@ def datatable(self, measurement_direction, args): 7: "import_clean", 8: "blacklist", } - all_data = self.db.get_daily_max_power_datatable( - usage_point_id=self.usage_point_id, + all_data = DatabaseMaxPower(self.usage_point_id).get_daily_datatable( order_column=col_spec[order_column], order_dir=order_dir, search=search, @@ -569,13 +648,22 @@ def datatable(self, measurement_direction, args): data = self.datatable_max_power(all_data, start_index, end_index) result = { "draw": draw + 1, - "recordsTotal": recordsTotal, 
+ "recordsTotal": records_total, "recordsFiltered": len(all_data), "data": data, } return result def datatable_button(self, measurement_direction, db_data): + """Generate HTML code for datatable buttons based on measurement direction and database data. + + Args: + measurement_direction (str): The measurement direction. + db_data (object): The database data. + + Returns: + dict: The generated HTML code for the buttons. + """ date_text = db_data.date.strftime(self.date_format) value = db_data.value blacklist = db_data.blacklist @@ -601,23 +689,45 @@ def datatable_button(self, measurement_direction, db_data): btn_whitelist = "display:none" cache_html = f""" -
-
-
-
+
+ +
+
+ +
""" blacklist_html = f""" -
-
-
-
+
+ +
+
+ +
""" btn = {"cache": cache_html, "blacklist": blacklist_html} return btn - def datatable_daily(self, all_data, start_index, end_index, measurement_direction): + def datatable_daily(self, all_data, start_index, end_index, measurement_direction): # noqa: PLR0912 + """Generate the HTML code for the daily datatable based on the provided data. + + Args: + all_data (list): The list of database data. + start_index (int): The start index of the datatable. + end_index (int): The end index of the datatable. + measurement_direction (str): The measurement direction. + + Returns: + list: The generated HTML code for the daily datatable. + """ index = 0 result = [] for db_data in all_data: @@ -637,7 +747,7 @@ def datatable_daily(self, all_data, start_index, end_index, measurement_directio cache_state = ( f'
0
' ) - tempo = self.db.get_tempo_range( + tempo = DatabaseTempo().get_range( db_data.date.strftime(self.date_format), db_data.date.strftime(self.date_format) ) if tempo and tempo[0]: @@ -695,11 +805,21 @@ def datatable_daily(self, all_data, start_index, end_index, measurement_directio return result def datatable_detail(self, all_data, start_index, end_index, measurement_direction): + """Generate the datatable for the detailed view of the electrical data. + + Args: + all_data (list): List of all data. + start_index (int): Start index of the data. + end_index (int): End index of the data. + measurement_direction (str): Measurement direction. + + Returns: + list: Resulting datatable. + """ index = 0 result = [] for db_data in all_data: if start_index <= index <= end_index: - # print(db_data) date_text = db_data.date.strftime(self.date_format) date_hour = db_data.date.strftime("%H:%M:%S") target = "detail" @@ -731,12 +851,22 @@ def datatable_detail(self, all_data, start_index, end_index, measurement_directi return result def datatable_max_power(self, all_data, start_index, end_index): + """Generate the datatable for the maximum power data. + + Args: + all_data (list): List of all data. + start_index (int): Start index of the data. + end_index (int): End index of the data. + + Returns: + list: Resulting datatable. 
+ """ index = 0 result = [] measurement_direction = "consumption_max_power" event_date = "" target = "daily" - contract = self.db.get_contract(self.usage_point_id) + contract = DatabaseContracts(self.usage_point_id).get() if hasattr(contract, "subscribed_power") and contract.subscribed_power is not None: max_power = int(contract.subscribed_power.split(" ")[0]) * 1000 else: diff --git a/src/models/config.py b/src/models/config.py index 4a155d03..d7da4cac 100755 --- a/src/models/config.py +++ b/src/models/config.py @@ -1,5 +1,4 @@ """Configuration class loader and checker.""" - import logging import re from pathlib import Path @@ -23,8 +22,8 @@ class Config: default (dict): The default configuration settings. """ - def __init__(self, path=APPLICATION_PATH_DATA): - self.path = path + def __init__(self): + self.path = APPLICATION_PATH_DATA self.db = None self.file = "config.yaml" self.path_file = f"{self.path}/{self.file}" @@ -101,6 +100,7 @@ def __init__(self, path=APPLICATION_PATH_DATA): "keyfile": None, }, } + self.load() def set_db(self, db): """Set the database.""" @@ -109,16 +109,11 @@ def set_db(self, db): def load(self): """Load the configuration.""" config_file = f"{self.path_file}" - if Path(config_file).exists(): - with Path(config_file).open(encoding="utf-8") as file: - self.config = yaml.safe_load(file) - - else: + if not Path(config_file).exists(): with Path(config_file).open(mode="a", encoding="utf-8") as file: file.write(yaml.dump(self.default)) - with Path(config_file).open(encoding="utf-8") as file: - self.config = yaml.safe_load(file) - + with Path(config_file).open(encoding="utf-8") as file: + self.config = yaml.safe_load(file) if self.config is None: return { "error": True, @@ -173,7 +168,7 @@ def display(self): Returns: None """ - logging.info("Display configuration :") + logging.debug("Display configuration :") for key, value in self.config.items(): if isinstance(value, dict): logging.info(f" {key}:") diff --git a/src/models/database.py 
b/src/models/database.py deleted file mode 100644 index d6cf2fa0..00000000 --- a/src/models/database.py +++ /dev/null @@ -1,1871 +0,0 @@ -"""Manage all database operations.""" -import hashlib -import json -import logging -import os -import traceback -from datetime import datetime, timedelta -from os.path import exists - -from sqlalchemy import asc, create_engine, delete, desc, func, inspect, select, update -from sqlalchemy.orm import scoped_session, sessionmaker -from sqlalchemy.pool import NullPool - -from config import MAX_IMPORT_TRY -from db_schema import ( - Addresses, - Config, - ConsumptionDaily, - ConsumptionDailyMaxPower, - ConsumptionDetail, - Contracts, - Ecowatt, - ProductionDaily, - ProductionDetail, - Statistique, - Tempo, - TempoConfig, - UsagePoints, -) -from dependencies import APPLICATION_PATH, APPLICATION_PATH_DATA, get_version, str2bool, title, title_warning - -# available_database = ["sqlite", "postgresql", "mysql+pymysql"] -available_database = ["sqlite", "postgresql"] - - -class Database: - """Represents a database connection and provides methods for database operations.""" - - def __init__(self, config, path=APPLICATION_PATH_DATA): - """Initialize a Database object. - - Args: - config (Config): The configuration object. - path (str, optional): The path to the database. Defaults to APPLICATION_PATH_DATA. 
- """ - self.config = config - self.path = path - - if not self.config.storage_config() or self.config.storage_config().startswith("sqlite"): - self.db_name = "cache.db" - self.db_path = f"{self.path}/{self.db_name}" - self.uri = f"sqlite:///{self.db_path}?check_same_thread=False" - else: - self.storage_type = self.config.storage_config().split(":")[0] - if self.storage_type in available_database: - self.uri = self.config.storage_config() - else: - logging.critical(f"Database {self.storage_type} not supported (only SQLite & PostgresSQL)") - - os.system(f"cd {APPLICATION_PATH}; DB_URL='{self.uri}' alembic upgrade head ") - - self.engine = create_engine( - self.uri, - echo=False, - query_cache_size=0, - isolation_level="READ UNCOMMITTED", - poolclass=NullPool, - ) - self.session = scoped_session(sessionmaker(self.engine, autocommit=True, autoflush=True)) - self.inspector = inspect(self.engine) - - self.lock_file = f"{self.path}/.lock" - - # MIGRATE v7 to v8 - if os.path.isfile(f"{self.path}/enedisgateway.db"): - title_warning("=> Migration de l'ancienne base de données vers la nouvelle structure.") - self.migratev7tov8() - - def migratev7tov8(self): - """Migrates the database from version 7 to version 8.""" - uri = f"sqlite:///{self.path}/enedisgateway.db" - engine = create_engine(uri, echo=True, query_cache_size=0) - session = scoped_session(sessionmaker(engine, autocommit=True, autoflush=True)) - - for measurement_direction in ["consumption", "production"]: - logging.warning(f'Migration des "{measurement_direction}_daily"') - if measurement_direction == "consumption": - table = ConsumptionDaily - else: - table = ProductionDaily - daily_data = session.execute(f"select * from {measurement_direction}_daily order by date").all() - current_date = "" - year_value = 0 - bulk_insert = [] - for daily in daily_data: - usage_point_id = daily[0] - date = datetime.strptime(daily[1], "%Y-%m-%d") - value = daily[2] - year_value = year_value + value - bulk_insert.append( - table( 
- usage_point_id=usage_point_id, - date=date, - value=value, - blacklist=0, - fail_count=0, - ) - ) - if current_date != date.strftime("%Y"): - logging.warning(f" - {date.strftime('%Y')} => {round(year_value / 1000, 2)}kW") - current_date = date.strftime("%Y") - year_value = 0 - self.session.add_all(bulk_insert) - - logging.warning(f'Migration des "{measurement_direction}_detail"') - if measurement_direction == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - detail_data = session.execute(f"select * from {measurement_direction}_detail order by date").all() - current_date = "" - day_value = 0 - bulk_insert = [] - for detail in detail_data: - usage_point_id = detail[0] - date = datetime.strptime(detail[1], "%Y-%m-%d %H:%M:%S") - timedelta(minutes=30) - value = detail[2] - interval = detail[3] - measure_type = detail[4] - day_value = day_value + value / (60 / interval) - bulk_insert.append( - table( - usage_point_id=usage_point_id, - date=date, - value=value, - interval=interval, - measure_type=measure_type, - blacklist=0, - fail_count=0, - ) - ) - if current_date != date.strftime("%m"): - logging.warning(f" - {date.strftime('%Y-%m')} => {round(day_value / 1000, 2)}kW") - current_date = date.strftime("%m") - day_value = 0 - self.session.add_all(bulk_insert) - os.replace(f"{self.path}/enedisgateway.db", f"{self.path}/enedisgateway.db.migrate") - - def init_database(self): - """Initialize the database with default values.""" - try: - logging.info("Configure Databases") - query = select(Config).where(Config.key == "day") - day = self.session.scalars(query).one_or_none() - if day: - day.value = datetime.now().strftime("%Y-%m-%d") - else: - self.session.add(Config(key="day", value=datetime.now().strftime("%Y-%m-%d"))) - logging.info(" => day") - query = select(Config).where(Config.key == "call_number") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="call_number", value="0")) - logging.info(" => call_number") 
- query = select(Config).where(Config.key == "max_call") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="max_call", value="500")) - logging.info(" => max_call") - query = select(Config).where(Config.key == "version") - version = self.session.scalars(query).one_or_none() - if version: - version.value = get_version() - else: - self.session.add(Config(key="version", value=get_version())) - logging.info(" => version") - query = select(Config).where(Config.key == "lock") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="lock", value="0")) - logging.info(" => lock") - query = select(Config).where(Config.key == "lastUpdate") - if not self.session.scalars(query).one_or_none(): - self.session.add(Config(key="lastUpdate", value=str(datetime.now()))) - logging.info(" => lastUpdate") - logging.info(" Success") - except Exception as e: - traceback.print_exc() - logging.error(e) - logging.critical("Database initialize failed!") - - def purge_database(self): - """Purges the SQLite database.""" - logging.separator_warning() - logging.info("Reset SQLite Database") - if os.path.exists(f"{self.path}/cache.db"): - os.remove(f"{self.path}/cache.db") - logging.info(" => Success") - else: - logging.info(" => No cache detected") - - def lock_status(self): - """Check the lock status of the database. - - Returns: - bool: True if the database is locked, False otherwise. - """ - if exists(self.lock_file): - return True - else: - return False - - def lock(self): - """Locks the database. - - Returns: - bool: True if the database is locked, False otherwise. - """ - with open(self.lock_file, "xt") as f: - f.write(str(datetime.now())) - f.close() - return self.lock_status() - - def unlock(self): - """Unlocks the database. - - Returns: - bool: True if the database is unlocked, False otherwise. 
- """ - if os.path.exists(self.lock_file): - os.remove(self.lock_file) - return self.lock_status() - - def clean_database(self, current_usage_point_id): - """Clean the database by removing unused data. - - Args: - current_usage_point_id (list): List of current usage point IDs. - - Returns: - bool: True if the database is cleaned successfully, False otherwise. - """ - for usage_point in self.get_usage_point_all(): - if usage_point.usage_point_id not in current_usage_point_id: - logging.warning(f"- Suppression du point de livraison {usage_point.usage_point_id}") - self.delete_usage_point(usage_point.usage_point_id) - self.delete_addresse(usage_point.usage_point_id) - self.delete_daily(usage_point.usage_point_id) - self.delete_detail(usage_point.usage_point_id) - self.delete_daily_max_power(usage_point.usage_point_id) - return True - - def refresh_object(self): - """Refreshe the ORM objects.""" - title("Refresh ORM Objects") - self.session.expire_all() - - # ---------------------------------------------------------------------------------------------------------------- - # CONFIG - # ---------------------------------------------------------------------------------------------------------------- - def get_config(self, key): - query = select(Config).where(Config.key == key) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def set_config(self, key, value): - query = select(Config).where(Config.key == key) - config = self.session.scalars(query).one_or_none() - if config: - config.value = json.dumps(value) - else: - self.session.add(Config(key=key, value=json.dumps(value))) - self.session.flush() - self.session.close() - self.refresh_object() - - # ---------------------------------------------------------------------------------------------------------------- - # USAGE POINTS - # ---------------------------------------------------------------------------------------------------------------- - def get_usage_point_all(self): - query 
= select(UsagePoints) - data = self.session.scalars(query).all() - self.session.close() - return data - - def get_usage_point(self, usage_point_id): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def get_usage_point_plan(self, usage_point): - data = self.get_usage_point(usage_point) - if data.plan in ["HP/HC"]: - return "HC/HP" - return data.plan - - def set_usage_point(self, usage_point_id, data): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - - if usage_points is not None: - if "enable" in data and data["enable"] is not None: - usage_points.enable = str2bool(data["enable"]) - if "name" in data and data["name"] is not None: - usage_points.name = data["name"] - if "cache" in data and data["cache"] is not None: - usage_points.cache = str2bool(data["cache"]) - if "consumption" in data and data["consumption"] is not None: - usage_points.consumption = str2bool(data["consumption"]) - if "consumption_detail" in data and data["consumption_detail"] is not None: - usage_points.consumption_detail = str2bool(data["consumption_detail"]) - if "consumption_max_power" in data and data["consumption_max_power"] is not None: - usage_points.consumption_max_power = str2bool(data["consumption_max_power"]) - if "production" in data and data["production"] is not None: - usage_points.production = str2bool(data["production"]) - if "production_detail" in data and data["production_detail"] is not None: - usage_points.production_detail = str2bool(data["production_detail"]) - if "production_price" in data and data["production_price"] is not None: - usage_points.production_price = data["production_price"] - if "consumption_price_base" in data and data["consumption_price_base"] is not None: - usage_points.consumption_price_base = data["consumption_price_base"] - if 
"consumption_price_hc" in data and data["consumption_price_hc"] is not None: - usage_points.consumption_price_hc = data["consumption_price_hc"] - if "consumption_price_hp" in data and data["consumption_price_hp"] is not None: - usage_points.consumption_price_hp = data["consumption_price_hp"] - if "offpeak_hours_0" in data and data["offpeak_hours_0"] is not None: - usage_points.offpeak_hours_0 = data["offpeak_hours_0"] - if "offpeak_hours_1" in data and data["offpeak_hours_1"] is not None: - usage_points.offpeak_hours_1 = data["offpeak_hours_1"] - if "offpeak_hours_2" in data and data["offpeak_hours_2"] is not None: - usage_points.offpeak_hours_2 = data["offpeak_hours_2"] - if "offpeak_hours_3" in data and data["offpeak_hours_3"] is not None: - usage_points.offpeak_hours_3 = data["offpeak_hours_3"] - if "offpeak_hours_4" in data and data["offpeak_hours_4"] is not None: - usage_points.offpeak_hours_4 = data["offpeak_hours_4"] - if "offpeak_hours_5" in data and data["offpeak_hours_5"] is not None: - usage_points.offpeak_hours_5 = data["offpeak_hours_5"] - if "offpeak_hours_6" in data and data["offpeak_hours_6"] is not None: - usage_points.offpeak_hours_6 = data["offpeak_hours_6"] - if "plan" in data and data["plan"] is not None: - usage_points.plan = data["plan"] - else: - usage_points.plan = "BASE" - if "refresh_addresse" in data and data["refresh_addresse"] is not None: - usage_points.refresh_addresse = str2bool(data["refresh_addresse"]) - if "refresh_contract" in data and data["refresh_contract"] is not None: - usage_points.refresh_contract = str2bool(data["refresh_contract"]) - if "token" in data and data["token"] is not None: - usage_points.token = data["token"] - if "progress" in data and data["progress"] is not None: - usage_points.progress = data["progress"] - if "progress_status" in data and data["progress_status"] is not None: - usage_points.progress_status = data["progress_status"] - if "consumption_max_date" in data: - if data["consumption_max_date"] and 
data["consumption_max_date"] is not None: - consumption_max_date = data["consumption_max_date"] - if isinstance(consumption_max_date, datetime): - usage_points.consumption_max_date = consumption_max_date - else: - usage_points.consumption_max_date = datetime.strptime(consumption_max_date, "%Y-%m-%d") - if "consumption_detail_max_date" in data: - if data["consumption_detail_max_date"] and data["consumption_detail_max_date"] is not None: - consumption_detail_max_date = data["consumption_detail_max_date"] - if isinstance(consumption_detail_max_date, datetime): - usage_points.consumption_detail_max_date = consumption_detail_max_date - else: - usage_points.consumption_detail_max_date = datetime.strptime( - consumption_detail_max_date, "%Y-%m-%d" - ) - if "production_max_date" in data: - if data["production_max_date"] and data["production_max_date"] is not None: - production_max_date = data["production_max_date"] - if isinstance(production_max_date, datetime): - usage_points.production_max_date = production_max_date - else: - usage_points.production_max_date = datetime.strptime(production_max_date, "%Y-%m-%d") - if "production_detail_max_date" in data: - if data["production_detail_max_date"] and data["production_detail_max_date"] is not None: - production_detail_max_date = data["production_detail_max_date"] - if isinstance(production_detail_max_date, datetime): - usage_points.production_detail_max_date = production_detail_max_date - else: - usage_points.production_detail_max_date = datetime.strptime( - production_detail_max_date, "%Y-%m-%d" - ) - if "call_number" in data and data["call_number"] is not None: - usage_points.call_number = data["call_number"] - if "quota_reached" in data and data["quota_reached"] is not None: - usage_points.quota_reached = str2bool(data["quota_reached"]) - if "quota_limit" in data and data["quota_limit"] is not None: - usage_points.quota_limit = data["quota_limit"] - if "quota_reset_at" in data and data["quota_reset_at"] is not None: - 
usage_points.quota_reset_at = data["quota_reset_at"] - if "last_call" in data and data["last_call"] is not None: - usage_points.last_call = data["last_call"] - if "ban" in data and data["ban"] is not None: - usage_points.ban = str2bool(data["ban"]) - if "consentement_expiration" in data and data["consentement_expiration"] is not None: - usage_points.consentement_expiration = data["consentement_expiration"] - else: - if "enable" in data and data["enable"] is not None: - enable = data["enable"] - else: - enable = True - if "name" in data and data["name"] is not None: - name = data["name"] - else: - name = "" - if "cache" in data and data["cache"] is not None: - cache = data["cache"] - else: - cache = True - if "consumption" in data and data["consumption"] is not None: - consumption = data["consumption"] - else: - consumption = True - if "consumption_max_power" in data and data["consumption_max_power"] is not None: - consumption_max_power = data["consumption_max_power"] - else: - consumption_max_power = True - if "consumption_detail" in data and data["consumption_detail"] is not None: - consumption_detail = data["consumption_detail"] - else: - consumption_detail = True - if "production" in data and data["production"] is not None: - production = data["production"] - else: - production = False - if "production_detail" in data and data["production_detail"] is not None: - production_detail = data["production_detail"] - else: - production_detail = False - if "production_price" in data and data["production_price"] is not None: - production_price = data["production_price"] - else: - production_price = 0 - if ( - "consumption_price_base" in data - and data["consumption_price_base"] is not None - and data["consumption_price_base"] != "" - ): - consumption_price_base = data["consumption_price_base"] - else: - consumption_price_base = 0 - if ( - "consumption_price_hc" in data - and data["consumption_price_hc"] is not None - and data["consumption_price_hc"] != "" - ): - 
consumption_price_hc = data["consumption_price_hc"] - else: - consumption_price_hc = 0 - if ( - "consumption_price_hp" in data - and data["consumption_price_hp"] is not None - and data["consumption_price_hp"] != "" - ): - consumption_price_hp = data["consumption_price_hp"] - else: - consumption_price_hp = 0 - if "offpeak_hours_0" in data and data["offpeak_hours_0"] is not None: - offpeak_hours_0 = data["offpeak_hours_0"] - else: - offpeak_hours_0 = "" - if "offpeak_hours_1" in data and data["offpeak_hours_1"] is not None: - offpeak_hours_1 = data["offpeak_hours_1"] - else: - offpeak_hours_1 = "" - if "offpeak_hours_2" in data and data["offpeak_hours_2"] is not None: - offpeak_hours_2 = data["offpeak_hours_2"] - else: - offpeak_hours_2 = "" - if "offpeak_hours_3" in data and data["offpeak_hours_3"] is not None: - offpeak_hours_3 = data["offpeak_hours_3"] - else: - offpeak_hours_3 = "" - if "offpeak_hours_4" in data and data["offpeak_hours_4"] is not None: - offpeak_hours_4 = data["offpeak_hours_4"] - else: - offpeak_hours_4 = "" - if "offpeak_hours_5" in data and data["offpeak_hours_5"] is not None: - offpeak_hours_5 = data["offpeak_hours_5"] - else: - offpeak_hours_5 = "" - if "offpeak_hours_6" in data and data["offpeak_hours_6"] is not None: - offpeak_hours_6 = data["offpeak_hours_6"] - else: - offpeak_hours_6 = "" - if "plan" in data and data["plan"] is not None: - plan = data["plan"] - else: - plan = "BASE" - if "refresh_addresse" in data and data["refresh_addresse"] is not None: - refresh_addresse = data["refresh_addresse"] - else: - refresh_addresse = False - if "refresh_contract" in data and data["refresh_contract"] is not None: - refresh_contract = data["refresh_contract"] - else: - refresh_contract = False - if "token" in data and data["token"] is not None: - token = data["token"] - else: - token = "" - progress = 0 - if "progress" in data and data["progress"] is not None: - progress = data["progress"] - progress_status = "" - if "progress_status" in data 
and data["progress_status"] is not None: - progress_status = data["progress_status"] - consumption_max_date = None - if "consumption_max_date" in data: - if not data["consumption_max_date"] or data["consumption_max_date"] is None: - consumption_max_date = None - else: - consumption_max_date = data["consumption_max_date"] - if not isinstance(consumption_max_date, datetime): - consumption_max_date = datetime.strptime(consumption_max_date, "%Y-%m-%d") - consumption_detail_max_date = None - if "consumption_detail_max_date" in data: - if "consumption_detail_max_date" in data or data["consumption_detail_max_date"] is None: - if not data["consumption_detail_max_date"] or data["consumption_detail_max_date"] is None: - consumption_detail_max_date = None - else: - consumption_detail_max_date = data["consumption_detail_max_date"] - if not isinstance(consumption_detail_max_date, datetime): - consumption_detail_max_date = datetime.strptime(consumption_detail_max_date, "%Y-%m-%d") - production_max_date = None - if "production_max_date" in data: - if not data["production_max_date"] or data["production_max_date"] is None: - production_max_date = None - else: - production_max_date = data["production_max_date"] - if not isinstance(production_max_date, datetime): - production_max_date = datetime.strptime(production_max_date, "%Y-%m-%d") - production_detail_max_date = None - if "production_detail_max_date" in data: - if not data["production_detail_max_date"] or data["production_detail_max_date"] is None: - production_detail_max_date = None - else: - production_detail_max_date = data["production_detail_max_date"] - if isinstance(production_detail_max_date, datetime): - production_detail_max_date = production_detail_max_date - else: - production_detail_max_date = datetime.strptime(production_detail_max_date, "%Y-%m-%d") - - if "call_number" in data and data["call_number"] is not None: - call_number = data["call_number"] - else: - call_number = 0 - if "quota_reached" in data and 
data["quota_reached"] is not None: - quota_reached = str2bool(data["quota_reached"]) - else: - quota_reached = False - if "quota_limit" in data and data["quota_limit"] is not None: - quota_limit = data["quota_limit"] - else: - quota_limit = 0 - if "quota_reset_at" in data and data["quota_reset_at"] is not None: - quota_reset_at = data["quota_reset_at"] - else: - quota_reset_at = None - if "last_call" in data and data["last_call"] is not None: - last_call = data["last_call"] - else: - last_call = None - if "ban" in data and data["ban"] is not None: - ban = str2bool(data["ban"]) - else: - ban = False - if "consentement_expiration" in data and data["consentement_expiration"] is not None: - consentement_expiration = data["consentement_expiration"] - else: - consentement_expiration = None - - self.session.add( - UsagePoints( - usage_point_id=usage_point_id, - name=name, - cache=str2bool(cache), - consumption=str2bool(consumption), - consumption_detail=str2bool(consumption_detail), - consumption_max_power=str2bool(consumption_max_power), - production=str2bool(production), - production_detail=str2bool(production_detail), - production_price=production_price, - consumption_price_base=consumption_price_base, - consumption_price_hc=consumption_price_hc, - consumption_price_hp=consumption_price_hp, - offpeak_hours_0=offpeak_hours_0, - offpeak_hours_1=offpeak_hours_1, - offpeak_hours_2=offpeak_hours_2, - offpeak_hours_3=offpeak_hours_3, - offpeak_hours_4=offpeak_hours_4, - offpeak_hours_5=offpeak_hours_5, - offpeak_hours_6=offpeak_hours_6, - plan=plan, - refresh_addresse=str2bool(refresh_addresse), - refresh_contract=str2bool(refresh_contract), - token=token, - progress=progress, - progress_status=progress_status, - enable=str2bool(enable), - consumption_max_date=consumption_max_date, - consumption_detail_max_date=consumption_detail_max_date, - production_max_date=production_max_date, - production_detail_max_date=production_detail_max_date, - call_number=call_number, - 
quota_reached=str2bool(quota_reached), - quota_limit=quota_limit, - quota_reset_at=quota_reset_at, - last_call=last_call, - ban=str2bool(ban), - consentement_expiration=consentement_expiration, - ) - ) - self.session.flush() - self.session.close() - - def progress(self, usage_point_id, increment): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - usage_points.progress = usage_points.progress + increment - self.session.close() - - def last_call_update(self, usage_point_id): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - usage_points.last_call = datetime.now() - self.session.flush() - self.session.close() - - def usage_point_update( - self, - usage_point_id, - consentement_expiration=None, - call_number=None, - quota_reached=None, - quota_limit=None, - quota_reset_at=None, - last_call=None, - ban=None, - ): - query = select(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id) - usage_points = self.session.scalars(query).one_or_none() - if consentement_expiration is not None: - usage_points.consentement_expiration = consentement_expiration - if call_number is not None: - usage_points.call_number = call_number - if quota_reached is not None: - usage_points.quota_reached = quota_reached - if quota_limit is not None: - usage_points.quota_limit = quota_limit - if quota_reset_at is not None: - usage_points.quota_reset_at = quota_reset_at - if last_call is not None: - usage_points.last_call = last_call - if ban is not None: - usage_points.ban = ban - self.session.flush() - self.session.close() - - def delete_usage_point(self, usage_point_id): - self.session.execute(delete(Addresses).where(Addresses.usage_point_id == usage_point_id)) - self.session.execute(delete(Contracts).where(Contracts.usage_point_id == usage_point_id)) - self.session.execute( - 
delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - ) - self.session.execute(delete(ConsumptionDetail).where(ConsumptionDetail.usage_point_id == usage_point_id)) - self.session.execute(delete(ConsumptionDaily).where(ConsumptionDaily.usage_point_id == usage_point_id)) - self.session.execute(delete(ProductionDetail).where(ProductionDetail.usage_point_id == usage_point_id)) - self.session.execute(delete(ProductionDaily).where(ProductionDaily.usage_point_id == usage_point_id)) - self.session.execute(delete(UsagePoints).where(UsagePoints.usage_point_id == usage_point_id)) - self.session.flush() - self.session.close() - return True - - def get_error_log(self, usage_point_id): - data = self.get_usage_point(usage_point_id) - return data.last_error - - def set_error_log(self, usage_point_id, message): - values = {UsagePoints.last_error: message} - self.session.execute(update(UsagePoints, values=values).where(UsagePoints.usage_point_id == usage_point_id)) - self.session.flush() - return True - - # ---------------------------------------------------------------------------------------------------------------- - # ADDRESSES - # ---------------------------------------------------------------------------------------------------------------- - def get_addresse(self, usage_point_id): - query = ( - select(Addresses).join(UsagePoints.relation_addressess).where(UsagePoints.usage_point_id == usage_point_id) - ) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def set_addresse(self, usage_point_id, data, count=0): - query = ( - select(Addresses).join(UsagePoints.relation_addressess).where(Addresses.usage_point_id == usage_point_id) - ) - addresses = self.session.scalars(query).one_or_none() - if addresses is not None: - addresses.street = data["street"] - addresses.locality = data["locality"] - addresses.postal_code = data["postal_code"] - addresses.insee_code = data["insee_code"] - 
addresses.city = data["city"] - addresses.country = data["country"] - addresses.geo_points = data["geo_points"] - addresses.count = count - else: - self.session.add( - Addresses( - usage_point_id=usage_point_id, - street=data["street"], - locality=data["locality"], - postal_code=data["postal_code"], - insee_code=data["insee_code"], - city=data["city"], - country=data["country"], - geo_points=data["geo_points"], - count=count, - ) - ) - self.session.flush() - self.session.close() - - def delete_addresse(self, usage_point_id): - self.session.execute(delete(Addresses).where(Addresses.usage_point_id == usage_point_id)) - self.session.flush() - self.session.close() - return True - - # ---------------------------------------------------------------------------------------------------------------- - # CONTRACTS - # ---------------------------------------------------------------------------------------------------------------- - def get_contract(self, usage_point_id): - query = ( - select(Contracts).join(UsagePoints.relation_contract).where(UsagePoints.usage_point_id == usage_point_id) - ) - data = self.session.scalars(query).one_or_none() - self.session.close() - return data - - def set_contract( - self, - usage_point_id, - data, - count=0, - ): - query = ( - select(Contracts).join(UsagePoints.relation_contract).where(UsagePoints.usage_point_id == usage_point_id) - ) - contract = self.session.scalars(query).one_or_none() - if contract is not None: - contract.usage_point_status = data["usage_point_status"] - contract.meter_type = data["meter_type"] - contract.segment = data["segment"] - contract.subscribed_power = data["subscribed_power"] - contract.last_activation_date = data["last_activation_date"] - contract.distribution_tariff = data["distribution_tariff"] - contract.offpeak_hours_0 = data["offpeak_hours_0"] - contract.offpeak_hours_1 = data["offpeak_hours_1"] - contract.offpeak_hours_2 = data["offpeak_hours_2"] - contract.offpeak_hours_3 = data["offpeak_hours_3"] - 
contract.offpeak_hours_4 = data["offpeak_hours_4"] - contract.offpeak_hours_5 = data["offpeak_hours_5"] - contract.offpeak_hours_6 = data["offpeak_hours_6"] - contract.contract_status = data["contract_status"] - contract.last_distribution_tariff_change_date = data["last_distribution_tariff_change_date"] - contract.count = count - else: - self.session.add( - Contracts( - usage_point_id=usage_point_id, - usage_point_status=data["usage_point_status"], - meter_type=data["meter_type"], - segment=data["segment"], - subscribed_power=data["subscribed_power"], - last_activation_date=data["last_activation_date"], - distribution_tariff=data["distribution_tariff"], - offpeak_hours_0=data["offpeak_hours_0"], - offpeak_hours_1=data["offpeak_hours_1"], - offpeak_hours_2=data["offpeak_hours_2"], - offpeak_hours_3=data["offpeak_hours_3"], - offpeak_hours_4=data["offpeak_hours_4"], - offpeak_hours_5=data["offpeak_hours_5"], - offpeak_hours_6=data["offpeak_hours_6"], - contract_status=data["contract_status"], - last_distribution_tariff_change_date=data["last_distribution_tariff_change_date"], - count=count, - ) - ) - self.session.flush() - self.session.close() - - # ---------------------------------------------------------------------------------------------------------------- - # DAILY - # ---------------------------------------------------------------------------------------------------------------- - def get_daily_all(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - data = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .order_by(table.date.desc()) - ).all() - self.session.close() - return data - - def get_daily_datatable( - self, - usage_point_id, - order_column="date", - order_dir="asc", - 
search=None, - measurement_direction="consumption", - ): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - - sort = asc(order_column) if order_dir == "desc" else desc(order_column) - - yesterday = datetime.combine(datetime.now() - timedelta(days=1), datetime.max.time()) - if search is not None and search != "": - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where((table.date.like(f"%{search}%")) | (table.value.like(f"%{search}%"))) - .where(table.date <= yesterday) - .order_by(sort) - ) - else: - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where(table.date <= yesterday) - .order_by(sort) - ) - return result.all() - - def get_daily_count(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - data = self.session.scalars( - select([func.count()]) - .select_from(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - ).one_or_none() - self.session.close() - return data - - def get_daily_date(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - data = self.session.scalars(select(table).join(relation).where(table.id == unique_id)).first() - self.session.flush() - self.session.close() - return data - - def 
get_daily_state(self, usage_point_id, date, measurement_direction="consumption"): - if self.get_daily_date(usage_point_id, date, measurement_direction) is not None: - return True - else: - return False - - def get_daily_last_date(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - current_data = self.session.scalars( - select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date) - ).first() - self.session.flush() - self.session.close() - if current_data is None: - return False - else: - return current_data.date - - def get_daily_last(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - current_data = self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.value != 0) - .order_by(table.date.desc()) - ).first() - self.session.flush() - self.session.close() - if current_data is None: - return False - else: - return current_data - - def get_daily_first_date(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date.desc()) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).first() - if current_data is None: - return False - else: - return current_data.date - - def 
get_daily_fail_count(self, usage_point_id, date, measurement_direction="consumption"): - result = self.get_daily_date(usage_point_id, date, measurement_direction) - if hasattr(result, "fail_count"): - return result.fail_count - else: - return 0 - - def daily_fail_increment(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.id == unique_id) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - daily = self.session.scalars(query).one_or_none() - if daily is not None: - fail_count = int(daily.fail_count) + 1 - if fail_count >= MAX_IMPORT_TRY: - blacklist = 1 - fail_count = 0 - else: - blacklist = 0 - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.value = 0 - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - fail_count = 0 - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - blacklist=0, - fail_count=0, - ) - ) - self.session.flush() - return fail_count - - def get_daily_range(self, usage_point_id, begin, end, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = ( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date >= begin) - .where(table.date <= end) - .order_by(table.date.desc()) - ) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).all() - if current_data is None: - 
return False - else: - return current_data - - def get_daily(self, usage_point_id, begin, end, measurement_direction="consumption"): - delta = end - begin - result = {"missing_data": False, "date": {}, "count": 0} - for i in range(delta.days + 1): - checkDate = begin + timedelta(days=i) - checkDate = datetime.combine(checkDate, datetime.min.time()) - query_result = self.get_daily_date(usage_point_id, checkDate, measurement_direction) - checkDate = checkDate.strftime("%Y-%m-%d") - if query_result is None: - # NEVER QUERY - result["date"][checkDate] = { - "status": False, - "blacklist": 0, - "value": 0, - } - result["missing_data"] = True - else: - consumption = query_result.value - blacklist = query_result.blacklist - if consumption == 0: - # ENEDIS RETURN NO DATA - result["date"][checkDate] = { - "status": False, - "blacklist": blacklist, - "value": consumption, - } - result["missing_data"] = True - else: - # SUCCESS or BLACKLIST - result["date"][checkDate] = { - "status": True, - "blacklist": blacklist, - "value": consumption, - } - return result - - def insert_daily( - self, - usage_point_id, - date, - value, - blacklist=0, - fail_count=0, - measurement_direction="consumption", - ): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.id == unique_id) - daily = self.session.scalars(query).one_or_none() - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - if daily is not None: - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.value = value - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=value, - 
blacklist=blacklist, - fail_count=fail_count, - ) - ) - self.session.flush() - - def reset_daily(self, usage_point_id, date=None, mesure_type="consumption"): - data = self.get_daily_date(usage_point_id, date, mesure_type) - if mesure_type == "consumption": - table = ConsumptionDaily - else: - table = ProductionDaily - if data is not None: - values = { - table.value: 0, - table.blacklist: 0, - table.fail_count: 0, - } - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(update(table, values=values).where(table.id == unique_id)) - self.session.flush() - return True - else: - return False - - def delete_daily(self, usage_point_id, date=None, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDaily - else: - table = ProductionDaily - if date is not None: - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(table).where(table.id == unique_id)) - else: - self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) - self.session.flush() - return True - - def blacklist_daily(self, usage_point_id, date, action=True, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDaily - relation = UsagePoints.relation_consumption_daily - else: - table = ProductionDaily - relation = UsagePoints.relation_production_daily - query = select(table).join(relation).where(table.id == unique_id) - daily = self.session.scalars(query).one_or_none() - if daily is not None: - daily.blacklist = action - else: - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - blacklist=action, - fail_count=0, - ) - ) - self.session.flush() - return True - - def get_daily_date_range(self, usage_point_id): - return { - "begin": 
self.get_daily_last_date(usage_point_id), - "end": self.get_daily_first_date(usage_point_id), - } - - # ----------------------------------------------------------------------------------------------------------------- - # DETAIL CONSUMPTION - # ----------------------------------------------------------------------------------------------------------------- - def get_detail_all( - self, - usage_point_id, - begin=None, - end=None, - measurement_direction="consumption", - order_dir="desc", - ): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - sort = asc("date") if order_dir == "desc" else desc("date") - if begin is None and end is None: - return self.session.scalars( - select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(sort) - ).all() - elif begin is not None and end is None: - return self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .filter(table.date >= begin) - .order_by(sort) - ).all() - elif end is not None and begin is None: - return self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .filter(table.date <= end) - .order_by(sort) - ).all() - else: - return self.session.scalars( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .filter(table.date <= end) - .filter(table.date >= begin) - .order_by(sort) - ).all() - - def get_detail_datatable( - self, - usage_point_id, - order_column="date", - order_dir="asc", - search=None, - measurement_direction="consumption", - ): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - yesterday = datetime.combine(datetime.now() - 
timedelta(days=1), datetime.max.time()) - sort = asc(order_column) if order_dir == "desc" else desc(order_column) - if search is not None and search != "": - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where((table.date.like(f"%{search}%")) | (table.value.like(f"%{search}%"))) - .where(table.date <= yesterday) - .order_by(sort) - ) - else: - result = self.session.scalars( - select(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - .where(table.date <= yesterday) - .order_by(sort) - ) - return result.all() - - def get_detail_count(self, usage_point_id, measurement_direction="consumption"): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - return self.session.scalars( - select([func.count()]) - .select_from(table) - .join(relation) - .where(UsagePoints.usage_point_id == usage_point_id) - ).one_or_none() - - def get_detail_date(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - return self.session.scalars(select(table).join(relation).where(table.id == unique_id)).first() - - def get_detail_range( - self, - usage_point_id, - begin, - end, - measurement_direction="consumption", - order="desc", - ): - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - if order == "desc": - order = table.date.desc() - else: - order = table.date.asc() - 
query = ( - select(table) - .join(relation) - .where(table.usage_point_id == usage_point_id) - .where(table.date >= begin) - .where(table.date <= end) - .order_by(order) - ) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).all() - if current_data is None: - return False - else: - return current_data - - def get_detail(self, usage_point_id, begin, end, measurement_direction="consumption"): - # begin = datetime.combine(begin, datetime.min.time()) - # end = datetime.combine(end, datetime.max.time()) - - delta = begin - begin - - result = {"missing_data": False, "date": {}, "count": 0} - - for i in range(delta.days + 1): - query_result = self.get_detail_all( - usage_point_id=usage_point_id, - begin=begin, - end=end, - measurement_direction=measurement_direction, - ) - time_delta = abs(int((begin - end).total_seconds() / 60)) - total_internal = 0 - for query in query_result: - total_internal = total_internal + query.interval - total_time = abs(total_internal - time_delta) - if total_time > 300: - logging.info(f" - {total_time}m absente du relevé.") - result["missing_data"] = True - else: - for query in query_result: - result["date"][query.date] = { - "value": query.value, - "interval": query.interval, - "measure_type": query.measure_type, - "blacklist": query.blacklist, - } - return result - - def get_detail_state(self, usage_point_id, date, measurement_direction="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if measurement_direction == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - current_data = self.session.scalars(select(table).join(relation).where(table.id == unique_id)).one_or_none() - if current_data is None: - return False - else: - return True - - # def insert_detail_bulk(self, data, 
mesure_type="consumption"): - # if mesure_type == "consumption": - # table = ConsumptionDetail - # else: - # table = ProductionDetail - # begin = "" - # end = "" - # for scalar in data: - # if begin == "": - # begin = scalar.date - # end = scalar.date - # self.session.execute( - # table.__table__.delete().filter(ConsumptionDetail.date.between(begin, end)) - # ) - # self.session.add_all(data) - - def insert_detail( - self, - usage_point_id, - date, - value, - interval, - measure_type, - blacklist=0, - fail_count=0, - mesure_type="consumption", - ): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - detail = self.get_detail_date(usage_point_id, date, mesure_type) - if detail is not None: - detail.id = unique_id - detail.usage_point_id = usage_point_id - detail.date = date - detail.value = value - detail.interval = interval - detail.measure_type = measure_type - detail.blacklist = blacklist - detail.fail_count = fail_count - else: - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=value, - interval=interval, - measure_type=measure_type, - blacklist=blacklist, - fail_count=fail_count, - ) - ) - self.session.flush() - - def reset_detail(self, usage_point_id, date=None, mesure_type="consumption"): - detail = self.get_detail_date(usage_point_id, date, mesure_type) - if detail is not None: - detail.value = 0 - detail.interval = 0 - detail.blacklist = 0 - detail.fail_count = 0 - self.session.flush() - return True - else: - return False - - def reset_detail_range(self, usage_point_id, begin, end, mesure_type="consumption"): - detail = self.get_detail_range(usage_point_id, begin, end, mesure_type) - if detail is not None: - for row in detail: - row.value = 0 - row.interval = 0 - row.blacklist = 0 - row.fail_count = 0 - self.session.flush() - return True - else: - return False - - def 
delete_detail(self, usage_point_id, date=None, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - if date is not None: - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(table).where(table.id == unique_id)) - else: - self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) - self.session.flush() - return True - - def delete_detail_range(self, usage_point_id, date, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - else: - table = ProductionDetail - if date is not None: - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(table).where(table.id == unique_id)) - else: - self.session.execute(delete(table).where(table.usage_point_id == usage_point_id)) - self.session.flush() - return True - - def get_ratio_hc_hp(self, usage_point_id, begin, end, mesure_type="consumption"): - result = { - "HC": 0, - "HP": 0, - } - detail_data = self.get_detail_all( - usage_point_id=usage_point_id, - begin=begin, - end=end, - measurement_direction=mesure_type, - ) - for data in detail_data: - result[data.measure_type] = result[data.measure_type] + data.value - return result - - def get_detail_fail_count(self, usage_point_id, date, mesure_type="consumption"): - return self.get_detail_date(usage_point_id, date, mesure_type).fail_count - - def detail_fail_increment(self, usage_point_id, date, mesure_type="consumption"): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - if mesure_type == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - query = select(table).join(relation).where(table.id == unique_id) - detail = self.session.scalars(query).one_or_none() - if detail is 
not None: - fail_count = int(detail.fail_count) + 1 - if fail_count >= MAX_IMPORT_TRY: - blacklist = 1 - fail_count = 0 - else: - blacklist = 0 - detail.usage_point_id = usage_point_id - detail.date = date - detail.value = 0 - detail.interval = 0 - detail.measure_type = "HP" - detail.blacklist = blacklist - detail.fail_count = fail_count - else: - fail_count = 0 - self.session.add( - table( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - interval=0, - measure_type="HP", - blacklist=0, - fail_count=0, - ) - ) - self.session.flush() - return fail_count - - def get_detail_last_date(self, usage_point_id, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - current_data = self.session.scalars( - select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date) - ).first() - if current_data is None: - return False - else: - return current_data.date - - def get_detail_first_date(self, usage_point_id, mesure_type="consumption"): - if mesure_type == "consumption": - table = ConsumptionDetail - relation = UsagePoints.relation_consumption_detail - else: - table = ProductionDetail - relation = UsagePoints.relation_production_detail - query = select(table).join(relation).where(table.usage_point_id == usage_point_id).order_by(table.date.desc()) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).first() - if current_data is None: - return False - else: - return current_data.date - - def get_detail_date_range(self, usage_point_id): - return { - "begin": self.get_detail_last_date(usage_point_id), - "end": self.get_detail_first_date(usage_point_id), - } - - # ----------------------------------------------------------------------------------------------------------------- - # DAILY POWER 
- # ----------------------------------------------------------------------------------------------------------------- - def get_daily_max_power_all(self, usage_point_id, order="desc"): - if order == "desc": - order = ConsumptionDailyMaxPower.date.desc() - else: - order = ConsumptionDailyMaxPower.date.asc() - return self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - .order_by(order) - ).all() - - def get_daily_max_power_range(self, usage_point_id, begin, end): - query = ( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - .where(ConsumptionDailyMaxPower.date >= begin) - .where(ConsumptionDailyMaxPower.date <= end) - .order_by(ConsumptionDailyMaxPower.date.desc()) - ) - logging.debug(query.compile(compile_kwargs={"literal_binds": True})) - current_data = self.session.scalars(query).all() - if current_data is None: - return False - else: - return current_data - - def get_daily_power(self, usage_point_id, begin, end): - delta = end - begin - result = {"missing_data": False, "date": {}, "count": 0} - for i in range(delta.days + 1): - checkDate = begin + timedelta(days=i) - checkDate = datetime.combine(checkDate, datetime.min.time()) - query_result = self.get_daily_max_power_date(usage_point_id, checkDate) - checkDate = checkDate.strftime("%Y-%m-%d") - if query_result is None: - # NEVER QUERY - result["date"][checkDate] = { - "status": False, - "blacklist": 0, - "value": 0, - } - result["missing_data"] = True - else: - consumption = query_result.value - blacklist = query_result.blacklist - if consumption == 0: - # ENEDIS RETURN NO DATA - result["date"][checkDate] = { - "status": False, - "blacklist": blacklist, - "value": consumption, - } - result["missing_data"] = True - else: - # SUCCESS or BLACKLIST - result["date"][checkDate] = { - 
"status": True, - "blacklist": blacklist, - "value": consumption, - } - return result - - def get_daily_max_power_last_date(self, usage_point_id): - current_data = self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - .order_by(ConsumptionDailyMaxPower.date) - ).first() - if current_data is None: - return False - else: - return current_data.date - - def get_daily_max_power_date(self, usage_point_id, date): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - return self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(ConsumptionDailyMaxPower.id == unique_id) - ).one_or_none() - - def insert_daily_max_power(self, usage_point_id, date, event_date, value, blacklist=0, fail_count=0): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.event_date = event_date - daily.value = value - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - self.session.add( - ConsumptionDailyMaxPower( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - event_date=event_date, - value=value, - blacklist=blacklist, - fail_count=fail_count, - ) - ) - self.session.flush() - - def get_daily_max_power_count(self, usage_point_id): - return self.session.scalars( - select([func.count()]) - .select_from(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - ).one_or_none() - - def get_daily_max_power_datatable(self, usage_point_id, order_column="date", order_dir="asc", search=None): - yesterday = datetime.combine(datetime.now() - timedelta(days=1), 
datetime.max.time()) - sort = asc(order_column) if order_dir == "desc" else desc(order_column) - if search is not None and search != "": - result = self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - .where( - (ConsumptionDailyMaxPower.date.like(f"%{search}%")) - | (ConsumptionDailyMaxPower.value.like(f"%{search}%")) - ) - .where(ConsumptionDailyMaxPower.date <= yesterday) - .order_by(sort) - ) - else: - result = self.session.scalars( - select(ConsumptionDailyMaxPower) - .join(UsagePoints.relation_consumption_daily_max_power) - .where(UsagePoints.usage_point_id == usage_point_id) - .where(ConsumptionDailyMaxPower.date <= yesterday) - .order_by(sort) - ) - return result.all() - - def daily_max_power_fail_increment(self, usage_point_id, date): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - fail_count = int(daily.fail_count) + 1 - if fail_count >= MAX_IMPORT_TRY: - blacklist = 1 - fail_count = 0 - else: - blacklist = 0 - daily.id = unique_id - daily.usage_point_id = usage_point_id - daily.date = date - daily.event_date = None - daily.value = 0 - daily.blacklist = blacklist - daily.fail_count = fail_count - else: - fail_count = 0 - self.session.add( - ConsumptionDailyMaxPower( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - event_date=None, - value=0, - blacklist=0, - fail_count=0, - ) - ) - self.session.flush() - return fail_count - - def reset_daily_max_power(self, usage_point_id, date=None): - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - daily.event_date = None - daily.value = 0 - daily.blacklist = 0 - daily.fail_count = 0 - self.session.flush() - return True - else: - return False - - def delete_daily_max_power(self, usage_point_id, date=None): - if date is not None: - 
unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - self.session.execute(delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.id == unique_id)) - else: - self.session.execute( - delete(ConsumptionDailyMaxPower).where(ConsumptionDailyMaxPower.usage_point_id == usage_point_id) - ) - self.session.flush() - return True - - def blacklist_daily_max_power(self, usage_point_id, date, action=True): - unique_id = hashlib.md5(f"{usage_point_id}/{date}".encode("utf-8")).hexdigest() - daily = self.get_daily_max_power_date(usage_point_id, date) - if daily is not None: - daily.blacklist = action - else: - self.session.add( - ConsumptionDailyMaxPower( - id=unique_id, - usage_point_id=usage_point_id, - date=date, - value=0, - blacklist=action, - fail_count=0, - ) - ) - self.session.flush() - return True - - def get_daily_max_power_fail_count(self, usage_point_id, date): - result = self.get_daily_max_power_date(usage_point_id, date) - if hasattr(result, "fail_count"): - return result.fail_count - else: - return 0 - - # ----------------------------------------------------------------------------------------------------------------- - # TEMPO - # ----------------------------------------------------------------------------------------------------------------- - def get_tempo(self, order="desc"): - if order == "desc": - order = Tempo.date.desc() - else: - order = Tempo.date.asc() - return self.session.scalars(select(Tempo).order_by(order)).all() - - def get_tempo_range(self, begin, end, order="desc"): - if order == "desc": - order = Tempo.date.desc() - else: - order = Tempo.date.asc() - return self.session.scalars( - select(Tempo).where(Tempo.date >= begin).where(Tempo.date <= end).order_by(order) - ).all() - - def set_tempo(self, date, color): - date = datetime.combine(date, datetime.min.time()) - tempo = self.get_tempo_range(date, date) - if tempo: - for item in tempo: - item.color = color - else: - self.session.add(Tempo(date=date, 
color=color)) - self.session.flush() - return True - - # ----------------------------------------------------------------------------------------------------------------- - # TEMPO CONFIG - # ----------------------------------------------------------------------------------------------------------------- - def get_tempo_config(self, key): - query = select(TempoConfig).where(TempoConfig.key == key) - data = self.session.scalars(query).one_or_none() - if data is not None: - data = json.loads(data.value) - self.session.close() - return data - - def set_tempo_config(self, key, value): - query = select(TempoConfig).where(TempoConfig.key == key) - config = self.session.scalars(query).one_or_none() - if config: - config.value = json.dumps(value) - else: - self.session.add(TempoConfig(key=key, value=json.dumps(value))) - self.session.flush() - self.session.close() - - # ----------------------------------------------------------------------------------------------------------------- - # ECOWATT - # ----------------------------------------------------------------------------------------------------------------- - def get_ecowatt(self, order="desc"): - if order == "desc": - order = Ecowatt.date.desc() - else: - order = Ecowatt.date.asc() - return self.session.scalars(select(Ecowatt).order_by(order)).all() - - def get_ecowatt_range(self, begin, end, order="desc"): - if order == "desc": - order = Ecowatt.date.desc() - else: - order = Ecowatt.date.asc() - return self.session.scalars( - select(Ecowatt).where(Ecowatt.date >= begin).where(Ecowatt.date <= end).order_by(order) - ).all() - - def set_ecowatt(self, date, value, message, detail): - date = datetime.combine(date, datetime.min.time()) - ecowatt = self.get_ecowatt_range(date, date) - if ecowatt: - for item in ecowatt: - item.value = value - item.message = message - item.detail = detail - else: - self.session.add(Ecowatt(date=date, value=value, message=message, detail=detail)) - self.session.flush() - return True - - # 
---------------------------------------------------------------------------------------------------------------- - # STATISTIQUES - # ---------------------------------------------------------------------------------------------------------------- - def get_stat(self, usage_point_id, key): - return self.session.scalars( - select(Statistique) - .join(UsagePoints.relation_stats) - .where(Statistique.usage_point_id == usage_point_id) - .where(Statistique.key == key) - ).all() - - def set_stat(self, usage_point_id, key, value): - current_value = self.get_stat(usage_point_id, key) - if current_value: - for item in current_value: - item.value = value - else: - self.session.add(Statistique(usage_point_id=usage_point_id, key=key, value=value)) - self.session.flush() - return True - - def del_stat(self, usage_point_id): - self.session.execute(delete(Statistique).where(Statistique.usage_point_id == usage_point_id)) diff --git a/src/models/export_home_assistant.py b/src/models/export_home_assistant.py index bc5634c1..cbdee828 100644 --- a/src/models/export_home_assistant.py +++ b/src/models/export_home_assistant.py @@ -4,15 +4,19 @@ import logging from datetime import datetime, timedelta -import pytz from dateutil.relativedelta import relativedelta +from config import TIMEZONE_UTC +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.ecowatt import DatabaseEcowatt +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints from dependencies import get_version, truncate -from init import CONFIG, DB, MQTT +from init import CONFIG, MQTT from models.stat import Stat -UTC = pytz.UTC - def convert_kw(value): """Convert a value from kilowatts to watts. 
@@ -109,13 +113,11 @@ def __init__(self, usage_point_id): self.usage_point_id = usage_point_id self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" - self.config_usage_point = DB.get_usage_point(self.usage_point_id) self.config = None self.load_config() - self.usage_point = DB.get_usage_point(self.usage_point_id) + self.usage_point = DatabaseUsagePoints(self.usage_point_id).get() self.mqtt = MQTT self.tempo_color = None - print(self.config.__dict__) def load_config(self): """Load the configuration for Home Assistant. @@ -132,7 +134,7 @@ def load_config(self): if key in config_ha_config: setattr(self.config, key, config_ha_config[key]) - contract = DB.get_contract(self.usage_point_id) + contract = DatabaseContracts(self.usage_point_id).get() for key in self.config.__dict__: if hasattr(contract, key): setattr(self.config, key, getattr(contract, key)) @@ -198,8 +200,8 @@ def sensor(self, **kwargs): **{ "version": get_version(), "activationDate": self.config.activation_date, - "lastUpdate": datetime.now(tz=UTC).strftime(self.date_format_detail), - "timeLastCall": datetime.now(tz=UTC).strftime(self.date_format_detail), + "lastUpdate": datetime.now(tz=TIMEZONE_UTC).strftime(self.date_format_detail), + "timeLastCall": datetime.now(tz=TIMEZONE_UTC).strftime(self.date_format_detail), }, } @@ -218,11 +220,11 @@ def last_x_day(self, days, measurement_direction): measurement_direction (str): The direction of the measurement (e.g., consumption or production). 
""" uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_last{days}day" - end = datetime.combine(datetime.now(tz=UTC) - timedelta(days=1), datetime.max.time()) + end = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - timedelta(days=1), datetime.max.time()) begin = datetime.combine(end - timedelta(days), datetime.min.time()) - range = DB.get_detail_range(self.usage_point_id, begin, end, measurement_direction) + range_detail = DatabaseDetail(self.usage_point_id, measurement_direction).get_range(begin, end) attributes = {"time": [], measurement_direction: []} - for data in range: + for data in range_detail: attributes["time"].append(data.date.strftime("%Y-%m-%d %H:%M:%S")) attributes[measurement_direction].append(data.value) self.sensor( @@ -247,7 +249,7 @@ def history_usage_point_id(self, measurement_direction): """ uniq_id = f"myelectricaldata_linky_{self.usage_point_id}_{measurement_direction}_history" stats = Stat(self.usage_point_id, measurement_direction) - state = DB.get_daily_last(self.usage_point_id, measurement_direction) + state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() if state: state = state.value else: @@ -279,7 +281,7 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 monthly, and yearly values. 
""" stats = Stat(self.usage_point_id, measurement_direction) - state = DB.get_daily_last(self.usage_point_id, measurement_direction) + state = DatabaseDaily(self.usage_point_id, measurement_direction).get_last() if state: state = state.value else: @@ -316,7 +318,7 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 offpeak_hours.append(_offpeak_hours) idx = idx + 1 - yesterday = datetime.combine(datetime.now(tz=UTC) - relativedelta(days=1), datetime.max.time()) + yesterday = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - relativedelta(days=1), datetime.max.time()) previous_week = datetime.combine(yesterday - relativedelta(days=7), datetime.min.time()) yesterday_last_year = yesterday - relativedelta(years=1) @@ -403,8 +405,7 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 yesterday_evolution = stats.yesterday_evolution() monthly_evolution = stats.monthly_evolution() yearly_evolution = stats.yearly_evolution() - yesterday_last_year = DB.get_daily_date( - self.usage_point_id, + yesterday_last_year = DatabaseDaily(self.usage_point_id).get_date( datetime.combine(yesterday_last_year, datetime.min.time()), ) dailyweek_cost = [] @@ -415,7 +416,7 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 yesterday_hp_value_cost = 0 if measurement_direction == "consumption": daily_cost = 0 - plan = DB.get_usage_point_plan(self.usage_point_id) + plan = DatabaseUsagePoints(self.usage_point_id).get_plan() if plan == "HC/HP": for i in range(7): hp = stats.detail(i, "HP")["value"] @@ -433,7 +434,7 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 yesterday_hp_value_cost = convert_kw_to_euro(hp, self.config.consumption_price_hp) dailyweek_cost.append(round(value, 1)) elif plan == "TEMPO": - tempo_config = DB.get_tempo_config("price") + tempo_config = DatabaseTempo().get_config("price") for i in range(7): tempo_data = stats.tempo(i)["value"] hp = 
tempo_data["blue_hp"] + tempo_data["white_hp"] + tempo_data["red_hp"] @@ -503,7 +504,7 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 if self.config.consumption_max_power: yesterday_consumption_max_power = stats.max_power(0)["value"] - error_last_call = DB.get_error_log(self.usage_point_id) + error_last_call = DatabaseUsagePoints(self.usage_point_id).get_error_log() if error_last_call is None: error_last_call = "" @@ -511,7 +512,9 @@ def myelectricaldata_usage_point_id(self, measurement_direction): # noqa: PLR09 "yesterdayDate": stats.daily(0)["begin"], "yesterday": convert_kw(stats.daily(0)["value"]), "serviceEnedis": "myElectricalData", - "yesterdayLastYearDate": (datetime.now(tz=UTC) - relativedelta(years=1)).strftime(self.date_format), + "yesterdayLastYearDate": (datetime.now(tz=TIMEZONE_UTC) - relativedelta(years=1)).strftime( + self.date_format + ), "yesterdayLastYear": convert_kw(yesterday_last_year.value) if hasattr(yesterday_last_year, "value") else 0, "daily": [ convert_kw(stats.daily(0)["value"]), @@ -649,9 +652,9 @@ def tempo(self): """ uniq_id = "myelectricaldata_tempo_today" - begin = datetime.combine(datetime.now(tz=UTC), datetime.min.time()) - end = datetime.combine(datetime.now(tz=UTC), datetime.max.time()) - tempo_data = DB.get_tempo_range(begin, end, "asc") + begin = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) + end = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) + tempo_data = DatabaseTempo().get_range(begin, end, "asc") if tempo_data: date = tempo_data[0].date.strftime(self.date_format_detail) state = tempo_data[0].color @@ -674,7 +677,6 @@ def tempo(self): uniq_id = "myelectricaldata_tempo_tomorrow" begin = begin + timedelta(days=1) end = end + timedelta(days=1) - tempo_data = DB.get_tempo_range(begin, end, "asc") if tempo_data: date = tempo_data[0].date.strftime(self.date_format_detail) state = tempo_data[0].color @@ -702,7 +704,7 @@ def tempo_days(self): 
Returns: None """ - tempo_days = DB.get_tempo_config("days") + tempo_days = DatabaseTempo().get_config("days") for color, days in tempo_days.items(): self.tempo_days_sensor(f"{color}", days) @@ -738,9 +740,9 @@ def tempo_info(self): None """ uniq_id = "myelectricaldata_tempo_info" - tempo_days = DB.get_tempo_config("days") - tempo_price = DB.get_tempo_config("price") - if 22 > int(datetime.now(tz=UTC).strftime("%H")) < 6: + tempo_days = DatabaseTempo().get_config("days") + tempo_price = DatabaseTempo().get_config("price") + if 22 > int(datetime.now(tz=TIMEZONE_UTC).strftime("%H")) < 6: measure_type = "hc" else: measure_type = "hp" @@ -779,7 +781,7 @@ def tempo_price(self): Returns: None """ - tempo_price = DB.get_tempo_config("price") + tempo_price = DatabaseTempo().get_config("price") for color, price in tempo_price.items(): self.tempo_price_sensor( f"{color}", @@ -837,9 +839,9 @@ def ecowatt_delta(self, name, delta): None """ uniq_id = f"myelectricaldata_ecowatt_{name}" - current_date = datetime.combine(datetime.now(tz=UTC), datetime.min.time()) + timedelta(days=delta) + current_date = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) + timedelta(days=delta) fetch_date = current_date - timedelta(days=1) - ecowatt_data = DB.get_ecowatt_range(fetch_date, fetch_date, "asc") + ecowatt_data = DatabaseEcowatt().get_range(fetch_date, fetch_date, "asc") day_value = 0 if ecowatt_data: forecast = {} diff --git a/src/models/export_home_assistant_ws.py b/src/models/export_home_assistant_ws.py index 301e3520..9da7d4e3 100644 --- a/src/models/export_home_assistant_ws.py +++ b/src/models/export_home_assistant_ws.py @@ -9,8 +9,12 @@ import pytz import websocket +from config import TEMPO_BEGIN, TEMPO_END +from database.detail import DatabaseDetail +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints from dependencies import is_integer, str2bool, truncate -from init import CONFIG, DB +from init import CONFIG from 
models.export_home_assistant import HomeAssistant from models.stat import Stat @@ -28,7 +32,7 @@ def __init__(self, usage_point_id): """ self.websocket = None self.usage_point_id = usage_point_id - self.usage_point_id_config = DB.get_usage_point(self.usage_point_id) + self.usage_point_id_config = DatabaseUsagePoints(self.usage_point_id).get() self.config = None self.url = None self.ssl = None @@ -201,21 +205,20 @@ def get_data(self, statistic_ids, begin, end): stat_period = self.send(statistics_during_period) return stat_period - def import_data(self): # noqa: C901 + def import_data(self): # noqa: C901, PLR0912, PLR0915 """Import the data for the usage point into Home Assistant.""" logging.info(f"Importation des données du point de livraison : {self.usage_point_id}") try: - plan = DB.get_usage_point_plan(self.usage_point_id) + plan = DatabaseUsagePoints(self.usage_point_id).get_plan() if self.usage_point_id_config.consumption_detail: logging.info("Consommation") measurement_direction = "consumption" if "max_date" in self.config: logging.warning("Max date détectée %s", self.config["max_date"]) begin = datetime.strptime(self.config["max_date"], "%Y-%m-%d") - # begin = datetime.strptime(self.config["max_date"], "%Y-%m-%d").replace(tzinfo=TZ_PARIS) - detail = DB.get_detail_all(begin=begin, usage_point_id=self.usage_point_id, order_dir="desc") + detail = DatabaseDetail(self.usage_point_id).get_all(begin=begin, order_dir="desc") else: - detail = DB.get_detail_all(usage_point_id=self.usage_point_id, order_dir="desc") + detail = DatabaseDetail(self.usage_point_id).get_all(order_dir="desc") cost = 0 last_year = None @@ -224,9 +227,9 @@ def import_data(self): # noqa: C901 stats_kwh = {} stats_euro = {} - db_tempo_price = DB.get_tempo_config("price") + db_tempo_price = DatabaseTempo().get_config("price") tempo_color_ref = {} - for tempo_data in DB.get_tempo(): + for tempo_data in DatabaseTempo().get(): tempo_color_ref[tempo_data.date] = tempo_data.color stats = 
Stat(usage_point_id=self.usage_point_id, measurement_direction="consumption") @@ -262,11 +265,11 @@ def import_data(self): # noqa: C901 cost = value * self.usage_point_id_config.consumption_price_hp / 1000 tag = "hp" elif plan.upper() == "TEMPO": - if 600 <= hour_minute < 2200: + if TEMPO_BEGIN <= hour_minute < TEMPO_END: hour_type = "HP" else: hour_type = "HC" - if 600 <= hour_minute <= 2330: + if TEMPO_BEGIN <= hour_minute <= 2359: date = datetime.combine(data.date, datetime.min.time()) else: date = datetime.combine(data.date - timedelta(days=1), datetime.min.time()) @@ -405,16 +408,9 @@ def import_data(self): # noqa: C901 if "max_date" in self.config: logging.warning("Max date détectée %s", self.config["max_date"]) begin = datetime.strptime(self.config["max_date"], "%Y-%m-%d") - detail = DB.get_detail_all( - begin=begin, - usage_point_id=self.usage_point_id, - measurement_direction="production", - order_dir="desc", - ) + detail = DatabaseDetail(self.usage_point_id, "production").get_all(begin=begin, order_dir="desc") else: - detail = DB.get_detail_all( - usage_point_id=self.usage_point_id, measurement_direction="production", order_dir="desc" - ) + detail = DatabaseDetail(self.usage_point_id, "production").get_all(order_dir="desc") cost = 0 last_year = None diff --git a/src/models/export_influxdb.py b/src/models/export_influxdb.py index 57d82124..d3321b06 100755 --- a/src/models/export_influxdb.py +++ b/src/models/export_influxdb.py @@ -1,15 +1,30 @@ +"""Class for exporting data to InfluxDB.""" + import ast import logging from datetime import datetime import pytz -from dependencies import title -from init import DB, INFLUXDB +from config import TIMEZONE_UTC +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.ecowatt import DatabaseEcowatt +from database.tempo import DatabaseTempo +from init import INFLUXDB from models.stat import Stat -def forceRound(x, n): +def force_round(x, n): + """Round a number to a specified 
number of decimal places. + + Args: + x (float): The number to be rounded. + n (int): The number of decimal places to round to. + + Returns: + float: The rounded number. + """ import decimal d = decimal.Decimal(repr(x)) @@ -19,30 +34,36 @@ def forceRound(x, n): class ExportInfluxDB: + """Class for exporting data to InfluxDB.""" + def __init__(self, influxdb_config, usage_point_config, measurement_direction="consumption"): self.influxdb_config = influxdb_config - self.db = DB self.usage_point_config = usage_point_config self.usage_point_id = self.usage_point_config.usage_point_id self.measurement_direction = measurement_direction self.stat = Stat(self.usage_point_id, measurement_direction=measurement_direction) self.time_format = "%Y-%m-%dT%H:%M:%SZ" if "timezone" not in self.influxdb_config or self.influxdb_config["timezone"] == "UTC": - self.tz = pytz.UTC + self.tz = TIMEZONE_UTC else: self.tz = pytz.timezone(self.influxdb_config["timezone"]) def daily(self, measurement_direction="consumption"): + """Export daily data to InfluxDB. + + Args: + measurement_direction (str, optional): The measurement direction. Defaults to "consumption". 
+ """ current_month = "" if measurement_direction == "consumption": price = self.usage_point_config.consumption_price_base else: price = self.usage_point_config.production_price logging.info(f'Envoi des données "{measurement_direction.upper()}" dans influxdb') - get_daily_all = self.db.get_daily_all(self.usage_point_id) + get_daily_all = DatabaseDaily(self.usage_point_id).get_all() get_daily_all_count = len(get_daily_all) - last_data = self.db.get_daily_last_date(self.usage_point_id, measurement_direction) - first_data = self.db.get_daily_first_date(self.usage_point_id, measurement_direction) + last_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_last_date() + first_data = DatabaseDaily(self.usage_point_id, measurement_direction).get_first_date() if last_data and first_data: start = datetime.strftime(last_data, self.time_format) end = datetime.strftime(first_data, self.time_format) @@ -55,8 +76,6 @@ def daily(self, measurement_direction="consumption"): logging.info(f" Cache : {get_daily_all_count} / InfluxDb : {count}") for daily in get_daily_all: date = daily.date - # start = datetime.strftime(date, "%Y-%m-%dT00:00:00Z") - # end = datetime.strftime(date, "%Y-%m-%dT23:59:59Z") if current_month != date.strftime("%m"): logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") # if len(INFLUXDB.get(start, end, measurement_direction)) == 0: @@ -73,27 +92,30 @@ def daily(self, measurement_direction="consumption"): }, fields={ "Wh": float(watt), - "kWh": float(forceRound(kwatt, 5)), - "price": float(forceRound(euro, 5)), + "kWh": float(force_round(kwatt, 5)), + "price": float(force_round(euro, 5)), }, ) current_month = date.strftime("%m") - logging.info(f" => OK") + logging.info(" => OK") else: logging.info(f" => Données synchronisées ({count} valeurs)") else: - logging.info(f" => Aucune donnée") + logging.info(" => Aucune donnée") def detail(self, measurement_direction="consumption"): + """Export detailed data to InfluxDB. 
+ + Args: + measurement_direction (str, optional): The measurement direction. Defaults to "consumption". + """ current_month = "" measurement = f"{measurement_direction}_detail" logging.info(f'Envoi des données "{measurement.upper()}" dans influxdb') - get_detail_all = self.db.get_detail_all( - usage_point_id=self.usage_point_id, measurement_direction=measurement_direction - ) + get_detail_all = DatabaseDetail(self.usage_point_id, measurement_direction).get_all() get_detail_all_count = len(get_detail_all) - last_data = self.db.get_detail_last_date(self.usage_point_id, measurement_direction) - first_data = self.db.get_detail_first_date(self.usage_point_id, measurement_direction) + last_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_last_date() + first_data = DatabaseDetail(self.usage_point_id, measurement_direction).get_first_date() if last_data and first_data: start = datetime.strftime(last_data, self.time_format) end = datetime.strftime(first_data, self.time_format) @@ -103,21 +125,12 @@ def detail(self, measurement_direction="consumption"): for record in data.records: count += record.get_value() - # print(len(get_detail_all)) - # print(count) if get_detail_all_count != count: logging.info(f" Cache : {get_detail_all_count} / InfluxDb : {count}") - for index, detail in enumerate(get_detail_all): + for _, detail in enumerate(get_detail_all): date = detail.date - # start = datetime.strftime(date, self.time_format) if current_month != date.strftime("%m"): logging.info(f" - {date.strftime('%Y')}-{date.strftime('%m')}") - # if index < (len(get_detail_all) - 1): - # next_item = get_detail_all[index + 1] - # end = datetime.strftime(next_item.date, self.time_format) - # else: - # end = datetime.strftime(date, "%Y-%m-%dT23:59:59Z") - # if len(INFLUXDB.get(start, end, measurement)) == 0: watt = detail.value kwatt = watt / 1000 watth = watt / (60 / detail.interval) @@ -143,23 +156,24 @@ def detail(self, measurement_direction="consumption"): }, fields={ 
"W": float(watt), - "kW": float(forceRound(kwatt, 5)), + "kW": float(force_round(kwatt, 5)), "Wh": float(watth), - "kWh": float(forceRound(kwatth, 5)), - "price": float(forceRound(euro, 5)), + "kWh": float(force_round(kwatth, 5)), + "price": float(force_round(euro, 5)), }, ) current_month = date.strftime("%m") - logging.info(f" => OK") + logging.info(" => OK") else: logging.info(f" => Données synchronisées ({count} valeurs)") else: - logging.info(f" => Aucune donnée") + logging.info(" => Aucune donnée") def tempo(self): + """Export tempo data to InfluxDB.""" measurement = "tempo" logging.info('Envoi des données "TEMPO" dans influxdb') - tempo_data = self.db.get_tempo() + tempo_data = DatabaseTempo().get() if tempo_data: for data in tempo_data: INFLUXDB.write( @@ -175,9 +189,10 @@ def tempo(self): logging.info(" => Pas de donnée") def ecowatt(self): + """Export ecowatt data to InfluxDB.""" measurement = "ecowatt" - logging.info(f'Envoi des données "ECOWATT" dans influxdb') - ecowatt_data = self.db.get_ecowatt() + logging.info('Envoi des données "ECOWATT" dans influxdb') + ecowatt_data = DatabaseEcowatt().get() if ecowatt_data: for data in ecowatt_data: INFLUXDB.write( @@ -190,10 +205,10 @@ def ecowatt(self): ) data_detail = ast.literal_eval(data.detail) for date, value in data_detail.items(): - date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") + date_format = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") INFLUXDB.write( measurement=f"{measurement}_detail", - date=self.tz.localize(date), + date=self.tz.localize(date_format), tags={ "usage_point_id": self.usage_point_id, }, diff --git a/src/models/export_mqtt.py b/src/models/export_mqtt.py index fd067d0a..b5fc289b 100644 --- a/src/models/export_mqtt.py +++ b/src/models/export_mqtt.py @@ -1,74 +1,62 @@ +"""Export des données vers MQTT.""" + import ast import logging from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta -from dependencies import title -from init import CONFIG, DB, 
MQTT +from config import TIMEZONE_UTC +from database.addresses import DatabaseAddresses +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.ecowatt import DatabaseEcowatt +from database.max_power import DatabaseMaxPower +from database.statistique import DatabaseStatistique +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from init import MQTT from models.stat import Stat class ExportMqtt: + """A class for exporting MQTT data.""" + def __init__(self, usage_point_id): - self.config = CONFIG - self.db = DB self.usage_point_id = usage_point_id self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" self.mqtt = MQTT def status(self): + """Get the status of the account.""" logging.info("Statut du compte.") - usage_point_id_config = self.db.get_usage_point(self.usage_point_id) - # consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - if ( - hasattr(usage_point_id_config, "consentement_expiration") - and usage_point_id_config.consentement_expiration is not None - ): - consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - else: - consentement_expiration_date = "" - if hasattr(usage_point_id_config, "call_number") and usage_point_id_config.call_number is not None: - call_number = usage_point_id_config.call_number - else: - call_number = "" - if hasattr(usage_point_id_config, "quota_reached") and usage_point_id_config.quota_reached is not None: - quota_reached = usage_point_id_config.quota_reached - else: - quota_reached = "" - if hasattr(usage_point_id_config, "quota_limit") and usage_point_id_config.quota_limit is not None: - quota_limit = usage_point_id_config.quota_limit - else: - quota_limit = "" - if hasattr(usage_point_id_config, "quota_reset_at") and usage_point_id_config.quota_reset_at is not 
None: - quota_reset_at = (usage_point_id_config.quota_reset_at.strftime("%Y-%m-%d %H:%M:%S"),) - else: - quota_reset_at = "" - if hasattr(usage_point_id_config, "last_call") and usage_point_id_config.last_call is not None: - last_call = (usage_point_id_config.last_call.strftime("%Y-%m-%d %H:%M:%S"),) - else: - last_call = "" - if hasattr(usage_point_id_config, "ban") and usage_point_id_config.ban is not None: - ban = usage_point_id_config.ban - else: - ban = "" - consentement_expiration = { - f"{self.usage_point_id}/status/consentement_expiration": consentement_expiration_date, - f"{self.usage_point_id}/status/call_number": str(call_number), - f"{self.usage_point_id}/status/quota_reached": str(quota_reached), - f"{self.usage_point_id}/status/quota_limit": str(quota_limit), - f"{self.usage_point_id}/status/quota_reset_at": str(quota_reset_at), - f"{self.usage_point_id}/status/last_call": str(last_call), - f"{self.usage_point_id}/status/ban": str(ban), - } - # print(consentement_expiration) + usage_point_id_config = DatabaseUsagePoints(self.usage_point_id).get() + send_data = [ + "consentement_expiration", + "call_number", + "quota_reached", + "quota_limit", + "quota_reset_at", + "last_call", + "ban", + ] + consentement_expiration = {} + for item in send_data: + if hasattr(usage_point_id_config, item): + queue = f"{self.usage_point_id}/status/{item}" + value = getattr(usage_point_id_config, item) + if isinstance(value, datetime): + value = value.strftime("%Y-%m-%d %H:%M:%S") + consentement_expiration[queue] = str(getattr(usage_point_id_config, item)) self.mqtt.publish_multiple(consentement_expiration) logging.info(" => OK") def contract(self): + """Get the contract data.""" logging.info("Génération des messages du contrat") - contract_data = self.db.get_contract(self.usage_point_id) + contract_data = DatabaseContracts(self.usage_point_id).get() if hasattr(contract_data, "__table__"): output = {} for column in contract_data.__table__.columns: @@ -79,8 +67,9 @@ def 
contract(self): logging.info(" => ERREUR") def address(self): - logging.info(f"Génération des messages d'addresse") - address_data = self.db.get_addresse(self.usage_point_id) + """Get the address data.""" + logging.info("Génération des messages d'addresse") + address_data = DatabaseAddresses(self.usage_point_id).get() if hasattr(address_data, "__table__"): output = {} for column in address_data.__table__.columns: @@ -91,20 +80,23 @@ def address(self): logging.info(" => ERREUR") def daily_annual(self, price, measurement_direction="consumption"): + """Get the daily annual data.""" logging.info("Génération des données annuelles") - date_range = self.db.get_daily_date_range(self.usage_point_id) + date_range = DatabaseDaily(self.usage_point_id).get_date_range() stat = Stat(self.usage_point_id, measurement_direction) if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) + date_begin_current = datetime.combine( + date_end.replace(month=1).replace(day=1), datetime.min.time() + ).astimezone(TIMEZONE_UTC) finish = False while not finish: year = int(date_begin_current.strftime("%Y")) get_daily_year = stat.get_year(year=year) get_daily_month = stat.get_month(year=year) get_daily_week = stat.get_week(year=year) - if year == int(datetime.now().strftime("%Y")): + if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")): sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/current" else: sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" @@ -131,7 +123,11 @@ def daily_annual(self, price, 
measurement_direction="consumption"): for week in range(7): begin = stat.daily(week)["begin"] - begin_day = datetime.strptime(stat.daily(week)["begin"], self.date_format).strftime("%A") + begin_day = ( + datetime.strptime(stat.daily(week)["begin"], self.date_format) + .astimezone(TIMEZONE_UTC) + .strftime("%A") + ) end = stat.daily(week)["end"] value = stat.daily(week)["value"] mqtt_data[f"{sub_prefix}/week/{begin_day}/dateBegin"] = begin @@ -152,14 +148,9 @@ def daily_annual(self, price, measurement_direction="consumption"): if date_begin_current == date_begin: finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) date_begin_current = date_begin_current - relativedelta(years=1) if date_begin_current < date_begin: date_begin_current = date_begin - self.mqtt.publish_multiple(mqtt_data) logging.info(" => OK") @@ -167,12 +158,13 @@ def daily_annual(self, price, measurement_direction="consumption"): logging.info(" => Pas de donnée") def daily_linear(self, price, measurement_direction="consumption"): + """Get the daily linear data.""" logging.info("Génération des données linéaires journalières.") - date_range = self.db.get_daily_date_range(self.usage_point_id) + date_range = DatabaseDaily(self.usage_point_id).get_date_range() stat = Stat(self.usage_point_id, measurement_direction) if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) date_begin_current = date_end - relativedelta(years=1) idx = 0 finish = False @@ -213,7 +205,7 @@ def daily_linear(self, price, measurement_direction="consumption"): finish = True date_end = datetime.combine((date_end - 
relativedelta(years=1)), datetime.max.time()) date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: + if date_begin_current.astimezone(TIMEZONE_UTC) < date_begin.astimezone(TIMEZONE_UTC): date_begin_current = datetime.combine(date_begin, datetime.min.time()) idx = idx + 1 @@ -224,17 +216,18 @@ def daily_linear(self, price, measurement_direction="consumption"): logging.info(" => Pas de donnée") def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption"): + """Get the detailed annual data.""" logging.info("Génération des données annuelles détaillé.") - date_range = self.db.get_daily_date_range(self.usage_point_id) + date_range = DatabaseDetail(self.usage_point_id).get_date_range() stat = Stat(self.usage_point_id, measurement_direction) if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) finish = False while not finish: year = int(date_begin_current.strftime("%Y")) - month = int(datetime.now().strftime("%m")) + month = int(datetime.now(tz=TIMEZONE_UTC).strftime("%m")) get_detail_year_hp = stat.get_year(year=year, measure_type="HP") get_detail_year_hc = stat.get_year(year=year, measure_type="HC") get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP") @@ -250,7 +243,7 @@ def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption measure_type="HC", ) - if year == int(datetime.now().strftime("%Y")): + if year == int(datetime.now(tz=TIMEZONE_UTC).strftime("%Y")): sub_prefix = 
f"{self.usage_point_id}/{measurement_direction}/annual/current" else: sub_prefix = f"{self.usage_point_id}/{measurement_direction}/annual/{year}" @@ -283,14 +276,22 @@ def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption for week in range(7): # HP - begin_hp_day = datetime.strptime(stat.detail(week, "HP")["begin"], self.date_format).strftime("%A") + begin_hp_day = ( + datetime.strptime(stat.detail(week, "HP")["begin"], self.date_format) + .astimezone(TIMEZONE_UTC) + .strftime("%A") + ) value_hp = stat.detail(week, "HP")["value"] prefix = f"{sub_prefix}/week/{begin_hp_day}/hp" mqtt_data[f"{prefix}/Wh"] = value_hp mqtt_data[f"{prefix}/kWh"] = round(value_hp / 1000, 2) mqtt_data[f"{prefix}/euro"] = round(value_hp / 1000 * price_hp, 2) # HC - begin_hc_day = datetime.strptime(stat.detail(week, "HC")["begin"], self.date_format).strftime("%A") + begin_hc_day = ( + datetime.strptime(stat.detail(week, "HC")["begin"], self.date_format) + .astimezone(TIMEZONE_UTC) + .strftime("%A") + ) value_hc = stat.detail(week, "HC")["value"] prefix = f"{sub_prefix}/week/{begin_hc_day}/hc" mqtt_data[f"{prefix}/Wh"] = value_hc @@ -298,16 +299,16 @@ def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption mqtt_data[f"{prefix}/euro"] = round(value_hc / 1000 * price_hc, 2) for month in range(12): - month = month + 1 + current_month = month + 1 # HP - get_detail_month_hp = stat.get_month(year=year, month=month, measure_type="HP") - prefix = f"{sub_prefix}/month/{month}/hp" + get_detail_month_hp = stat.get_month(year=year, month=current_month, measure_type="HP") + prefix = f"{sub_prefix}/month/{current_month}/hp" mqtt_data[f"{prefix}/Wh"] = get_detail_month_hp["value"] mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hp["value"] / 1000, 2) mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hp["value"] / 1000 * price_hp, 2) # HC - get_detail_month_hc = stat.get_month(year=year, month=month, measure_type="HC") - prefix = 
f"{sub_prefix}/month/{month}/hc" + get_detail_month_hc = stat.get_month(year=year, month=current_month, measure_type="HC") + prefix = f"{sub_prefix}/month/{current_month}/hc" mqtt_data[f"{prefix}/Wh"] = get_detail_month_hc["value"] mqtt_data[f"{prefix}/kWh"] = round(get_detail_month_hc["value"] / 1000, 2) mqtt_data[f"{prefix}/euro"] = round(get_detail_month_hc["value"] / 1000 * price_hc, 2) @@ -328,12 +329,13 @@ def detail_annual(self, price_hp, price_hc=0, measurement_direction="consumption logging.info(" => Pas de donnée") def detail_linear(self, price_hp, price_hc=0, measurement_direction="consumption"): + """Get the detailed linear data.""" logging.info("Génération des données linéaires détaillées") - date_range = self.db.get_detail_date_range(self.usage_point_id) + date_range = DatabaseDetail(self.usage_point_id).get_date_range() stat = Stat(self.usage_point_id, measurement_direction) if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) + date_begin = datetime.combine(date_range["begin"], datetime.min.time()).astimezone(TIMEZONE_UTC) + date_end = datetime.combine(date_range["end"], datetime.max.time()).astimezone(TIMEZONE_UTC) date_begin_current = date_end - relativedelta(years=1) idx = 0 finish = False @@ -391,10 +393,11 @@ def detail_linear(self, price_hp, price_hc=0, measurement_direction="consumption logging.info(" => Pas de donnée") def max_power(self): + """Get the maximum power data.""" logging.info("Génération des données de puissance max journalières.") - max_power_data = self.db.get_daily_max_power_all(self.usage_point_id, order="asc") + max_power_data = DatabaseMaxPower(self.usage_point_id).get_all(order="asc") mqtt_data = {} - contract = self.db.get_contract(self.usage_point_id) + contract = DatabaseContracts(self.usage_point_id).get() max_value = 0 if max_power_data: if hasattr(contract, "subscribed_power"): @@ 
-419,11 +422,12 @@ def max_power(self): logging.info(" => Pas de donnée") def ecowatt(self): + """Get the ecowatt data.""" logging.info("Génération des données Ecowatt") - begin = datetime.combine(datetime.now() - relativedelta(days=1), datetime.min.time()) + begin = datetime.combine(datetime.now(tz=TIMEZONE_UTC) - relativedelta(days=1), datetime.min.time()) end = begin + timedelta(days=7) - ecowatt = self.db.get_ecowatt_range(begin, end) - today = datetime.combine(datetime.now(), datetime.min.time()) + ecowatt = DatabaseEcowatt().get_range(begin, end) + today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) mqtt_data = {} if ecowatt: for data in ecowatt: @@ -437,69 +441,71 @@ def ecowatt(self): mqtt_data[f"ecowatt/{queue}/value"] = data.value mqtt_data[f"ecowatt/{queue}/message"] = data.message for date, value in ast.literal_eval(data.detail).items(): - date = datetime.strptime(date, self.date_format_detail).strftime("%H") - mqtt_data[f"ecowatt/{queue}/detail/{date}"] = value + date_tmp = datetime.strptime(date, self.date_format_detail).astimezone(TIMEZONE_UTC).strftime("%H") + mqtt_data[f"ecowatt/{queue}/detail/{date_tmp}"] = value self.mqtt.publish_multiple(mqtt_data) logging.info(" => OK") else: logging.info(" => Pas de donnée") def tempo(self): + """Get the tempo data.""" logging.info("Envoie des données Tempo") mqtt_data = {} - tempo_data = self.db.get_stat(self.usage_point_id, "price_consumption") - tempo_price = self.db.get_tempo_config("price") + tempo_data = DatabaseStatistique(self.usage_point_id).get("price_consumption") + tempo_price = DatabaseTempo().get_config("price") if tempo_price: for color, price in tempo_price.items(): mqtt_data[f"tempo/price/{color}"] = price - tempo_days = self.db.get_tempo_config("days") + tempo_days = DatabaseTempo().get_config("days") if tempo_days: for color, days in tempo_days.items(): mqtt_data[f"tempo/days/{color}"] = days - today = datetime.combine(datetime.now(), datetime.min.time()) - 
tempo_color = self.db.get_tempo_range(today, today) + today = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.min.time()) + tempo_color = DatabaseTempo().get_range(today, today) if tempo_color: - mqtt_data[f"tempo/color/today"] = tempo_color[0].color + mqtt_data["tempo/color/today"] = tempo_color[0].color tomorrow = today + timedelta(days=1) - tempo_color = self.db.get_tempo_range(tomorrow, tomorrow) + tempo_color = DatabaseTempo().get_range(tomorrow, tomorrow) if tempo_color: - mqtt_data[f"tempo/color/tomorrow"] = tempo_color[0].color + mqtt_data["tempo/color/tomorrow"] = tempo_color[0].color if tempo_data: for year, data in ast.literal_eval(tempo_data[0].value).items(): - if year == datetime.now().strftime("%Y"): - year = "current" + select_year = year + if year == datetime.now(tz=TIMEZONE_UTC).strftime("%Y"): + select_year = "current" for color, tempo in data["TEMPO"].items(): - mqtt_data[f"{self.usage_point_id}/consumption/annual/{year}/thisYear/tempo/{color}/Wh"] = round( - tempo["Wh"], 2 - ) - mqtt_data[f"{self.usage_point_id}/consumption/annual/{year}/thisYear/tempo/{color}/kWh"] = round( - tempo["kWh"], 2 - ) - mqtt_data[f"{self.usage_point_id}/consumption/annual/{year}/thisYear/tempo/{color}/euro"] = round( - tempo["euro"], 2 - ) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/Wh" + ] = round(tempo["Wh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/kWh" + ] = round(tempo["kWh"], 2) + mqtt_data[ + f"{self.usage_point_id}/consumption/annual/{select_year}/thisYear/tempo/{color}/euro" + ] = round(tempo["euro"], 2) for month, month_data in data["month"].items(): for month_color, month_tempo in month_data["TEMPO"].items(): - if month == datetime.strftime(datetime.now(), "%m"): + if month == datetime.strftime(datetime.now(tz=TIMEZONE_UTC), "%m"): if month_tempo: mqtt_data[ - 
f"{self.usage_point_id}/consumption/annual/{year}/thisMonth/tempo/{month_color}/Wh" + f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/Wh" ] = round(month_tempo["Wh"], 2) mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/thisMonth/tempo/{month_color}/kWh" + f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/kWh" ] = round(month_tempo["kWh"], 2) mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/thisMonth/tempo/{month_color}/euro" + f"{self.usage_point_id}/consumption/annual/{select_year}/thisMonth/tempo/{month_color}/euro" ] = round(month_tempo["euro"], 2) if month_tempo: mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/month/{int(month)}/tempo/{month_color}/Wh" + f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/Wh" ] = round(month_tempo["Wh"], 2) mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/month/{int(month)}/tempo/{month_color}/kWh" + f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/kWh" ] = round(month_tempo["kWh"], 2) mqtt_data[ - f"{self.usage_point_id}/consumption/annual/{year}/month/{int(month)}/tempo/{month_color}/euro" + f"{self.usage_point_id}/consumption/annual/{select_year}/month/{int(month)}/tempo/{month_color}/euro" ] = round(month_tempo["euro"], 2) self.mqtt.publish_multiple(mqtt_data) logging.info(" => OK") diff --git a/src/models/export_mqttv1.py b/src/models/export_mqttv1.py deleted file mode 100644 index 5231295c..00000000 --- a/src/models/export_mqttv1.py +++ /dev/null @@ -1,441 +0,0 @@ -import logging -from datetime import datetime - -from dateutil.relativedelta import relativedelta - -from dependencies import title -from init import CONFIG, DB -from models.mqtt import Mqtt - - -class ExportMqtt: - def __init__(self, usage_point_id, measurement_direction="consumption"): - self.config = CONFIG - self.db = DB - 
self.mqtt_config = (self.config.mqtt_config(),) - self.usage_point_id = usage_point_id - self.measurement_direction = measurement_direction - self.date_format = "%Y-%m-%d" - if "enable" in self.mqtt_config and self.mqtt_config["enable"]: - if ["hostname"] not in self.mqtt_config: - self.connect() - else: - logging.warning("MQTT config is incomplete.") - else: - logging.info("MQTT disable") - - def connect(self): - MQTT = Mqtt( - hostname=self.mqtt_config["hostname"], - port=self.mqtt_config["port"], - username=self.mqtt_config["username"], - password=self.mqtt_config["password"], - client_id=self.mqtt_config["client_id"], - prefix=self.mqtt_config["prefix"], - retain=self.mqtt_config["retain"], - qos=self.mqtt_config["qos"], - ) - MQTT.connect() - - def status(self): - title(f"[{self.usage_point_id}] Statut du compte.") - usage_point_id_config = self.db.get_usage_point(self.usage_point_id) - # consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - if ( - hasattr(usage_point_id_config, "consentement_expiration") - and usage_point_id_config.consentement_expiration is not None - ): - consentement_expiration_date = usage_point_id_config.consentement_expiration.strftime("%Y-%m-%d %H:%M:%S") - else: - consentement_expiration_date = "" - if hasattr(usage_point_id_config, "call_number") and usage_point_id_config.call_number is not None: - call_number = usage_point_id_config.call_number - else: - call_number = "" - if hasattr(usage_point_id_config, "quota_reached") and usage_point_id_config.quota_reached is not None: - quota_reached = usage_point_id_config.quota_reached - else: - quota_reached = "" - if hasattr(usage_point_id_config, "quota_limit") and usage_point_id_config.quota_limit is not None: - quota_limit = usage_point_id_config.quota_limit - else: - quota_limit = "" - if hasattr(usage_point_id_config, "quota_reset_at") and usage_point_id_config.quota_reset_at is not None: - quota_reset_at = 
(usage_point_id_config.quota_reset_at.strftime("%Y-%m-%d %H:%M:%S"),) - else: - quota_reset_at = "" - if hasattr(usage_point_id_config, "last_call") and usage_point_id_config.last_call is not None: - last_call = (usage_point_id_config.last_call.strftime("%Y-%m-%d %H:%M:%S"),) - else: - last_call = "" - if hasattr(usage_point_id_config, "ban") and usage_point_id_config.ban is not None: - ban = usage_point_id_config.ban - else: - ban = "" - consentement_expiration = { - f"{self.usage_point_id}/status/consentement_expiration": consentement_expiration_date, - f"{self.usage_point_id}/status/call_number": str(call_number), - f"{self.usage_point_id}/status/quota_reached": str(quota_reached), - f"{self.usage_point_id}/status/quota_limit": str(quota_limit), - f"{self.usage_point_id}/status/quota_reset_at": str(quota_reset_at), - f"{self.usage_point_id}/status/last_call": str(last_call), - f"{self.usage_point_id}/status/ban": str(ban), - } - # print(consentement_expiration) - self.mqtt_config.publish_multiple(consentement_expiration) - title("Finish") - - def contract(self): - title(f"[{self.usage_point_id}] Exportation de données dans self.mqtt_config.") - - logging.info("Génération des messages du contrat") - contract_data = self.db.get_contract(self.usage_point_id) - if hasattr(contract_data, "__table__"): - output = {} - for column in contract_data.__table__.columns: - output[f"{self.usage_point_id}/contract/{column.name}"] = str(getattr(contract_data, column.name)) - self.mqtt_config.publish_multiple(output) - title("Finish") - else: - title("Failed") - - def address(self): - logging.info(f"[{self.usage_point_id}] Génération des messages d'addresse") - address_data = self.db.get_addresse(self.usage_point_id) - if hasattr(address_data, "__table__"): - output = {} - for column in address_data.__table__.columns: - output[f"{self.usage_point_id}/address/{column.name}"] = str(getattr(address_data, column.name)) - self.mqtt_config.publish_multiple(output) - title("Finish") - 
else: - title("Failed") - - def load_daily_data(self, begin, end, price, sub_prefix): - logging.info(f" {begin.strftime(self.date_format)} => {end.strftime(self.date_format)}") - prefix = f"{sub_prefix}" - self.mqtt_config.publish_multiple( - { - f"{prefix}/dateBegin": begin.strftime(self.date_format), - f"{prefix}/dateEnded": end.strftime(self.date_format), - } - ) - # DATA FORMATTING - this_year_watt = 0 - this_year_euro = 0 - this_year_begin = datetime.now() - this_year_end = datetime.now() - this_month_watt = 0 - this_month_euro = 0 - this_month_begin = datetime.now() - this_month_end = datetime.now() - month_watt = {} - month_euro = {} - month_begin = {} - month_end = {} - week_watt = {} - week_euro = {} - week_begin = datetime.now() - week_end = datetime.now() - week_idx = 0 - current_month_year = "" - current_this_month_year = "" - - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): - date = data.date - watt = data.value - kwatt = data.value / 1000 - euro = kwatt * price - this_year_begin = date - if this_year_end == "": - this_year_end = date - this_year_watt = this_year_watt + watt - this_year_euro = this_year_euro + euro - - if current_month_year == "": - current_month_year = date.strftime("%Y") - if date.strftime("%Y") == current_month_year: - if date.strftime("%m") not in month_watt: - month_watt[date.strftime("%m")] = watt - month_euro[date.strftime("%m")] = euro - month_end[date.strftime("%m")] = date - else: - month_watt[date.strftime("%m")] = month_watt[date.strftime("%m")] + watt - month_euro[date.strftime("%m")] = month_euro[date.strftime("%m")] + euro - month_begin[date.strftime("%m")] = date - - if week_idx < 7: - week_begin = date - if week_end == "": - week_end = date - if date not in week_watt: - week_watt[date] = watt - week_euro[date] = euro - else: - week_watt[date] = week_watt[date] + watt - week_euro[date] = week_euro[date] + euro - - if current_this_month_year == "": - 
current_this_month_year = date.strftime("%Y") - if date.strftime("%m") == datetime.now().strftime("%m") and date.strftime("%Y") == current_this_month_year: - this_month_begin = date - if this_month_end == "": - this_month_end = date - this_month_watt = this_month_watt + watt - this_month_euro = this_month_euro + euro - week_idx = week_idx + 1 - # MQTT FORMATTING - mqtt_data = { - f"{prefix}/thisYear/dateBegin": this_year_begin.strftime(self.date_format), - f"{prefix}/thisYear/dateEnd": this_year_end.strftime(self.date_format), - f"{prefix}/thisYear/base/Wh": this_year_watt, - f"{prefix}/thisYear/base/kWh": round(this_year_watt / 1000, 2), - f"{prefix}/thisYear/base/euro": round(this_year_euro, 2), - f"{prefix}/thisMonth/dateBegin": this_month_begin.strftime(self.date_format), - f"{prefix}/thisMonth/dateEnd": this_month_end.strftime(self.date_format), - f"{prefix}/thisMonth/base/Wh": this_month_watt, - f"{prefix}/thisMonth/base/kWh": round(this_month_watt / 1000, 2), - f"{prefix}/thisMonth/base/euro": round(this_month_euro, 2), - f"{prefix}/thisWeek/dateBegin": week_begin.strftime(self.date_format), - f"{prefix}/thisWeek/dateEnd": week_end.strftime(self.date_format), - } - for date, watt in month_watt.items(): - mqtt_data[f"{prefix}/months/{date}/base/Wh"] = watt - mqtt_data[f"{prefix}/months/{date}/base/kWh"] = round(watt / 1000, 2) - for date, euro in month_euro.items(): - mqtt_data[f"{prefix}/months/{date}/base/euro"] = round(euro, 2) - for date, value in month_begin.items(): - mqtt_data[f"{prefix}/months/{date}/dateBegin"] = value.strftime(self.date_format) - for date, value in month_end.items(): - mqtt_data[f"{prefix}/months/{date}/dateEnd"] = value.strftime(self.date_format) - - for date, watt in week_watt.items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/date"] = date.strftime(self.date_format) - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/base/Wh"] = watt - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/base/kWh"] = round(watt / 
1000, 2) - for date, euro in week_euro.items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/base/euro"] = round(euro, 2) - - # SEND TO self.mqtt_config - self.mqtt_config.publish_multiple(mqtt_data) - - def daily_annual(self, price): - logging.info("Génération des données annuelles") - date_range = self.db.get_daily_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - if date_begin_current.strftime("%Y") == datetime.now().strftime("%Y"): - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/{date_begin_current.strftime('%Y')}" - self.load_daily_data(date_begin_current, date_end, price, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = date_begin - title("Finish") - else: - title("No data") - - def daily_linear(self, price): - logging.info("Génération des données linéaires") - date_range = self.db.get_daily_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = 
f"{self.usage_point_id}/{self.measurement_direction}/linear/{key}" - self.load_daily_data(date_begin_current, date_end, price, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - title("Finish") - else: - title("No data") - - def load_detail_data(self, begin, end, price_hp, price_hc, sub_prefix): - logging.info(f" {begin.strftime(self.date_format)} => {end.strftime(self.date_format)}") - prefix = f"{sub_prefix}" - # DATA FORMATTING - week_idx = 0 - current_month_year = "" - current_this_month_year = "" - output = { - "hp": { - "this_year_watt": 0, - "this_year_euro": 0, - "month_watt": {}, - "month_euro": {}, - "week_watt": {}, - "week_euro": {}, - "this_month_watt": 0, - "this_month_euro": 0, - }, - "hc": { - "this_year_watt": 0, - "this_year_euro": 0, - "month_watt": {}, - "month_euro": {}, - "week_watt": {}, - "week_euro": {}, - "this_month_watt": 0, - "this_month_euro": 0, - }, - "base": { - "this_year_watt": 0, - "this_year_euro": 0, - "month_watt": {}, - "month_euro": {}, - "week_watt": {}, - "week_euro": {}, - "this_month_watt": 0, - "this_month_euro": 0, - }, - } - - for data in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): - date = data.date - watt = data.value / (60 / data.interval) - kwatt = watt / 1000 - - measure_type = data.measure_type.lower() - output[measure_type]["this_year_watt"] = output[measure_type]["this_year_watt"] + watt - if measure_type == "hp": - euro = kwatt * price_hp - else: - euro = kwatt * price_hc - output[measure_type]["this_year_euro"] = output[measure_type]["this_year_euro"] + euro - - if current_month_year == "": - current_month_year = date.strftime("%Y") - if 
date.strftime("%Y") == current_month_year: - if date.strftime("%m") not in output[measure_type]["month_watt"]: - output[measure_type]["month_watt"][date.strftime("%m")] = watt - output[measure_type]["month_euro"][date.strftime("%m")] = euro - else: - output[measure_type]["month_watt"][date.strftime("%m")] = ( - output[measure_type]["month_watt"][date.strftime("%m")] + watt - ) - output[measure_type]["month_euro"][date.strftime("%m")] = ( - output[measure_type]["month_euro"][date.strftime("%m")] + euro - ) - - if week_idx < 7: - if date not in output[measure_type]["week_watt"]: - output[measure_type]["week_watt"][date] = watt - output[measure_type]["week_euro"][date] = euro - else: - output[measure_type]["week_watt"][date] = output[measure_type]["week_watt"][date] + watt - output[measure_type]["week_euro"][date] = output[measure_type]["week_euro"][date] + euro - - # print(output) - - if current_this_month_year == "": - current_this_month_year = date.strftime("%Y") - if date.strftime("%m") == datetime.now().strftime("%m") and date.strftime("%Y") == current_this_month_year: - output[measure_type]["this_month_watt"] = output[measure_type]["this_month_watt"] + watt - output[measure_type]["this_month_euro"] = output[measure_type]["this_month_euro"] + euro - week_idx = week_idx + 1 - - # MQTT FORMATTING - for measure_type, data in output.items(): - mqtt_data = { - f"{prefix}/thisYear/{measure_type}/Wh": output[measure_type]["this_year_watt"], - f"{prefix}/thisYear/{measure_type}/kWh": round(output[measure_type]["this_year_watt"] / 1000, 2), - f"{prefix}/thisYear/{measure_type}/euro": round(output[measure_type]["this_year_euro"], 2), - f"{prefix}/thisMonth/{measure_type}/Wh": output[measure_type]["this_month_watt"], - f"{prefix}/thisMonth/{measure_type}/kWh": round(output[measure_type]["this_month_watt"] / 1000, 2), - f"{prefix}/thisMonth/{measure_type}/euro": round(output[measure_type]["this_month_euro"], 2), - } - for date, watt in 
output[measure_type]["month_watt"].items(): - mqtt_data[f"{prefix}/months/{date}/{measure_type}/Wh"] = watt - mqtt_data[f"{prefix}/months/{date}/{measure_type}/kWh"] = round(watt / 1000, 2) - for date, euro in output[measure_type]["month_euro"].items(): - mqtt_data[f"{prefix}/months/{date}/{measure_type}/euro"] = round(euro, 2) - - for date, watt in output[measure_type]["week_watt"].items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/{measure_type}/Wh"] = watt - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/{measure_type}/kWh"] = round(watt / 1000, 2) - for date, euro in output[measure_type]["week_euro"].items(): - mqtt_data[f"{prefix}/thisWeek/{date.strftime('%A')}/{measure_type}/euro"] = round(euro, 2) - - # SEND TO MQTT - self.mqtt_config.publish_multiple(mqtt_data) - - def detail_annual(self, price_hp, price_hc=0): - logging.info("Génération des données annuelles détaillées") - date_range = self.db.get_detail_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = datetime.combine(date_end.replace(month=1).replace(day=1), datetime.min.time()) - finish = False - while not finish: - if date_begin_current.strftime("%Y") == datetime.now().strftime("%Y"): - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/current" - else: - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/annual/{date_begin_current.strftime('%Y')}" - self.load_detail_data(date_begin_current, date_end, price_hp, price_hc, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine( - (date_end - relativedelta(years=1)).replace(month=12, day=31), - datetime.max.time(), - ) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = 
date_begin - title("Finish") - else: - title("No data") - - def detail_linear(self, price_hp, price_hc=0): - logging.info("Génération des données linéaires détaillées") - date_range = self.db.get_detail_date_range(self.usage_point_id) - if date_range["begin"] and date_range["end"]: - date_begin = datetime.combine(date_range["begin"], datetime.min.time()) - date_end = datetime.combine(date_range["end"], datetime.max.time()) - date_begin_current = date_end - relativedelta(years=1) - idx = 0 - finish = False - while not finish: - if idx == 0: - key = "year" - else: - key = f"year-{idx}" - sub_prefix = f"{self.usage_point_id}/{self.measurement_direction}/linear/{key}" - self.load_detail_data(date_begin_current, date_end, price_hp, price_hc, sub_prefix) - # CALCUL NEW DATE - if date_begin_current == date_begin: - finish = True - date_end = datetime.combine((date_end - relativedelta(years=1)), datetime.max.time()) - date_begin_current = date_begin_current - relativedelta(years=1) - if date_begin_current < date_begin: - date_begin_current = datetime.combine(date_begin, datetime.min.time()) - idx = idx + 1 - title("Finish") - else: - title("No data") diff --git a/src/models/influxdb.py b/src/models/influxdb.py index d7865c0c..e9e3a512 100644 --- a/src/models/influxdb.py +++ b/src/models/influxdb.py @@ -1,5 +1,8 @@ +"""This module contains the InfluxDB class for connecting to and interacting with InfluxDB.""" import datetime +import json import logging +import sys import influxdb_client from dateutil.tz import tzlocal @@ -7,89 +10,121 @@ from influxdb_client.client.util.date_utils import DateHelper from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS +from database.config import DatabaseConfig from dependencies import separator, separator_warning, title +# from . 
import INFLUXDB + class InfluxDB: + """Class for connecting to and interacting with InfluxDB.""" + + class BatchingOptions: + """Default configuration for InfluxDB batching options.""" + + def __init__(self) -> None: + """Initialize a new instance of the InfluxDB class. + + Parameters: + batch_size (int): The number of data points to batch together before writing to InfluxDB. + flush_interval (int): The time interval (in milliseconds) between flushing batches to InfluxDB. + jitter_interval (int): The maximum random interval (in milliseconds) to add to the flush interval. + retry_interval (int): The time interval (in milliseconds) between retry attempts when writing to InfluxDB fails. + max_retry_time (str): The maximum total time (in milliseconds) to spend on retry attempts. + max_retries (int): The maximum number of retry attempts when writing to InfluxDB fails. + max_retry_delay (str): The maximum delay (in milliseconds) between retry attempts. + exponential_base (int): The base value for exponential backoff when retrying. + + Returns: + None + """ + self.batch_size: int = 1000 + self.flush_interval: int = 1000 + self.jitter_interval: int = 0 + self.retry_interval: int = 5000 + self.max_retry_time: str = "180_000" + self.max_retries: int = 5 + self.max_retry_delay: str = "125_000" + self.exponential_base: int = 2 + + class Config: + """Default configuration for InfluxDB.""" + + def __init__(self) -> None: + """Initialize an instance of the InfluxDBConfig class. + + Attributes: + - enable (bool): Indicates whether InfluxDB is enabled or not. + - scheme (str): The scheme to use for connecting to InfluxDB (e.g., "http", "https"). + - hostname (str): The hostname of the InfluxDB server. + - port (int): The port number to use for connecting to InfluxDB. + - token (str): The authentication token for accessing InfluxDB. + - org (str): The organization name in InfluxDB. + - bucket (str): The bucket name in InfluxDB. 
+ - method (str): The method to use for writing data to InfluxDB (e.g., "SYNCHRONOUS", "BATCHING"). + """ + self.enable: bool = False + self.scheme: str = "http" + self.hostname: str = "localhost" + self.port: int = 8086 + self.token: str = "my-token" + self.org: str = "myorg" + self.bucket: str = "mybucket" + self.method: str = "SYNCHRONOUS" + def __init__( self, - scheme: str, - hostname: str, - port: int, - token: str, - org: str = "myelectricaldata.fr", - bucket: str = "myelectricaldata", - method="SYNCHRONOUS", - write_options=None, ): - if write_options is None: - write_options = {} - self.scheme = scheme - self.hostname = hostname - self.port = port - self.token = token - self.org = org - self.bucket = bucket self.influxdb = {} self.query_api = {} self.write_api = {} self.delete_api = {} self.buckets_api = {} - self.method = method - self.write_options = {} - if "batch_size" in write_options: - self.write_options["batch_size"] = write_options["batch_size"] - else: - self.write_options["batch_size"] = 1000 - if "flush_interval" in write_options: - self.write_options["flush_interval"] = write_options["flush_interval"] - else: - self.write_options["flush_interval"] = 1000 - if "jitter_interval" in write_options: - self.write_options["jitter_interval"] = write_options["jitter_interval"] - else: - self.write_options["jitter_interval"] = 0 - if "retry_interval" in write_options: - self.write_options["retry_interval"] = write_options["retry_interval"] - else: - self.write_options["retry_interval"] = 5000 - if "max_retry_time" in write_options: - self.write_options["max_retry_time"] = write_options["max_retry_time"] - else: - self.write_options["max_retry_time"] = "180_000" - if "max_retries" in write_options: - self.write_options["max_retries"] = write_options["max_retries"] - else: - self.write_options["max_retries"] = 5 - if "max_retry_delay" in write_options: - self.write_options["max_retry_delay"] = write_options["max_retry_delay"] - else: - 
self.write_options["max_retry_delay"] = 125_000 - if "exponential_base" in write_options: - self.write_options["exponential_base"] = write_options["exponential_base"] - else: - self.write_options["exponential_base"] = 2 - self.connect() self.retention = 0 self.max_retention = None - self.get_list_retention_policies() - if self.retention != 0: - day = int(self.retention / 60 / 60 / 24) - logging.warning(f" ATTENTION, InfluxDB est configuré avec une durée de rétention de {day} jours.") - logging.warning( - f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." - ) - else: - logging.warning(" => Aucune durée de rétention de données détectée.") + self.config = self.Config() + self.config_batching = self.BatchingOptions() + self.load_config() + if self.config.enable: + self.connect() + self.get_list_retention_policies() + if self.retention != 0: + day = int(self.retention / 60 / 60 / 24) + logging.warning(f" ATTENTION, InfluxDB est configuré avec une durée de rétention de {day} jours.") + logging.warning( + f" Toutes les données supérieures à {day} jours ne seront jamais insérées dans celui-ci." + ) + else: + logging.warning(" => Aucune durée de rétention de données détectée.") + + def load_config(self): + """Load the configuration for InfluxDB. + + This method loads the configuration values from the usage point and contract objects. + """ + self.influxdb_config = json.loads(DatabaseConfig().get("influxdb").value) + for key in self.config.__dict__: + if key in self.influxdb_config: + setattr(self.config, key, self.influxdb_config.get(key)) + + if "batching" in self.influxdb_config: + self.batching_options = self.influxdb_config.get("batching") + for key in self.config_batching.__dict__: + if key in self.batching_options: + setattr(self.config_batching, key, self.batching_options.get(key)) def connect(self): + """Connect to InfluxDB. + + This method establishes a connection to the InfluxDB database using the provided configuration. 
+ """ separator() - logging.info(f"Connect to InfluxDB {self.hostname}:{self.port}") + logging.info(f"Connect to InfluxDB {self.config.hostname}:{self.config.port}") date_utils.date_helper = DateHelper(timezone=tzlocal()) self.influxdb = influxdb_client.InfluxDBClient( - url=f"{self.scheme}://{self.hostname}:{self.port}", - token=self.token, - org=self.org, + url=f"{self.config.scheme}://{self.config.hostname}:{self.config.port}", + token=self.config.token, + org=self.config.org, timeout="600000", ) health = self.influxdb.health() @@ -98,33 +133,32 @@ def connect(self): else: logging.critical( """ - Impossible de se connecter à la base influxdb. Vous pouvez récupérer un exemple ici : https://github.com/m4dm4rtig4n/enedisgateway2mqtt#configuration-file """ ) - exit(1) + sys.exit(1) - title(f"Méthode d'importation : {self.method.upper()}") - if self.method.upper() == "ASYNCHRONOUS": + title(f"Méthode d'importation : {self.config.method.upper()}") + if self.config.method.upper() == "ASYNCHRONOUS": logging.warning( ' ATTENTION, le mode d\'importation "ASYNCHRONOUS" est très consommateur de ressources système.' 
) self.write_api = self.influxdb.write_api(write_options=ASYNCHRONOUS) - elif self.method.upper() == "SYNCHRONOUS": + elif self.config.method.upper() == "SYNCHRONOUS": self.write_api = self.influxdb.write_api(write_options=SYNCHRONOUS) else: self.write_api = self.influxdb.write_api( write_options=influxdb_client.WriteOptions( - batch_size=self.write_options["batch_size"], - flush_interval=self.write_options["flush_interval"], - jitter_interval=self.write_options["jitter_interval"], - retry_interval=self.write_options["retry_interval"], - max_retries=self.write_options["max_retries"], - max_retry_delay=self.write_options["max_retry_delay"], - exponential_base=self.write_options["exponential_base"], + batch_size=self.config_batching.batch_size, + flush_interval=self.config_batching.flush_interval, + jitter_interval=self.config_batching.jitter_interval, + retry_interval=self.config_batching.retry_interval, + max_retries=self.config_batching.max_retries, + max_retry_delay=self.config_batching.max_retry_delay, + exponential_base=self.config_batching.exponential_base, ) ) self.query_api = self.influxdb.query_api() @@ -133,8 +167,12 @@ def connect(self): self.get_list_retention_policies() def purge_influxdb(self): + """Purge the InfluxDB database. + + This method wipes the InfluxDB database by deleting all data within specified measurement types. 
+ """ separator_warning() - logging.warning(f"Wipe influxdb database {self.hostname}:{self.port}") + logging.warning(f"Wipe influxdb database {self.config.hostname}:{self.config.port}") start = "1970-01-01T00:00:00Z" stop = datetime.datetime.utcnow() measurement = [ @@ -144,26 +182,41 @@ def purge_influxdb(self): "production_detail", ] for mesure in measurement: - self.delete_api.delete(start, stop, f'_measurement="{mesure}"', self.bucket, org=self.org) - # CONFIG.set("wipe_influxdb", False) - logging.warning(f" => Data reset") + self.delete_api.delete(start, stop, f'_measurement="{mesure}"', self.config.bucket, org=self.config.org) + logging.warning(" => Data reset") def get_list_retention_policies(self): - if self.org == f"-": # InfluxDB 1.8 + """Get the list of retention policies. + + This method retrieves the list of retention policies for the InfluxDB database. + """ + if self.config.org == "-": # InfluxDB 1.8 self.retention = 0 self.max_retention = 0 return else: buckets = self.buckets_api.find_buckets().buckets for bucket in buckets: - if bucket.name == self.bucket: + if bucket.name == self.config.bucket: self.retention = bucket.retention_rules[0].every_seconds self.max_retention = datetime.datetime.now() - datetime.timedelta(seconds=self.retention) def get(self, start, end, measurement): - if self.org != f"-": + """Retrieve data from the InfluxDB database. + + This method retrieves data from the specified measurement within the given time range. + + Args: + start (str): Start time of the data range. + end (str): End time of the data range. + measurement (str): Name of the measurement to retrieve data from. + + Returns: + list: List of data points retrieved from the database. 
+ """ + if self.config.org != "-": query = f""" -from(bucket: "{self.bucket}") +from(bucket: "{self.config.bucket}") |> range(start: {start}, stop: {end}) |> filter(fn: (r) => r["_measurement"] == "{measurement}") """ @@ -175,9 +228,19 @@ def get(self, start, end, measurement): return output def count(self, start, end, measurement): - if self.org != f"-": + """Count the number of data points within a specified time range and measurement. + + Args: + start (str): Start time of the data range. + end (str): End time of the data range. + measurement (str): Name of the measurement to count data points from. + + Returns: + list: List of count values. + """ + if self.config.org != "-": query = f""" -from(bucket: "{self.bucket}") +from(bucket: "{self.config.bucket}") |> range(start: {start}, stop: {end}) |> filter(fn: (r) => r["_measurement"] == "{measurement}") |> filter(fn: (r) => r["_field"] == "Wh") @@ -192,9 +255,27 @@ def count(self, start, end, measurement): return output def delete(self, date, measurement): - self.delete_api.delete(date, date, f'_measurement="{measurement}"', self.bucket, org=self.org) + """Delete data from the InfluxDB database. + + This method deletes data from the specified measurement for a given date. + + Args: + date (str): Date of the data to be deleted. + measurement (str): Name of the measurement to delete data from. + """ + self.delete_api.delete(date, date, f'_measurement="{measurement}"', self.config.bucket, org=self.config.org) def write(self, tags, date=None, fields=None, measurement="log"): + """Write data to the InfluxDB database. + + This method writes data to the specified measurement in the InfluxDB database. + + Args: + tags (dict): Dictionary of tags associated with the data. + date (datetime.datetime, optional): Date and time of the data. Defaults to None. + fields (dict, optional): Dictionary of fields and their values. Defaults to None. + measurement (str, optional): Name of the measurement. Defaults to "log". 
+ """ date_max = self.max_retention if date is None: date_object = datetime.datetime.now() @@ -213,4 +294,4 @@ def write(self, tags, date=None, fields=None, measurement="log"): if fields is not None: for key, value in fields.items(): record["fields"][key] = value - self.write_api.write(bucket=self.bucket, org=self.org, record=record) + self.write_api.write(bucket=self.config.bucket, org=self.config.org, record=record) diff --git a/src/models/jobs.py b/src/models/jobs.py index 3e0eaa8c..7a93772d 100644 --- a/src/models/jobs.py +++ b/src/models/jobs.py @@ -1,10 +1,14 @@ +"""This module contains the Job class, which is responsible for importing data from the API.""" + import logging import time import traceback -from os import environ, getenv +from os import getenv +from database import DB +from database.usage_points import DatabaseUsagePoints from dependencies import export_finish, finish, get_version, log_usage_point_id, str2bool, title -from init import CONFIG, DB +from init import CONFIG from models.export_home_assistant import HomeAssistant from models.export_home_assistant_ws import HomeAssistantWs from models.export_influxdb import ExportInfluxDB @@ -21,9 +25,10 @@ class Job: + """Represents a job for importing data.""" + def __init__(self, usage_point_id=None): self.config = CONFIG - self.db = DB self.usage_point_id = usage_point_id self.usage_point_config = {} self.mqtt_config = self.config.mqtt_config() @@ -32,23 +37,24 @@ def __init__(self, usage_point_id=None): self.influxdb_config = self.config.influxdb_config() self.wait_job_start = 10 self.tempo_enable = False - if self.usage_point_id is None: - self.usage_points = self.db.get_usage_point_all() + self.usage_points_all = DatabaseUsagePoints().get_all() else: - self.usage_points = [self.db.get_usage_point(self.usage_point_id)] + self.usage_points_all = [DatabaseUsagePoints(self.usage_point_id).get()] def boot(self): + """Boots the import job.""" if str2bool(getenv("DEV")) or str2bool(getenv("DEBUG")): 
logging.warning("=> Import job disable") else: self.job_import_data() - def job_import_data(self, wait=True, target=None): - if self.db.lock_status(): + def job_import_data(self, wait=True, target=None): # noqa: PLR0912, PLR0915, C901 + """Import data from the API.""" + if DB.lock_status(): return {"status": False, "notif": "Importation déjà en cours..."} else: - self.db.lock() + DB.lock() if wait: title("Démarrage du job d'importation dans 10s") @@ -71,11 +77,12 @@ def job_import_data(self, wait=True, target=None): if target == "ecowatt" or target is None: self.get_ecowatt() - for self.usage_point_config in self.usage_points: - self.usage_point_id = self.usage_point_config.usage_point_id - log_usage_point_id(self.usage_point_id) - self.db.last_call_update(self.usage_point_id) - if self.usage_point_config.enable: + for usage_point_config in self.usage_points_all: + self.usage_point_config = usage_point_config + usage_point_id = usage_point_config.usage_point_id + log_usage_point_id(usage_point_id) + DatabaseUsagePoints(usage_point_id).last_call_update() + if usage_point_config.enable: ####################################################################################################### # CHECK ACCOUNT DATA if target == "account_status" or target is None: @@ -134,16 +141,24 @@ def job_import_data(self, wait=True, target=None): self.export_influxdb() else: logging.info( - f" => Point de livraison Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)." + " => Point de livraison Désactivé dans la configuration (Exemple: https://tinyurl.com/2kbd62s9)." ) finish() self.usage_point_id = None - self.db.unlock() + DB.unlock() return {"status": True, "notif": "Importation terminée"} def header_generate(self, token=True): + """Generate the header for the API request. + + Args: + token (bool, optional): Whether to include the authorization token in the header. Defaults to True. + + Returns: + dict: The generated header as a dictionary. 
+ """ output = { "Content-Type": "application/json", "call-service": "myelectricaldata", @@ -154,6 +169,14 @@ def header_generate(self, token=True): return output def get_gateway_status(self): + """Retrieve the status of the gateway. + + This method retrieves the status of the gateway by pinging it. If an error occurs during the process, + it logs the error message. + + Returns: + None + """ detail = "Récupération du statut de la passerelle :" try: title(detail) @@ -164,32 +187,57 @@ def get_gateway_status(self): logging.error(e) def get_account_status(self): + """Retrieve the account status information. + + This method retrieves the account status information for the usage point(s). + It sets the error log if there is an error in the status response. + + Returns: + None + """ detail = "Récupération des informations du compte" - def run(usage_point_config): - usage_point_id = usage_point_config.usage_point_id + def run(): + usage_point_id = self.usage_point_config.usage_point_id title(f"[{usage_point_id}] {detail} :") status = Status(headers=self.header_generate()).status(usage_point_id=usage_point_id) - if "error" in status and status["error"]: + if status.get("error"): message = f'{status["status_code"]} - {status["description"]["detail"]}' - self.db.set_error_log(usage_point_id, message) + DatabaseUsagePoints(usage_point_id).set_error_log(message) else: - self.db.set_error_log(usage_point_id, None) + DatabaseUsagePoints(usage_point_id).set_error_log(None) export_finish() try: - if self.usage_point_id is None: - for usage_point_config in self.usage_points: - if usage_point_config.enable: - run(usage_point_config) + if self.usage_point_config is None: + for usage_point_config in self.usage_points_all: + self.usage_point_config = usage_point_config + if self.usage_point_config.enable: + run() else: - run(self.usage_point_config) + run() except Exception as e: traceback.print_exc() logging.error(f"Erreur lors de la {detail.lower()}") logging.error(e) def 
get_contract(self): + """Retrieve contract information for the usage points. + + This method iterates over the list of usage points and retrieves the contract information + for each enabled usage point. If a specific usage point ID is provided, it retrieves the + contract information only for that usage point. + + Args: + self: The current instance of the Jobs class. + + Returns: + None + + Raises: + Exception: If an error occurs during the retrieval of contract information. + + """ detail = "Récupération des informations contractuelles" def run(usage_point_config): @@ -204,7 +252,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -215,6 +263,18 @@ def run(usage_point_config): logging.error(e) def get_addresses(self): + """Retrieve the postal addresses for the usage points. + + This method iterates over the list of usage points and retrieves the postal addresses + for each enabled usage point. It calls the `Address.get()` method to fetch the addresses + and then calls the `export_finish()` function to indicate the completion of the export. + + If a specific usage point ID is provided, only that usage point will be processed. + + Raises: + Exception: If an error occurs during the retrieval of postal addresses. 
+ + """ detail = "Récupération des coordonnées postales" def run(usage_point_config): @@ -225,7 +285,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -249,7 +309,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -273,7 +333,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -301,7 +361,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -329,7 +389,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -350,7 +410,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -362,11 +422,11 @@ def run(usage_point_config): def get_tempo(self): try: - title(f"Récupération des données Tempo :") + title("Récupération des données Tempo :") Tempo().fetch() - title(f"Calcul des jours Tempo :") + title("Calcul des jours Tempo :") Tempo().calc_day() - title(f"Récupération des tarifs Tempo :") + title("Récupération des tarifs Tempo :") Tempo().fetch_price() export_finish() except Exception as e: @@ -376,7 +436,7 @@ def 
get_tempo(self): def get_ecowatt(self): try: - title(f"Récupération des données EcoWatt :") + title("Récupération des données EcoWatt :") Ecowatt().fetch() export_finish() except Exception as e: @@ -400,7 +460,7 @@ def run(usage_point_config): try: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -411,10 +471,10 @@ def run(usage_point_config): logging.error(e) def export_home_assistant(self, target=None): + """Export data to Home Assistant.""" detail = "Exportation des données vers Home Assistant (via MQTT)" - def run(usage_point_config, target): - usage_point_id = usage_point_config.usage_point_id + def run(usage_point_id, target): title(f"[{usage_point_id}] {detail}") if target is None: HomeAssistant(usage_point_id).export() @@ -426,11 +486,11 @@ def run(usage_point_config, target): if "enable" in self.home_assistant_config and str2bool(self.home_assistant_config["enable"]): if "enable" in self.mqtt_config and str2bool(self.mqtt_config["enable"]): if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_id, usage_point_config in self.usage_points_all.items(): if usage_point_config.enable: - run(usage_point_config, target) + run(usage_point_id, target) else: - run(self.usage_point_config, target) + run(self.usage_point_id, target) else: logging.critical( "L'export Home Assistant est dépendant de MQTT, " @@ -480,7 +540,7 @@ def run(usage_point_config): try: if "enable" in self.influxdb_config and self.influxdb_config["enable"]: if self.usage_point_id is None: - for usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: @@ -552,7 +612,7 @@ def run(usage_point_config): try: if "enable" in self.mqtt_config and self.mqtt_config["enable"]: if self.usage_point_id is None: - for 
usage_point_config in self.usage_points: + for usage_point_config in self.usage_points_all: if usage_point_config.enable: run(usage_point_config) else: diff --git a/src/models/query.py b/src/models/query.py index 478472a1..1273a9e4 100755 --- a/src/models/query.py +++ b/src/models/query.py @@ -3,14 +3,14 @@ import requests from dependencies import str2bool -from init import CONFIG +from database.config import DatabaseConfig class Query(object): def __init__(self, endpoint, headers=None): self.endpoint = endpoint self.timeout = 60 - check_ssl = CONFIG.get("ssl") + check_ssl = DatabaseConfig().get("ssl") if check_ssl and "gateway" in check_ssl: self.ssl_valid = str2bool(check_ssl["gateway"]) else: diff --git a/src/models/query_address.py b/src/models/query_address.py index 74461893..90673cf8 100755 --- a/src/models/query_address.py +++ b/src/models/query_address.py @@ -1,24 +1,28 @@ +"""Fetch address data from the API and store it in the database.""" + import json import logging import traceback -from config import URL -from dependencies import title -from init import CONFIG, DB +from config import CODE_200_SUCCESS, URL +from database.addresses import DatabaseAddresses +from database.usage_points import DatabaseUsagePoints +from models.config import Config from models.query import Query class Address: + """Fetch address data from the API and store it in the database.""" + def __init__(self, headers, usage_point_id): - self.config = CONFIG - self.db = DB self.url = URL self.headers = headers self.usage_point_id = usage_point_id - self.usage_point_config = self.config.usage_point_id_config(self.usage_point_id) + self.usage_point_config = Config().usage_point_id_config(self.usage_point_id) def run(self): + """Run the address query process.""" name = "addresses" endpoint = f"{name}/{self.usage_point_id}" if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: @@ -26,7 +30,7 @@ def run(self): target = f"{self.url}/{endpoint}" response = 
Query(endpoint=target, headers=self.headers).get() - if response.status_code == 200: + if response.status_code == CODE_200_SUCCESS: try: response_json = json.loads(response.text) response = response_json["customer"]["usage_points"][0] @@ -34,8 +38,7 @@ def run(self): usage_point_addresses = usage_point["usage_point_addresses"] response = usage_point_addresses response.update(usage_point) - self.db.set_addresse( - self.usage_point_id, + DatabaseAddresses(self.usage_point_id).set( { "usage_points": str(usage_point["usage_point_id"]) if usage_point["usage_point_id"] is not None @@ -61,7 +64,7 @@ def run(self): "geo_points": str(usage_point_addresses["geo_points"]) if usage_point_addresses["geo_points"] is not None else "", - }, + } ) except Exception as e: logging.error(e) @@ -75,25 +78,24 @@ def run(self): return {"error": True, "description": json.loads(response.text)["detail"]} def get(self): - current_cache = self.db.get_addresse(usage_point_id=self.usage_point_id) + """Retrieve address data from the database and format it as a dictionary.""" + current_cache = DatabaseAddresses(self.usage_point_id).get() if not current_cache: # No cache logging.info(" => Pas de cache") result = self.run() + elif hasattr(self.usage_point_config, "refresh_addresse") and self.usage_point_config.refresh_addresse: + logging.info(" => Mise à jour du cache") + result = self.run() + self.usage_point_config.refresh_addresse = False + DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) else: - # Refresh cache - if hasattr(self.usage_point_config, "refresh_addresse") and self.usage_point_config.refresh_addresse: - logging.info(" => Mise à jour du cache") - result = self.run() - self.usage_point_config.refresh_addresse = False - DB.set_usage_point(self.usage_point_id, self.usage_point_config.__dict__) - else: - # Get data in cache - logging.info(" => Récupération du cache") - result = {} - for column in current_cache.__table__.columns: - result[column.name] = 
str(getattr(current_cache, column.name)) - logging.debug(f" => {result}") + # Get data in cache + logging.info(" => Récupération du cache") + result = {} + for column in current_cache.__table__.columns: + result[column.name] = str(getattr(current_cache, column.name)) + logging.debug(f" => {result}") if "error" not in result: for key, value in result.items(): if key != "usage_point_addresses": diff --git a/src/models/query_contract.py b/src/models/query_contract.py index 83ecf367..6bac0ade 100755 --- a/src/models/query_contract.py +++ b/src/models/query_contract.py @@ -1,18 +1,21 @@ +"""Query contract from gateway.""" + import datetime import json import logging import re import traceback -from config import URL -from dependencies import title -from init import DB +from config import CODE_200_SUCCESS, URL +from database.contracts import DatabaseContracts +from database.usage_points import DatabaseUsagePoints from models.query import Query class Contract: + """Query contract from gateway.""" + def __init__(self, headers, usage_point_id, config): - self.db = DB self.url = URL self.headers = headers @@ -20,6 +23,7 @@ def __init__(self, headers, usage_point_id, config): self.usage_point_config = config def run(self): + """Run the contract query process.""" name = "contracts" endpoint = f"{name}/{self.usage_point_id}" if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: @@ -27,7 +31,7 @@ def run(self): target = f"{self.url}/{endpoint}" query_response = Query(endpoint=target, headers=self.headers).get() - if query_response.status_code == 200: + if query_response.status_code == CODE_200_SUCCESS: try: response_json = json.loads(query_response.text) response = response_json["customer"]["usage_points"][0] @@ -37,7 +41,7 @@ def run(self): response.update(usage_point) if contracts["offpeak_hours"] is not None: - offpeak_hours = re.search("HC \((.*)\)", contracts["offpeak_hours"]).group(1) + offpeak_hours = re.search(r"HC \((.*)\)", 
contracts["offpeak_hours"]).group(1) else: offpeak_hours = "" if "last_activation_date" in contracts and contracts["last_activation_date"] is not None: @@ -58,8 +62,7 @@ def run(self): ).replace(tzinfo=None) else: last_distribution_tariff_change_date = contracts["last_distribution_tariff_change_date"] - self.db.set_contract( - self.usage_point_id, + DatabaseContracts(self.usage_point_id).set( { "usage_point_status": usage_point["usage_point_status"], "meter_type": usage_point["meter_type"], @@ -76,7 +79,7 @@ def run(self): "offpeak_hours_6": offpeak_hours, "contract_status": contracts["contract_status"], "last_distribution_tariff_change_date": last_distribution_tariff_change_date, - }, + } ) except Exception as e: logging.error(e) @@ -93,25 +96,23 @@ def run(self): } def get(self): - current_cache = self.db.get_contract(usage_point_id=self.usage_point_id) + current_cache = DatabaseContracts(self.usage_point_id).get() if not current_cache: # No cache logging.info(" => Pas de cache") result = self.run() + elif hasattr(self.usage_point_config, "refresh_contract") and self.usage_point_config.refresh_contract: + logging.info(" => Mise à jour du cache") + result = self.run() + self.usage_point_config.refresh_contract = False + DatabaseUsagePoints(self.usage_point_id).set(self.usage_point_config.__dict__) else: - # Refresh cache - if hasattr(self.usage_point_config, "refresh_contract") and self.usage_point_config.refresh_contract: - logging.info(" => Mise à jour du cache") - result = self.run() - self.usage_point_config.refresh_contract = False - DB.set_usage_point(self.usage_point_id, self.usage_point_config.__dict__) - else: - # Get data in cache - logging.info(" => Récupération du cache") - result = {} - for column in current_cache.__table__.columns: - result[column.name] = str(getattr(current_cache, column.name)) - logging.debug(f" => {result}") + # Get data in cache + logging.info(" => Récupération du cache") + result = {} + for column in 
current_cache.__table__.columns: + result[column.name] = str(getattr(current_cache, column.name)) + logging.debug(f" => {result}") if "error" not in result: for key, value in result.items(): logging.info(f"{key}: {value}") diff --git a/src/models/query_daily.py b/src/models/query_daily.py index fcae3454..b1058a2a 100644 --- a/src/models/query_daily.py +++ b/src/models/query_daily.py @@ -1,23 +1,33 @@ +"""The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point.""" + import json import logging from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta -from config import DAILY_MAX_DAYS, URL -from init import CONFIG, DB +from config import ( + CODE_200_SUCCESS, + CODE_400_BAD_REQUEST, + CODE_403_FORBIDDEN, + CODE_409_CONFLICT, + CODE_500_INTERNAL_SERVER_ERROR, + DAILY_MAX_DAYS, + TIMEZONE_UTC, + URL, +) +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.usage_points import DatabaseUsagePoints +from dependencies import daterange from models.query import Query from models.stat import Stat -def daterange(start_date, end_date): - for n in range(int((end_date - start_date).days)): - yield start_date + timedelta(n) - - class Daily: - """ - The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point. It provides methods for fetching, resetting, deleting, and blacklisting daily data. + """The 'Daily' class represents a daily data retrieval and manipulation process for a specific usage point. + + It provides methods for fetching, resetting, deleting, and blacklisting daily data. Attributes: config (dict): The configuration settings. 
@@ -68,18 +78,16 @@ class Daily: """ def __init__(self, headers, usage_point_id, measure_type="consumption"): - self.config = CONFIG - self.db = DB self.url = URL self.max_daily = 1095 self.date_format = "%Y-%m-%d" self.date_detail_format = "%Y-%m-%d %H:%M:%S" self.headers = headers self.usage_point_id = usage_point_id - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) - self.contract = self.db.get_contract(self.usage_point_id) + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() self.daily_max_days = int(DAILY_MAX_DAYS) - self.max_days_date = datetime.utcnow() - timedelta(days=self.daily_max_days) + self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) if ( measure_type == "consumption" and hasattr(self.usage_point_config, "consumption_max_date") @@ -107,20 +115,19 @@ def __init__(self, headers, usage_point_id, measure_type="consumption"): if measure_type == "consumption": if hasattr(self.usage_point_config, "consumption_price_base"): self.base_price = self.usage_point_config.consumption_price_base - else: - if hasattr(self.usage_point_config, "production_price"): - self.base_price = self.usage_point_config.production_price + elif hasattr(self.usage_point_config, "production_price"): + self.base_price = self.usage_point_config.production_price def run(self, begin, end): + """Retrieves and stores daily data for a specified date range.""" begin_str = begin.strftime(self.date_format) end_str = end.strftime(self.date_format) logging.info(f"Récupération des données : {begin_str} => {end_str}") endpoint = f"daily_{self.measure_type}/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - # if begin < now() - timedelta(days=7): if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: endpoint += "/cache" try: - current_data = self.db.get_daily(self.usage_point_id, begin, end, self.measure_type) + 
current_data = DatabaseDaily(self.usage_point_id, self.measure_type).get(begin, end) if not current_data["missing_data"]: logging.info(" => Toutes les données sont déjà en cache.") output = [] @@ -130,7 +137,7 @@ def run(self, begin, end): else: logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() - if data.status_code == 403: + if data.status_code == CODE_403_FORBIDDEN: if hasattr(data, "text"): description = json.loads(data.text)["detail"] else: @@ -138,7 +145,7 @@ def run(self, begin, end): if hasattr(data, "status_code"): status_code = data.status_code else: - status_code = 500 + status_code = CODE_500_INTERNAL_SERVER_ERROR return { "error": True, "description": description, @@ -147,31 +154,31 @@ def run(self, begin, end): } else: blacklist = 0 - max_histo = datetime.combine(datetime.now(), datetime.max.time()) - timedelta(days=1) + max_histo = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - timedelta( + days=1 + ) if hasattr(data, "status_code"): - if data.status_code == 200: + if data.status_code == CODE_200_SUCCESS: meter_reading = json.loads(data.text)["meter_reading"] interval_reading = meter_reading["interval_reading"] interval_reading_tmp = {} for interval_reading_data in interval_reading: interval_reading_tmp[interval_reading_data["date"]] = interval_reading_data["value"] for single_date in daterange(begin, end): - if single_date < max_histo: - if single_date.strftime(self.date_format) in interval_reading_tmp: + single_date_tz = single_date.replace(tzinfo=TIMEZONE_UTC) + max_histo = max_histo.replace(tzinfo=TIMEZONE_UTC) + if single_date_tz < max_histo: + if single_date_tz.strftime(self.date_format) in interval_reading_tmp: # FOUND - self.db.insert_daily( - usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), - value=interval_reading_tmp[single_date.strftime(self.date_format)], 
+ DatabaseDaily(self.usage_point_id, self.measure_type).insert( + date=datetime.combine(single_date_tz, datetime.min.time()), + value=interval_reading_tmp[single_date_tz.strftime(self.date_format)], blacklist=blacklist, - measurement_direction=self.measure_type, ) else: # NOT FOUND - self.db.daily_fail_increment( - usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), - measurement_direction=self.measure_type, + DatabaseDaily(self.usage_point_id, self.measure_type).fail_increment( + date=datetime.combine(single_date_tz, datetime.min.time()), ) return interval_reading else: @@ -188,7 +195,7 @@ def run(self, begin, end): if hasattr(data, "status_code"): status_code = data.status_code else: - status_code = 500 + status_code = CODE_500_INTERNAL_SERVER_ERROR return { "error": True, "description": description, @@ -222,10 +229,16 @@ def get(self): Note: The end date is exclusive, meaning it is not included in the range. """ - end = datetime.combine((datetime.now() + timedelta(days=2)), datetime.max.time()) - begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()) + end = datetime.combine((datetime.now(tz=TIMEZONE_UTC) + timedelta(days=2)), datetime.max.time()).astimezone( + TIMEZONE_UTC + ) + begin = datetime.combine(end - relativedelta(days=self.max_daily), datetime.min.time()).astimezone( + TIMEZONE_UTC + ) finish = True result = [] + print(self.activation_date, begin) + self.activation_date = self.activation_date.astimezone(TIMEZONE_UTC) while finish: if self.max_days_date > begin: # Max day reached @@ -255,40 +268,48 @@ def get(self): "error": True, "description": "MyElectricalData est indisponible.", } - if "error" in response and response["error"]: + if "error" in response and response.get("error"): logging.error("Echec de la récupération des données") logging.error(f'=> {response["description"]}') logging.error(f"=> {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if 
"status_code" in response and (response["status_code"] == 409 or response["status_code"] == 400): + if "status_code" in response and ( + response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST + ): finish = False logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error(f"Prochain lancement à {datetime.now() + timedelta(seconds=self.config.get('cycle'))}") + logging.error( + "Prochain lancement à " + f"{datetime.now(tz=TIMEZONE_UTC) + timedelta(seconds=self.config.get('cycle'))}" + ) return result def reset(self, date=None): + """Resets the daily data for the usage point, optionally for a specific date.""" if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.reset_daily(self.usage_point_id, date, self.measure_type) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseDaily(self.usage_point_id, self.measure_type).reset(date) return True def delete(self, date=None): + """Deletes the daily data for the usage point, optionally for a specific date.""" if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.delete_daily(self.usage_point_id, date, self.measure_type) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseDaily(self.usage_point_id, self.measure_type).delete(date) return True def fetch(self, date): + """Fetches and returns the daily data for a specific date.""" if date is not None: - date = datetime.strptime(date, self.date_format) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) result = self.run( datetime.combine(date - timedelta(days=2), datetime.min.time()), datetime.combine(date + timedelta(days=2), datetime.min.time()), ) - if "error" in result and result["error"]: + if result.get("error"): return { "error": True, "notif": result["description"], - "fail_count": self.db.get_daily_fail_count(self.usage_point_id, date, self.measure_type), + 
"fail_count": DatabaseDaily(self.usage_point_id, date, self.measure_type).get_fail_count(), } for item in result: if date.strftime(self.date_format) in item["date"]: @@ -299,11 +320,12 @@ def fetch(self, date): return { "error": True, "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": self.db.get_daily_fail_count(self.usage_point_id, date, self.measure_type), + "fail_count": DatabaseDaily(self.usage_point_id, date, self.measure_type).get_fail_count(), } def blacklist(self, date, action): + """Adds or removes a date from the blacklist for the usage point.""" if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.blacklist_daily(self.usage_point_id, date, action, self.measure_type) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseDaily(self.usage_point_id, date, self.measure_type).blacklist(date, action) return True diff --git a/src/models/query_detail.py b/src/models/query_detail.py index 8a0adfbc..5fc91902 100644 --- a/src/models/query_detail.py +++ b/src/models/query_detail.py @@ -3,31 +3,37 @@ import re from datetime import datetime, timedelta -from config import DETAIL_MAX_DAYS, URL -from init import CONFIG, DB -from models.database import ConsumptionDetail, ProductionDetail +from config import ( + CODE_200_SUCCESS, + CODE_400_BAD_REQUEST, + CODE_403_FORBIDDEN, + CODE_409_CONFLICT, + CODE_500_INTERNAL_SERVER_ERROR, + DETAIL_MAX_DAYS, + TIMEZONE_UTC, + URL, +) +from database.contracts import DatabaseContracts +from database.detail import DatabaseDetail +from database.usage_points import DatabaseUsagePoints +from db_schema import ConsumptionDetail, ProductionDetail from models.query import Query -def daterange(start_date, end_date): - for n in range(int((end_date - start_date).days)): - yield start_date + timedelta(n) - - class Detail: + """Manage detail data.""" + def __init__(self, headers, usage_point_id, measure_type="consumption"): - self.config = 
CONFIG - self.db = DB self.url = URL self.max_detail = 7 self.date_format = "%Y-%m-%d" self.date_detail_format = "%Y-%m-%d %H:%M:%S" self.headers = headers self.usage_point_id = usage_point_id - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) - self.contract = self.db.get_contract(self.usage_point_id) + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() self.daily_max_days = int(DETAIL_MAX_DAYS) - self.max_days_date = datetime.utcnow() - timedelta(days=self.daily_max_days) + self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) if ( measure_type == "consumption" and hasattr(self.usage_point_config, "consumption_detail_max_date") @@ -59,6 +65,7 @@ def __init__(self, headers, usage_point_id, measure_type="consumption"): 5: self.usage_point_config.offpeak_hours_5, 6: self.usage_point_config.offpeak_hours_6, } + self.activation_date = self.activation_date.replace(tzinfo=TIMEZONE_UTC) self.measure_type = measure_type self.base_price = 0 if measure_type == "consumption": @@ -71,22 +78,17 @@ def __init__(self, headers, usage_point_id, measure_type="consumption"): self.base_price = self.usage_point_config.production_price def run(self, begin, end): + """Run the detail query.""" if begin.strftime(self.date_format) == end.strftime(self.date_format): end = end + timedelta(days=1) begin_str = begin.strftime(self.date_format) end_str = end.strftime(self.date_format) logging.info(f"Récupération des données : {begin_str} => {end_str}") endpoint = f"{self.measure_type}_load_curve/{self.usage_point_id}/start/{begin_str}/end/{end_str}" - # if begin <= (datetime.now() - timedelta(days=8)): if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: endpoint += "/cache" try: - current_data = self.db.get_detail(self.usage_point_id, begin, end, self.measure_type) - # current_week = datetime.now() - 
timedelta(days=self.max_detail + 1) - # last_week = False - # if current_week <= begin: - # last_week = True - # if not current_data["missing_data"] and not last_week: + current_data = DatabaseDetail(self.usage_point_id, self.measure_type).get(begin, end) if not current_data["missing_data"]: logging.info(" => Toutes les données sont déjà en cache.") output = [] @@ -97,7 +99,7 @@ def run(self, begin, end): logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() if hasattr(data, "status_code"): - if data.status_code == 403: + if data.status_code == CODE_403_FORBIDDEN: if hasattr(data, "text"): description = json.loads(data.text)["detail"] else: @@ -105,51 +107,27 @@ def run(self, begin, end): if hasattr(data, "status_code"): status_code = data.status_code else: - status_code = 500 + status_code = CODE_500_INTERNAL_SERVER_ERROR return { "error": True, "description": description, "status_code": status_code, "exit": True, } - if data.status_code == 200: + if data.status_code == CODE_200_SUCCESS: meter_reading = json.loads(data.text)["meter_reading"] for interval_reading in meter_reading["interval_reading"]: value = interval_reading["value"] interval = re.findall(r"\d+", interval_reading["interval_length"])[0] date = interval_reading["date"] - date_object = datetime.strptime(date, self.date_detail_format) + date_object = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE_UTC) # CHANGE DATE TO BEGIN RANGE date = date_object - timedelta(minutes=int(interval)) - # date = date.strftime(self.date_detail_format) - # print(date) - # GET WEEKDAY - # date_days = date_object.weekday() - # date_hour_minute = date_object.strftime('%H:%M') - # measure_type = "HP" - # day_offpeak_hours = self.offpeak_hours[date_days] - # if day_offpeak_hours is not None: - # for offpeak_hour in day_offpeak_hours.split(";"): - # if offpeak_hour != "None" and offpeak_hour != 
"" and offpeak_hour is not None: - # offpeak_begin = offpeak_hour.split("-")[0].replace('h', ':').replace('H', ':') - # # FORMAT HOUR WITH 2 DIGIT - # offpeak_begin = datetime.strptime(offpeak_begin, '%H:%M') - # offpeak_begin = datetime.strftime(offpeak_begin, '%H:%M') - # offpeak_stop = offpeak_hour.split("-")[1].replace('h', ':').replace('H', ':') - # # FORMAT HOUR WITH 2 DIGIT - # offpeak_stop = datetime.strptime(offpeak_stop, '%H:%M') - # offpeak_stop = datetime.strftime(offpeak_stop, '%H:%M') - # result = is_between(date_hour_minute, (offpeak_begin, offpeak_stop)) - # if result: - # measure_type = "HC" - self.db.insert_detail( - usage_point_id=self.usage_point_id, + DatabaseDetail(self.usage_point_id, self.measure_type).insert( date=date, value=value, interval=interval, - measure_type="", blacklist=0, - mesure_type=self.measure_type, ) return meter_reading["interval_reading"] else: @@ -166,7 +144,7 @@ def run(self, begin, end): if hasattr(data, "status_code"): status_code = data.status_code else: - status_code = 500 + status_code = CODE_500_INTERNAL_SERVER_ERROR return { "error": True, "description": description, @@ -177,8 +155,13 @@ def run(self, begin, end): logging.error(e) def get(self): - end = datetime.combine((datetime.now() + timedelta(days=2)), datetime.max.time()) - begin = datetime.combine(end - timedelta(days=self.max_detail), datetime.min.time()) + """Get the detail data.""" + end = datetime.combine((datetime.now(tz=TIMEZONE_UTC) + timedelta(days=2)), datetime.max.time()).replace( + tzinfo=TIMEZONE_UTC + ) + begin = datetime.combine(end - timedelta(days=self.max_detail), datetime.min.time()).replace( + tzinfo=TIMEZONE_UTC + ) finish = True result = [] while finish: @@ -210,57 +193,75 @@ def get(self): "error": True, "description": "MyElectricalData est indisponible.", } - if "error" in response and response["error"]: + if "error" in response and response.get("error"): logging.error("Echec de la récupération des données.") - logging.error(f' => 
{response["description"]}') - logging.error(f" => {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if "status_code" in response and (response["status_code"] == 409 or response["status_code"] == 400): + logging.error(" => %s", response["description"]) + logging.error(" => %s -> %s", begin.strftime(self.date_format), end.strftime(self.date_format)) + if "status_code" in response and ( + response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST + ): finish = False logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error(f"Prochain lancement à {datetime.now() + timedelta(seconds=self.config.get('cycle'))}") + logging.error( + "Prochain lancement à %s", + datetime.now(tz=TIMEZONE_UTC) + timedelta(seconds=self.config.get("cycle")), + ) return result def reset_daily(self, date): - begin = datetime.combine(datetime.strptime(date, self.date_format), datetime.min.time()) - end = datetime.combine(datetime.strptime(date, self.date_format), datetime.max.time()) - self.db.reset_detail_range(self.usage_point_id, begin, end, self.measure_type) + """Reset the detail for a specific date.""" + begin = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.min.time() + ).astimezone(TIMEZONE_UTC) + end = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.max.time() + ).astimezone(TIMEZONE_UTC) + DatabaseDetail(self.usage_point_id, self.measure_type).reset_range(begin, end) return True def delete_daily(self, date): - begin = datetime.combine(datetime.strptime(date, self.date_format), datetime.min.time()) - end = datetime.combine(datetime.strptime(date, self.date_format), datetime.max.time()) - self.db.delete_detail_range(self.usage_point_id, begin, end, self.measure_type) + """Delete the detail for a specific date.""" + begin = datetime.combine( + datetime.strptime(date, 
self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.min.time() + ).astimezone(TIMEZONE_UTC) + end = datetime.combine( + datetime.strptime(date, self.date_format).replace(tzinfo=TIMEZONE_UTC), datetime.max.time() + ).astimezone(TIMEZONE_UTC) + DatabaseDetail(self.usage_point_id, self.measure_type).delete_range(begin, end) return True def reset(self, date=None): + """Reset the detail for a specific date.""" if date is not None: - date = datetime.strptime(date, self.date_detail_format) - self.db.reset_detail(self.usage_point_id, date, self.measure_type) + date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE_UTC) + DatabaseDetail(self.usage_point_id, self.measure_type).reset(date) return True def delete(self, date=None): + """Delete the detail for a specific date.""" if date is not None: - date = datetime.strptime(date, self.date_detail_format) - self.db.delete_detail(self.usage_point_id, date, self.measure_type) + date = datetime.strptime(date, self.date_detail_format).astimezone(TIMEZONE_UTC) + DatabaseDetail(self.usage_point_id, self.measure_type).delete(date) return True def fetch(self, date): + """Fetch the detail for a specific date.""" if date is not None: - date = datetime.strptime(date, self.date_format) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) result = self.run( datetime.combine(date - timedelta(days=2), datetime.min.time()), datetime.combine(date + timedelta(days=2), datetime.min.time()), ) - if "error" in result and result["error"]: + if result.get("error"): return { "error": True, "notif": result["description"], - "fail_count": self.db.get_detail_fail_count(self.usage_point_id, date, self.measure_type), + "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), } for item in result: - if type(item["date"]) == str: - item["date"] = datetime.strptime(item["date"], self.date_detail_format) + if isinstance(item["date"], str): + item["date"] = 
datetime.strptime(item["date"], self.date_detail_format).astimezone(TIMEZONE_UTC) result_date = item["date"].strftime(self.date_format) if date.strftime(self.date_format) in result_date: item["date"] = result_date @@ -269,11 +270,5 @@ def fetch(self, date): return { "error": True, "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": self.db.get_detail_fail_count(self.usage_point_id, date, self.measure_type), + "fail_count": DatabaseDetail(self.usage_point_id, self.measure_type).get_fail_count(date), } - - -def is_between(time, time_range): - if time_range[1] < time_range[0]: - return time > time_range[0] or time <= time_range[1] - return time_range[0] < time <= time_range[1] diff --git a/src/models/query_ecowatt.py b/src/models/query_ecowatt.py index de803deb..c3b6fb97 100644 --- a/src/models/query_ecowatt.py +++ b/src/models/query_ecowatt.py @@ -1,3 +1,5 @@ +"""Fetch and store Ecowatt data.""" + import ast import json import logging @@ -6,30 +8,31 @@ from dateutil.relativedelta import relativedelta -from config import URL +from config import CODE_200_SUCCESS, TIMEZONE, URL +from database.ecowatt import DatabaseEcowatt from dependencies import title -from init import CONFIG, DB from models.query import Query class Ecowatt: + """Class for fetching and storing Ecowatt data.""" + def __init__(self): - self.config = CONFIG - self.db = DB self.url = URL - self.valid_date = datetime.combine(datetime.now() + relativedelta(days=2), datetime.min.time()) + self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=2), datetime.min.time()) def run(self): - start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") - end = (datetime.now() + relativedelta(days=3)).strftime("%Y-%m-%d") + """Fetches Ecowatt data from the API and stores it in the database.""" + start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") + end = (datetime.now(tz=TIMEZONE) + 
relativedelta(days=3)).strftime("%Y-%m-%d") target = f"{self.url}/rte/ecowatt/{start}/{end}" query_response = Query(endpoint=target).get() - if query_response.status_code == 200: + if query_response.status_code == CODE_200_SUCCESS: try: response_json = json.loads(query_response.text) for date, data in response_json.items(): - date = datetime.strptime(date, "%Y-%m-%d") - self.db.set_ecowatt(date, data["value"], data["message"], str(data["detail"])) + date_obj = datetime.strptime(date, "%Y-%m-%d").astimezone(TIMEZONE) + DatabaseEcowatt().set(date_obj, data["value"], data["message"], str(data["detail"])) response = response_json except Exception as e: logging.error(e) @@ -46,7 +49,8 @@ def run(self): } def get(self): - data = self.db.get_ecowatt() + """Retrieve Ecowatt data from the database and format it as a dictionary.""" + data = DatabaseEcowatt().get() output = {} for d in data: if hasattr(d, "date") and hasattr(d, "value") and hasattr(d, "message") and hasattr(d, "detail"): @@ -58,11 +62,11 @@ def get(self): return output def fetch(self): - current_cache = self.db.get_ecowatt() + """Fetches Ecowatt data and returns the result.""" + current_cache = DatabaseEcowatt().get() result = {} if not current_cache: - # No cache - title(f"No cache") + title("No cache") result = self.run() else: last_item = current_cache[0] diff --git a/src/models/query_power.py b/src/models/query_power.py index fe54b16c..74363163 100644 --- a/src/models/query_power.py +++ b/src/models/query_power.py @@ -1,31 +1,40 @@ +"""Model to manage the power consumption data.""" + import json import logging from datetime import datetime, timedelta -from config import DAILY_MAX_DAYS, URL -from init import CONFIG, DB +from config import ( + CODE_200_SUCCESS, + CODE_400_BAD_REQUEST, + CODE_409_CONFLICT, + CODE_500_INTERNAL_SERVER_ERROR, + DAILY_MAX_DAYS, + TIMEZONE_UTC, + URL, +) +from database.config import DatabaseConfig +from database.contracts import DatabaseContracts +from database.max_power import 
DatabaseMaxPower +from database.usage_points import DatabaseUsagePoints +from dependencies import daterange from models.query import Query -def daterange(start_date, end_date): - for n in range(int((end_date - start_date).days)): - yield start_date + timedelta(n) - - class Power: + """Class to manage the power consumption data.""" + def __init__(self, headers, usage_point_id): - self.config = CONFIG - self.db = DB self.url = URL self.max_daily = 1095 self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" self.headers = headers self.usage_point_id = usage_point_id - self.usage_point_config = self.db.get_usage_point(self.usage_point_id) - self.contract = self.db.get_contract(self.usage_point_id) + self.usage_point_config = DatabaseUsagePoints(self.usage_point_id).get() + self.contract = DatabaseContracts(self.usage_point_id).get() self.daily_max_days = DAILY_MAX_DAYS - self.max_days_date = datetime.utcnow() - timedelta(days=self.daily_max_days) + self.max_days_date = datetime.now(tz=TIMEZONE_UTC) - timedelta(days=self.daily_max_days) if ( hasattr(self.usage_point_config, "consumption_max_date") and self.usage_point_config.consumption_max_date != "" @@ -40,8 +49,10 @@ def __init__(self, headers, usage_point_id): self.activation_date = self.contract.last_activation_date else: self.activation_date = self.max_days_date + self.activation_date = self.activation_date.astimezone(TIMEZONE_UTC) def run(self, begin, end): + """Run the query to get the daily power consumption data.""" begin_str = begin.strftime(self.date_format) end_str = end.strftime(self.date_format) logging.info(f"Récupération des données : {begin_str} => {end_str}") @@ -49,7 +60,7 @@ def run(self, begin, end): if hasattr(self.usage_point_config, "cache") and self.usage_point_config.cache: endpoint += "/cache" try: - current_data = self.db.get_daily_power(self.usage_point_id, begin, end) + current_data = DatabaseMaxPower(self.usage_point_id).get_power(begin, end) if not 
current_data["missing_data"]: logging.info(" => Toutes les données sont déjà en cache.") output = [] @@ -57,41 +68,40 @@ def run(self, begin, end): output.append({"date": date, "value": data["value"]}) return output else: - logging.info(f" Chargement des données depuis MyElectricalData {begin_str} => {end_str}") + logging.info(" Chargement des données depuis MyElectricalData %s => %s", begin_str, end_str) data = Query(endpoint=f"{self.url}/{endpoint}/", headers=self.headers).get() blacklist = 0 - max_histo = datetime.combine(datetime.now(), datetime.max.time()) - timedelta(days=1) + max_histo = datetime.combine(datetime.now(tz=TIMEZONE_UTC), datetime.max.time()) - timedelta(days=1) if hasattr(data, "status_code"): - if data.status_code == 200: + if data.status_code == CODE_200_SUCCESS: meter_reading = json.loads(data.text)["meter_reading"] interval_reading = meter_reading["interval_reading"] interval_reading_tmp = {} for interval_reading_data in interval_reading: - date = datetime.strptime(interval_reading_data["date"], self.date_format_detail) - date = datetime.combine(date, datetime.min.time()) + date_1 = datetime.strptime( + interval_reading_data["date"], self.date_format_detail + ).astimezone(TIMEZONE_UTC) + date = datetime.combine(date_1, datetime.min.time()) interval_reading_tmp[date.strftime(self.date_format)] = { - "date": datetime.strptime( - interval_reading_data["date"], - self.date_format_detail, - ), + "date": date_1, "value": interval_reading_data["value"], } for single_date in daterange(begin, end): - if single_date < max_histo: - if single_date.strftime(self.date_format) in interval_reading_tmp: + single_date_tz = single_date.replace(tzinfo=TIMEZONE_UTC) + max_histo = max_histo.replace(tzinfo=TIMEZONE_UTC) + if single_date_tz < max_histo: + if single_date_tz.strftime(self.date_format) in interval_reading_tmp: # FOUND - single_date_value = interval_reading_tmp[single_date.strftime(self.date_format)] - self.db.insert_daily_max_power( - 
usage_point_id=self.usage_point_id, - date=datetime.combine(single_date, datetime.min.time()), + single_date_value = interval_reading_tmp[single_date_tz.strftime(self.date_format)] + DatabaseMaxPower(self.usage_point_id).insert( + date=datetime.combine(single_date_tz, datetime.min.time()), event_date=single_date_value["date"], value=single_date_value["value"], blacklist=blacklist, ) else: # NOT FOUND - self.db.daily_max_power_fail_increment( - usage_point_id=self.usage_point_id, + DatabaseMaxPower(self.usage_point_id).daily_fail_increment( date=datetime.combine(single_date, datetime.min.time()), ) return interval_reading @@ -103,7 +113,7 @@ def run(self, begin, end): if hasattr(data, "status_code"): status_code = data.status_code else: - status_code = 500 + status_code = CODE_500_INTERNAL_SERVER_ERROR return { "error": True, "description": description, @@ -117,7 +127,7 @@ def run(self, begin, end): if hasattr(data, "status_code"): status_code = data.status_code else: - status_code = 500 + status_code = CODE_500_INTERNAL_SERVER_ERROR return { "error": True, "description": description, @@ -128,8 +138,11 @@ def run(self, begin, end): logging.error(e) def get(self): - end = datetime.combine((datetime.now() + timedelta(days=2)), datetime.max.time()) - begin = datetime.combine(end - timedelta(days=self.max_daily), datetime.min.time()) + """Get the daily power consumption data.""" + end = datetime.combine((datetime.now(tz=TIMEZONE_UTC) + timedelta(days=2)), datetime.max.time()).astimezone( + TIMEZONE_UTC + ) + begin = datetime.combine(end - timedelta(days=self.max_daily), datetime.min.time()).astimezone(TIMEZONE_UTC) finish = True result = [] while finish: @@ -154,50 +167,65 @@ def get(self): "error": True, "description": "MyElectricalData est indisponible.", } - if "error" in response and response["error"]: + if "error" in result and result.get("error"): logging.error("Echec de la récupération des données.") - logging.error(f' => {response["description"]}') - 
logging.error(f" => {begin.strftime(self.date_format)} -> {end.strftime(self.date_format)}") - if "status_code" in response and (response["status_code"] == 409 or response["status_code"] == 400): + logging.error(" => %s", response["description"]) + logging.error(" => %s -> %s", begin.strftime(self.date_format), end.strftime(self.date_format)) + if "status_code" in response and ( + response["status_code"] == CODE_409_CONFLICT or response["status_code"] == CODE_400_BAD_REQUEST + ): finish = False logging.error("Arrêt de la récupération des données suite à une erreur.") - logging.error(f"Prochain lancement à {datetime.now() + timedelta(seconds=CONFIG.get('cycle'))}") + logging.error( + "Prochain lancement à %s", + datetime.now(tz=TIMEZONE_UTC) + timedelta(seconds=DatabaseConfig().get("cycle")), + ) return result def reset(self, date=None): + """Reset the daily power consumption data.""" if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.reset_daily_max_power(self.usage_point_id, date) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).reset_daily(date) return True def delete(self, date=None): + """Delete the daily power consumption data.""" if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.delete_daily_max_power(self.usage_point_id, date) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).delete_daily(date) return True def blacklist(self, date, action): + """Blacklist the daily power consumption data.""" if date is not None: - date = datetime.strptime(date, self.date_format) - self.db.blacklist_daily_max_power(self.usage_point_id, date, action) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) + DatabaseMaxPower(self.usage_point_id).blacklist_daily(date) return True def fetch(self, date): + """Fetch the daily power consumption data.""" if date is not 
None: - date = datetime.strptime(date, self.date_format) + date = datetime.strptime(date, self.date_format).astimezone(TIMEZONE_UTC) result = self.run( date - timedelta(days=1), date + timedelta(days=1), ) - if "error" in result and result["error"]: + if "error" in result and result.get("error"): return { "error": True, "notif": result["description"], - "fail_count": self.db.get_daily_max_power_fail_count(self.usage_point_id, date), + "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), } for item in result: - target_date = datetime.strptime(item["date"], self.date_format_detail).strftime(self.date_format) - event_date = datetime.strptime(item["date"], self.date_format_detail).strftime("%H:%M:%S") + target_date = ( + datetime.strptime(item["date"], self.date_format_detail) + .astimezone(TIMEZONE_UTC) + .strftime(self.date_format) + ) + event_date = ( + datetime.strptime(item["date"], self.date_format_detail).astimezone(TIMEZONE_UTC).strftime("%H:%M:%S") + ) if date.strftime(self.date_format) == target_date: item["date"] = target_date item["event_date"] = event_date @@ -205,5 +233,5 @@ def fetch(self, date): return { "error": True, "notif": f"Aucune donnée n'est disponible chez Enedis sur cette date ({date})", - "fail_count": self.db.get_daily_max_power_fail_count(self.usage_point_id, date), + "fail_count": DatabaseMaxPower(self.usage_point_id).get_fail_count(date), } diff --git a/src/models/query_status.py b/src/models/query_status.py index 27eb8b1b..458db644 100755 --- a/src/models/query_status.py +++ b/src/models/query_status.py @@ -1,22 +1,26 @@ +"""Class representing the status of MyElectricalData.""" + import datetime import json import logging import traceback from os import environ, getenv -from config import URL +from config import CODE_200_SUCCESS, URL from dependencies import get_version -from init import DB +from database.usage_points import DatabaseUsagePoints from models.query import Query class Status: + """Class representing the 
status of MyElectricalData.""" + def __init__(self, headers=None): - self.db = DB self.url = URL self.headers = headers def ping(self): + """Ping the MyElectricalData endpoint to check its availability.""" target = f"{self.url}/ping" status = { "version": get_version(), @@ -25,7 +29,7 @@ def ping(self): } try: response = Query(endpoint=target, headers=self.headers).get() - if hasattr(response, "status_code") and response.status_code == 200: + if hasattr(response, "status_code") and response.status_code == CODE_200_SUCCESS: status = json.loads(response.text) for key, value in status.items(): logging.info(f"{key}: {value}") @@ -37,27 +41,35 @@ def ping(self): return status def status(self, usage_point_id): - usage_point_id_config = self.db.get_usage_point(usage_point_id) + """Retrieve the status of a usage point. + + Args: + usage_point_id (str): The ID of the usage point. + + Returns: + dict: The status of the usage point. + """ + usage_point_id_config = DatabaseUsagePoints(usage_point_id).get() target = f"{self.url}/valid_access/{usage_point_id}" if hasattr(usage_point_id_config, "cache") and usage_point_id_config.cache: target += "/cache" response = Query(endpoint=target, headers=self.headers).get() if response: status = json.loads(response.text) - if response.status_code == 200: + if response.status_code == CODE_200_SUCCESS: try: for key, value in status.items(): logging.info(f"{key}: {value}") - self.db.usage_point_update( - usage_point_id, + DatabaseUsagePoints(usage_point_id).update( consentement_expiration=datetime.datetime.strptime( status["consent_expiration_date"], "%Y-%m-%dT%H:%M:%S" - ), - # last_call=datetime.datetime.strptime(status["last_call"], "%Y-%m-%dT%H:%M:%S.%f"), + ).replace(tzinfo=datetime.timezone.utc), call_number=status["call_number"], quota_limit=status["quota_limit"], quota_reached=status["quota_reached"], - quota_reset_at=datetime.datetime.strptime(status["quota_reset_at"], "%Y-%m-%dT%H:%M:%S.%f"), + 
quota_reset_at=datetime.datetime.strptime( + status["quota_reset_at"], "%Y-%m-%dT%H:%M:%S.%f" + ).replace(tzinfo=datetime.timezone.utc), ban=status["ban"], ) return status diff --git a/src/models/query_tempo.py b/src/models/query_tempo.py index 9b349d40..10fd8f5b 100644 --- a/src/models/query_tempo.py +++ b/src/models/query_tempo.py @@ -1,3 +1,4 @@ +"""Fetch tempo data from gateway and store it in the database.""" import json import logging import traceback @@ -5,18 +6,18 @@ from dateutil.relativedelta import relativedelta -from config import URL +from config import TIMEZONE, URL, CODE_200_SUCCESS from dependencies import title -from init import CONFIG, DB from models.query import Query +from database.tempo import DatabaseTempo class Tempo: + """Fetches tempo data from gateway and stores it in the database.""" + def __init__(self): - self.config = CONFIG - self.db = DB self.url = URL - self.valid_date = datetime.combine(datetime.now() + relativedelta(days=1), datetime.min.time()) + self.valid_date = datetime.combine(datetime.now(tz=TIMEZONE) + relativedelta(days=1), datetime.min.time()) self.nb_check_day = 31 self.total_tempo_days = { "red": 22, @@ -25,16 +26,25 @@ def __init__(self): } def run(self): - start = (datetime.now() - relativedelta(years=3)).strftime("%Y-%m-%d") - end = (datetime.now() + relativedelta(days=2)).strftime("%Y-%m-%d") + """Runs the tempo data retrieval process. + + Args: + None + + Returns: + A dictionary containing the retrieved tempo data. 
+ + """ + start = (datetime.now(tz=TIMEZONE) - relativedelta(years=3)).strftime("%Y-%m-%d") + end = (datetime.now(tz=TIMEZONE) + relativedelta(days=2)).strftime("%Y-%m-%d") target = f"{self.url}/rte/tempo/{start}/{end}" query_response = Query(endpoint=target).get() - if query_response.status_code == 200: + if query_response.status_code == CODE_200_SUCCESS: try: response_json = json.loads(query_response.text) for date, color in response_json.items(): - date = datetime.strptime(date, "%Y-%m-%d") - self.db.set_tempo(date, color) + date_obj = datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=TIMEZONE) + DatabaseTempo().set(date_obj, color) response = response_json except Exception as e: logging.error(e) @@ -51,7 +61,16 @@ def run(self): } def get(self): - data = self.db.get_tempo() + """Retrieves tempo data from the database. + + Args: + None + + Returns: + A dictionary containing the tempo data. + + """ + data = DatabaseTempo().get() output = {} for d in data: if hasattr(d, "date") and hasattr(d, "color"): @@ -59,11 +78,20 @@ def get(self): return output def fetch(self): - current_cache = self.db.get_tempo() + """Fetches tempo data from the database or retrieves it from the cache if available. + + Args: + None + + Returns: + A dictionary containing the tempo data. + + """ + current_cache = DatabaseTempo().get() result = {} if not current_cache: # No cache - title(f"No cache") + title("No cache") result = self.run() else: valid_date = self.valid_date @@ -85,8 +113,7 @@ def fetch(self): return result def calc_day(self): - """ - Calculates the number of days left for each color based on the current date. + """Calculates the number of days left for each color based on the current date. Args: None @@ -95,29 +122,39 @@ def calc_day(self): A dictionary containing the number of days left for each color. 
""" - now = datetime.now() - begin = datetime.combine(now.replace(month=9, day=1), datetime.min.time()) + now = datetime.now(tz=TIMEZONE) + begin = datetime.combine(now.replace(month=9, day=1), datetime.min.time()).astimezone(TIMEZONE) + print(begin, now) if now < begin: begin = begin.replace(year=int(now.strftime("%Y")) - 1) end = datetime.combine(begin - timedelta(hours=5), datetime.max.time()).replace( year=int(begin.strftime("%Y")) + 1 ) - current_tempo_day = self.db.get_tempo_range(begin=begin, end=end) + current_tempo_day = DatabaseTempo().get_range(begin=begin, end=end) result = self.total_tempo_days for day in current_tempo_day: result[day.color.lower()] -= 1 - self.db.set_tempo_config("days", result) + DatabaseTempo().set_config("days", result) return result def fetch_day(self): + """Fetches tempo days data from the API and updates the database. + + Args: + None + + Returns: + A dictionary containing the tempo days data. + + """ target = f"{self.url}/edf/tempo/days" query_response = Query(endpoint=target).get() - if query_response.status_code == 200: + if query_response.status_code == CODE_200_SUCCESS: try: response_json = json.loads(query_response.text) - self.db.set_tempo_config("days", response_json) + DatabaseTempo().set_config("days", response_json) response = {"error": False, "description": "", "items": response_json} - logging.info(" => Toutes les valeurs sont misent à jours.") + logging.info(" => Toutes les valeurs sont mises à jour.") except Exception as e: logging.error(e) traceback.print_exc() @@ -133,12 +170,21 @@ def fetch_day(self): } def fetch_price(self): + """Fetches tempo price data from the API and updates the database. + + Args: + None + + Returns: + A dictionary containing the tempo price data. 
+ + """ target = f"{self.url}/edf/tempo/price" query_response = Query(endpoint=target).get() - if query_response.status_code == 200: + if query_response.status_code == CODE_200_SUCCESS: try: response_json = json.loads(query_response.text) - self.db.set_tempo_config("price", response_json) + DatabaseTempo().set_config("price", response_json) response = {"error": False, "description": "", "items": response_json} logging.info(" => Toutes les valeurs sont misent à jours.") except Exception as e: diff --git a/src/models/stat.py b/src/models/stat.py index b546c799..4cdd4dcd 100644 --- a/src/models/stat.py +++ b/src/models/stat.py @@ -1,14 +1,20 @@ +"""Generate all statistical data for a usage point.""" import calendar import json import logging from datetime import date, datetime, timedelta, timezone -import pytz from dateutil.relativedelta import relativedelta -from init import CONFIG, DB - -utc = pytz.UTC +from config import TEMPO_BEGIN, TEMPO_END +from database.contracts import DatabaseContracts +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.max_power import DatabaseMaxPower +from database.statistique import DatabaseStatistique +from database.tempo import DatabaseTempo +from database.usage_points import DatabaseUsagePoints +from dependencies import is_between now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) @@ -43,11 +49,11 @@ class Stat: # pylint: disable=R0902,R0904 - value_yesterday_hp: The value of yesterday for high peak measurement type. - value_yesterday_hc: The value of yesterday for high consumption measurement type. - value_peak_offpeak_percent_hp: The percentage value of peak and off-peak for high peak measurement type. - - value_peak_offpeak_percent_hc: The percentage value of peak and off-peak for high consumption measurement type. 
+ - value_peak_offpeak_percent_hc: The percentage value of peak and off-peak for high consumption. - value_current_week_evolution: The evolution value of the current week. - value_yesterday_evolution: The evolution value of yesterday. - value_current_month_evolution: The evolution value of the current month. - - value_peak_offpeak_percent_hp_vs_hc: The percentage value of peak and off-peak for high peak and high consumption measurement types. + - value_peak_offpeak_percent_hp_vs_hc: The percentage value of peak and off-peak for high peak/consumption. - value_monthly_evolution: The evolution value of the monthly data. - value_yearly_evolution: The evolution value of the yearly data. @@ -80,21 +86,25 @@ class Stat: # pylint: disable=R0902,R0904 - yesterday_hc_hp(): Returns the yesterday data for high consumption and high peak measurement types. - peak_offpeak_percent(): Returns the percentage value of peak and off-peak. - get_year(year, measure_type=None): Returns the yearly data for the specified year and measure type. - - get_year_linear(idx, measure_type=None): Returns the linear yearly data for the specified index and measure type. - - get_month(year, month=None, measure_type=None): Returns the monthly data for the specified year, month, and measure type. - - get_month_linear(idx, measure_type=None): Returns the linear monthly data for the specified index and measure type. - - get_week(year, month=None, measure_type=None): Returns the weekly data for the specified year, month, and measure type. - - get_week_linear(idx, measure_type=None): Returns the linear weekly data for the specified index and measure type. + - get_year_linear(idx, measure_type=None): Returns the linear yearly data for the specified index and + measure type. + - get_month(year, month=None, measure_type=None): Returns the monthly data for the specified year, month, + and measure type. 
+ - get_month_linear(idx, measure_type=None): Returns the linear monthly data for the specified index + and measure type. + - get_week(year, month=None, measure_type=None): Returns the weekly data for the specified year, month, + and measure type. + - get_week_linear(idx, measure_type=None): Returns the linear weekly data for the specified index + and measure type. - get_price(): Returns the price data. - get_mesure_type(date): Returns the measure type for the specified date. - generate_price(): Generates and saves the price data. - get_daily(date, mesure_type): Returns the daily data for the specified date and measure type. - delete(): Deletes the statistical data for the usage point. - - is_between(time, time_range): Checks if the given time is between the given time range. """ def __init__(self, usage_point_id, measurement_direction=None): - """Initializes a new instance of the 'Stat' class. + """Initialize a new instance of the 'Stat' class. Parameters: usage_point_id (int): The ID of the usage point. @@ -123,24 +133,25 @@ def __init__(self, usage_point_id, measurement_direction=None): value_last_year (int): The value of the last year. value_yesterday_hp (int): The value of yesterday for high peak measurement type. value_yesterday_hc (int): The value of yesterday for high consumption measurement type. - value_peak_offpeak_percent_hp (int): The percentage value of peak and off-peak for high peak measurement type. - value_peak_offpeak_percent_hc (int): The percentage value of peak and off-peak for high consumption measurement type. + value_peak_offpeak_percent_hp (int): The percentage value of peak and off-peak for high peak + measurement type. + value_peak_offpeak_percent_hc (int): The percentage value of peak and off-peak for high consumption + measurement type. value_current_week_evolution (int): The evolution value of the current week. value_yesterday_evolution (int): The evolution value of yesterday. 
value_current_month_evolution (int): The evolution value of the current month. - value_peak_offpeak_percent_hp_vs_hc (int): The percentage value of peak and off-peak for high peak and high consumption measurement types. + value_peak_offpeak_percent_hp_vs_hc (int): The percentage value of peak and off-peak for high peak and + high consumption measurement types. value_monthly_evolution (int): The evolution value of the monthly data. value_yearly_evolution (int): The evolution value of the yearly data. Returns: None """ - self.config = CONFIG - self.db = DB self.usage_point_id = usage_point_id self.measurement_direction = measurement_direction - self.usage_point_id_config = self.db.get_usage_point(self.usage_point_id) - self.usage_point_id_contract = self.db.get_contract(self.usage_point_id) + self.usage_point_id_config = DatabaseUsagePoints(self.usage_point_id).get() + self.usage_point_id_contract = DatabaseContracts(self.usage_point_id).get() self.date_format = "%Y-%m-%d" self.date_format_detail = "%Y-%m-%d %H:%M:%S" # STAT @@ -166,15 +177,23 @@ def __init__(self, usage_point_id, measurement_direction=None): self.value_peak_offpeak_percent_hp_vs_hc = 0 self.value_monthly_evolution = 0 self.value_yearly_evolution = 0 - self.usage_point_id_contract = self.db.get_contract(self.usage_point_id) + self.usage_point_id_contract = DatabaseContracts(self.usage_point_id).get() def daily(self, index=0): + """Calculate the daily value for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the calculated value, begin date, and end date. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = 0 - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + data.value return { "value": value, @@ -183,12 +202,21 @@ def daily(self, index=0): } def detail(self, index, measure_type=None): + """Calculate the detailed value for the given index and measure type. + + Args: + index (int): The index for the number of days ago. + measure_type (str, optional): The measure type (HP or HC). Defaults to None. + + Returns: + dict: A dictionary containing the calculated value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = 0 - for data in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(data.date) if measure_type is None or (measure_type == "HP" and day_measure_type == "HP"): value = value + data.value / (60 / data.interval) @@ -201,6 +229,14 @@ def detail(self, index, measure_type=None): } def tempo(self, index): + """Calculate the tempo value for the given index. + + Args: + index (int): The index for the number of days ago. + + Returns: + dict: A dictionary containing the calculated value, begin date, and end date. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) @@ -213,17 +249,16 @@ def tempo(self, index): "red_hc": 0, "red_hp": 0, } - for data in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): - # print(data) + for data in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): hour = int(datetime.strftime(data.date, "%H")) - if hour < 6: - color = self.db.get_tempo_range(begin - timedelta(days=1), end - timedelta(days=1))[0].color + if hour < TEMPO_BEGIN: + color = DatabaseTempo().get_range(begin - timedelta(days=1), end - timedelta(days=1))[0].color color = f"{color.lower()}_hc" - elif hour >= 22: - color = self.db.get_tempo_range(begin + timedelta(days=1), end + timedelta(days=1))[0].color + elif hour >= TEMPO_END: + color = DatabaseTempo().get_range(begin + timedelta(days=1), end + timedelta(days=1))[0].color color = f"{color.lower()}_hc" else: - color = self.db.get_tempo_range(begin, end)[0].color + color = DatabaseTempo().get_range(begin, end)[0].color color = f"{color.lower()}_hp" value[color] += data.value / (60 / data.interval) return { @@ -233,12 +268,20 @@ def tempo(self, index): } def tempo_color(self, index=0): + """Calculate the tempo color for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the tempo color value, begin date, and end date. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = "" - for data in self.db.get_tempo_range(begin, end): + for data in DatabaseTempo().get_range(begin, end): logging.debug(f"tempo data: {data}") value = value + data.color return { @@ -248,14 +291,20 @@ def tempo_color(self, index=0): } def max_power(self, index=0): + """Calculate the maximum power for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the maximum power value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) value = 0 - # print(self.db.get_daily_max_power_range(self.usage_point_id, begin, end)) - for data in self.db.get_daily_max_power_range(self.usage_point_id, begin, end): - # print(data) + for data in DatabaseMaxPower(self.usage_point_id).get_range(begin, end): value = value + data.value return { "value": value, @@ -264,6 +313,14 @@ def max_power(self, index=0): } def max_power_over(self, index=0): + """Calculate if the maximum power is exceeded for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary indicating if the maximum power is exceeded, begin date, and end date. 
+ """ max_power = 0 now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) @@ -276,7 +333,7 @@ def max_power_over(self, index=0): end = datetime.combine(begin, datetime.max.time()) value = 0 boolv = "true" - for data in self.db.get_daily_max_power_range(self.usage_point_id, begin, end): + for data in DatabaseMaxPower(self.usage_point_id).get_range(begin, end): value = value + data.value if (value / 1000) < max_power: boolv = "false" @@ -287,14 +344,20 @@ def max_power_over(self, index=0): } def max_power_time(self, index=0): + """Calculate the maximum power time for the given index. + + Args: + index (int, optional): The index for the number of days ago. Defaults to 0. + + Returns: + dict: A dictionary containing the maximum power time value, begin date, and end date. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=index), datetime.min.time()) end = datetime.combine(begin, datetime.max.time()) max_power_time = "" - # print(self.db.get_daily_max_power_range(self.usage_point_id, begin, end)) - for data in self.db.get_daily_max_power_range(self.usage_point_id, begin, end): - # print(data) + for data in DatabaseMaxPower(self.usage_point_id).get_range(begin, end): if data.event_date is None or data.event_date == "": max_power_time = data.date else: @@ -311,6 +374,11 @@ def max_power_time(self, index=0): return data def current_week_array(self): + """Calculate the array of values for the current week. + + Returns: + list: A list containing the values for each day of the current week. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date, datetime.min.time()) @@ -318,8 +386,9 @@ def current_week_array(self): end = datetime.combine(yesterday_date, datetime.max.time()) day_idx = 0 daily_obj = [] - while day_idx < 7: - day = self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction) + id_max = 7 + while day_idx < id_max: + day = DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end) if day: daily_obj.append({"date": day[0].date, "value": day[0].value}) else: @@ -330,11 +399,16 @@ def current_week_array(self): return {"value": daily_obj, "begin": begin_return, "end": end} def current_week(self): + """Calculate the total value for the current week. + + Returns: + dict: A dictionary containing the total value, begin date, and end date of the current week. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date - relativedelta(weeks=1), datetime.min.time()) end = datetime.combine(yesterday_date, datetime.max.time()) - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_week = self.value_current_week + data.value logging.debug(f" current_week => {self.value_current_week}") return { @@ -343,33 +417,17 @@ def current_week(self): "end": end.strftime(self.date_format), } - # def get_week(self, year): - # logging.debug(f"[{year}] current_week") - # begin = datetime.combine(now_date - relativedelta(weeks=1), datetime.min.time()) - # end = datetime.combine(yesterday_date, datetime.max.time()) - # for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): - # 
self.value_current_week = self.value_current_week + data.value - # logging.debug(f" {self.value_current_week}") - # return { - # "value": self.value_current_week, - # "begin": begin.strftime(self.date_format), - # "end": end.strftime(self.date_format) - # } - def last_week(self): + """Calculate the total value for the last week. + + Returns: + dict: A dictionary containing the total value, begin date, and end date of the last week. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date - relativedelta(weeks=2), datetime.min.time()) end = datetime.combine(yesterday_date - relativedelta(weeks=1), datetime.max.time()) - # while day_idx < 7: - # day = self.db.get_daily_range(self.usage_point_id, begin, end, self.self.measurement_direction) - # if day: - # for data in day: - # last_week = last_week + data.value - # begin = begin - timedelta(days=1) - # end = end - timedelta(days=1) - # day_idx = day_idx + 1 - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_week = self.value_last_week + data.value logging.debug(f" last_week => {self.value_last_week}") return { @@ -379,17 +437,27 @@ def last_week(self): } def current_week_evolution(self): + """Calculate the evolution of the current week's value compared to the previous week. + + Returns: + float: The percentage change in value between the current week and the previous week. + """ if self.value_last_week != 0: self.value_current_week_evolution = ((self.value_current_week * 100) / self.value_last_week) - 100 logging.debug(f" current_week_evolution => {self.value_current_week_evolution}") return self.value_current_week_evolution def yesterday(self): + """Calculate the value for yesterday. 
+ + Returns: + dict: A dictionary containing the value, begin date, and end date of yesterday. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date, datetime.min.time()) end = datetime.combine(yesterday_date, datetime.max.time()) - data = self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction) + data = DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end) if data: self.value_yesterday = data[0].value else: @@ -402,11 +470,16 @@ def yesterday(self): } def yesterday_1(self): + """Calculate the value for the day before yesterday. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the day before yesterday. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date - timedelta(days=1), datetime.min.time()) end = datetime.combine(yesterday_date - timedelta(days=1), datetime.max.time()) - data = self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction) + data = DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end) if data: self.value_yesterday_1 = data[0].value else: @@ -419,6 +492,11 @@ def yesterday_1(self): } def yesterday_evolution(self): + """Calculate the evolution of the value for yesterday compared to the day before yesterday. + + Returns: + float: The percentage change in value between yesterday and the day before yesterday. 
+ """ self.yesterday() self.yesterday_1() if self.value_yesterday_1 != 0: @@ -427,17 +505,11 @@ def yesterday_evolution(self): return self.value_yesterday_evolution def current_week_last_year(self): - # begin = datetime.combine(yesterday - relativedelta(years=1), datetime.min.time()) - # end = datetime.combine(yesterday - relativedelta(years=1), datetime.max.time()) - # day_idx = 0 - # while day_idx < 7: - # day = self.db.get_daily_range(self.usage_point_id, begin, end, self.self.measurement_direction) - # if day: - # for data in day: - # current_week_last_year = current_week_last_year + data.value - # begin = begin - timedelta(days=1) - # end = end - timedelta(days=1) - # day_idx = day_idx + 1 + """Calculate the value for the current week of the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current week of the last year. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -445,7 +517,7 @@ def current_week_last_year(self): datetime.min.time(), ) end = datetime.combine(yesterday_date - relativedelta(years=1), datetime.max.time()) - for data in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for data in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_week_last_year = self.value_current_week_last_year + data.value logging.debug(f" current_week_last_year => {self.value_current_week_last_year}") return { @@ -455,6 +527,11 @@ def current_week_last_year(self): } def last_month(self): + """Calculate the value for the last month. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the last month. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -462,7 +539,7 @@ def last_month(self): datetime.min.time(), ) end = datetime.combine(yesterday_date.replace(day=1) - timedelta(days=1), datetime.max.time()) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_month = self.value_last_month + day.value logging.debug(f" last_month => {self.value_last_month}") return { @@ -472,11 +549,16 @@ def last_month(self): } def current_month(self): + """Calculate the value for the current month. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current month. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date.replace(day=1), datetime.min.time()) end = yesterday_date - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_month = self.value_current_month + day.value logging.debug(f" current_month => {self.value_current_month}") return { @@ -486,11 +568,16 @@ def current_month(self): } def current_month_last_year(self): + """Calculate the value for the current month of the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current month of the last year. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date.replace(day=1), datetime.min.time()) - relativedelta(years=1) end = yesterday_date - relativedelta(years=1) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_month_last_year = self.value_current_month_last_year + day.value logging.debug(f" current_month_last_year => {self.value_current_month_last_year}") return { @@ -500,6 +587,11 @@ def current_month_last_year(self): } def current_month_evolution(self): + """Calculate the evolution of the current month compared to the same month of the previous year. + + Returns: + float: The percentage evolution of the current month. + """ if self.value_current_month_last_year != 0: self.value_current_month_evolution = ( (100 * self.value_current_month) / self.value_current_month_last_year @@ -508,6 +600,11 @@ def current_month_evolution(self): return self.value_current_month_evolution def last_month_last_year(self): + """Calculate the value for the last month of the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the last month of the last year. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -517,7 +614,7 @@ def last_month_last_year(self): end = datetime.combine(yesterday_date.replace(day=1) - timedelta(days=1), datetime.max.time()) - relativedelta( years=1 ) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_month_last_year = self.value_last_month_last_year + day.value logging.debug(f" last_month_last_year => {self.value_last_month_last_year}") return { @@ -527,6 +624,11 @@ def last_month_last_year(self): } def monthly_evolution(self): + """Calculate the monthly evolution based on the last month and the last month of the previous year. + + Returns: + float: The percentage monthly evolution. + """ self.last_month() self.last_month_last_year() if self.value_last_month_last_year != 0: @@ -535,11 +637,16 @@ def monthly_evolution(self): return self.value_monthly_evolution def current_year(self): + """Calculate the value for the current year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the current year. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(now_date.replace(month=1, day=1), datetime.min.time()) end = yesterday_date - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_year = self.value_current_year + day.value logging.debug(f" current_year => {self.value_current_year}") return { @@ -549,6 +656,11 @@ def current_year(self): } def current_year_last_year(self): + """Calculate the value for the current year of the last year. 
+ + Returns: + dict: A dictionary containing the value, begin date, and end date of the current year of the last year. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine( @@ -556,7 +668,7 @@ def current_year_last_year(self): datetime.min.time(), ) end = yesterday_date - relativedelta(years=1) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_current_year_last_year = self.value_current_year_last_year + day.value logging.debug(f" current_year_last_year => {self.value_current_year_last_year}") return { @@ -566,6 +678,11 @@ def current_year_last_year(self): } def last_year(self): + """Calculate the value for the last year. + + Returns: + dict: A dictionary containing the value, begin date, and end date of the last year. + """ now_date = datetime.now(timezone.utc) begin = datetime.combine( now_date.replace(month=1, day=1) - relativedelta(years=1), @@ -573,7 +690,7 @@ def last_year(self): ) last_day_of_month = calendar.monthrange(int(begin.strftime("%Y")), 12)[1] end = datetime.combine(begin.replace(month=1, day=last_day_of_month), datetime.max.time()) - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): self.value_last_year = self.value_last_year + day.value logging.debug(f" last_year => {self.value_last_year}") return { @@ -583,6 +700,11 @@ def last_year(self): } def yearly_evolution(self): + """Calculate the yearly evolution based on the current year and the last year. + + Returns: + float: The percentage yearly evolution. 
+ """ self.current_year() self.current_year_last_year() if self.value_last_month_last_year != 0: @@ -591,11 +713,16 @@ def yearly_evolution(self): return self.value_yearly_evolution def yesterday_hc_hp(self): + """Calculate the value for yesterday's HC and HP. + + Returns: + dict: A dictionary containing the values for HC and HP, along with the begin and end dates. + """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = datetime.combine(yesterday_date, datetime.min.time()) end = datetime.combine(now_date, datetime.max.time()) - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): measure_type = self.get_mesure_type(day.date) if measure_type == "HP": self.value_yesterday_hp = self.value_yesterday_hp + (day.value / (60 / day.interval)) @@ -610,6 +737,11 @@ def yesterday_hc_hp(self): } def peak_offpeak_percent(self): + """Calculate the percentage difference between peak and off-peak values. + + Returns: + float: The percentage difference between peak and off-peak values. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) begin = yesterday_date - relativedelta(years=1) @@ -617,7 +749,7 @@ def peak_offpeak_percent(self): value_peak_offpeak_percent_hp = 0 value_peak_offpeak_percent_hc = 0 value_peak_offpeak_percent_hp_vs_hc = 0 - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): measure_type = self.get_mesure_type(day.date) if measure_type == "HP": value_peak_offpeak_percent_hp = value_peak_offpeak_percent_hp + day.value @@ -632,6 +764,15 @@ def peak_offpeak_percent(self): # STAT V2 def get_year(self, year, measure_type=None): + """Retrieve the data for a specific year. + + Args: + year (int): The year for which to retrieve the data. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) begin = datetime.combine(now_date.replace(year=year, month=1, day=1), datetime.min.time()) last_day_of_month = calendar.monthrange(year, 12)[1] @@ -641,10 +782,10 @@ def get_year(self, year, measure_type=None): ) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: value = value + (day.value / (60 / day.interval)) @@ -655,16 +796,25 @@ def get_year(self, year, measure_type=None): } def get_year_linear(self, idx, measure_type=None): + """Retrieve the linear data for a specific year. + + Args: + idx (int): The index of the year. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) end = datetime.combine(yesterday_date - relativedelta(years=idx), datetime.max.time()) begin = datetime.combine(end - relativedelta(years=1), datetime.min.time()) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: value = value + (day.value / (60 / day.interval)) @@ -675,6 +825,16 @@ def get_year_linear(self, idx, measure_type=None): } def get_month(self, year, month=None, measure_type=None): + """Retrieve the data for a specific month. + + Args: + year (int): The year for which to retrieve the data. + month (int, optional): The month for which to retrieve the data. Defaults to None. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) if month is None: month = int(datetime.now().strftime("%m")) @@ -686,10 +846,10 @@ def get_month(self, year, month=None, measure_type=None): ) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: value = value + (day.value / (60 / day.interval)) @@ -700,16 +860,25 @@ def get_month(self, year, month=None, measure_type=None): } def get_month_linear(self, idx, measure_type=None): + """Retrieve the linear data for a specific month. + + Args: + idx (int): The index of the month. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) end = datetime.combine(yesterday_date - relativedelta(years=idx), datetime.max.time()) begin = datetime.combine(end - relativedelta(months=1), datetime.min.time()) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: value = value + (day.value / (60 / day.interval)) @@ -720,6 +889,16 @@ def get_month_linear(self, idx, measure_type=None): } def get_week(self, year, month=None, measure_type=None): + """Retrieve the data for a specific week. + + Args: + year (int): The year for which to retrieve the data. + month (int, optional): The month for which to retrieve the data. Defaults to None. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) if month is None: month = int(datetime.now().strftime("%m")) @@ -740,10 +919,10 @@ def get_week(self, year, month=None, measure_type=None): ) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: value = value + (day.value / (60 / day.interval)) @@ -754,16 +933,25 @@ def get_week(self, year, month=None, measure_type=None): } def get_week_linear(self, idx, measure_type=None): + """Retrieve the linear data for a specific week. + + Args: + idx (int): The index of the week. + measure_type (str, optional): The type of measurement. Defaults to None. + + Returns: + dict: A dictionary containing the retrieved data, along with the begin and end dates. 
+ """ now_date = datetime.now(timezone.utc) yesterday_date = datetime.combine(now_date - relativedelta(days=1), datetime.max.time()) end = datetime.combine(yesterday_date - relativedelta(years=idx), datetime.max.time()) begin = datetime.combine(end - timedelta(days=7), datetime.min.time()) value = 0 if measure_type is None: - for day in self.db.get_daily_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDaily(self.usage_point_id, self.measurement_direction).get_range(begin, end): value = value + day.value else: - for day in self.db.get_detail_range(self.usage_point_id, begin, end, self.measurement_direction): + for day in DatabaseDetail(self.usage_point_id, self.measurement_direction).get_range(begin, end): day_measure_type = self.get_mesure_type(day.date) if day_measure_type == measure_type: value = value + (day.value / (60 / day.interval)) @@ -774,9 +962,13 @@ def get_week_linear(self, idx, measure_type=None): } def get_price(self): - data = self.db.get_stat(self.usage_point_id, f"price_{self.measurement_direction}") + """Retrieve the price data for the measurement direction. + + Returns: + dict: A dictionary containing the price data. + """ + data = DatabaseStatistique(self.usage_point_id).get(f"price_{self.measurement_direction}") return json.loads(data[0].value) - # return ast.literal_eval() def get_mesure_type(self, measurement_date): """Determine the measurement type (HP or HC) based on the given date and off-peak hours. @@ -806,42 +998,23 @@ def get_mesure_type(self, measurement_date): # FORMAT HOUR WITH 2 DIGIT offpeak_stop = datetime.strptime(offpeak_stop, "%H:%M") offpeak_stop = datetime.strftime(offpeak_stop, "%H:%M") - result = self.is_between(date_hour_minute, (offpeak_begin, offpeak_stop)) + result = is_between(date_hour_minute, (offpeak_begin, offpeak_stop)) if result: measure_type = "HC" return measure_type - def is_between(self, time, time_range): - """Check if a given time is between a specified time range. 
- - Args: - time (datetime): The time to check. - time_range (tuple): The time range represented by a tuple of two datetime objects. - - Returns: - bool: True if the time is between the time range, False otherwise. - """ - time = time.replace(":", "") - start = time_range[0].replace(":", "") - end = time_range[1].replace(":", "") - if end < start: - return time >= start or time < end - return start <= time < end - - def generate_price(self): - """Generates the price for the usage point based on the measurement data. + def generate_price(self): # noqa: C901, PLR0912, PLR0915 + """Generate the price for the usage point based on the measurement data. Returns: str: JSON string representing the calculated price. """ - data = self.db.get_detail_all( - usage_point_id=self.usage_point_id, measurement_direction=self.measurement_direction - ) + data = DatabaseDetail(self.usage_point_id, self.measurement_direction).get_all() result = {} last_month = "" if data: - tempo_config = self.db.get_tempo_config("price") - tempo_data = self.db.get_tempo_range(data[0].date, data[-1].date) + tempo_config = DatabaseTempo().get_config("price") + tempo_data = DatabaseTempo().get_range(data[0].date, data[-1].date) for item in data: year = item.date.strftime("%Y") month = item.date.strftime("%m") @@ -910,7 +1083,7 @@ def generate_price(self): # TEMPO if tempo_config: hour = int(item.date.strftime("%H")) - if 6 <= hour < 22: + if TEMPO_BEGIN <= hour < TEMPO_END: measure_type = "HP" else: measure_type = "HC" @@ -927,8 +1100,7 @@ def generate_price(self): result[year]["month"][month]["TEMPO"][f"{color}_{measure_type}"]["kWh"] += kwh result[year]["month"][month]["TEMPO"][f"{color}_{measure_type}"]["euro"] += kwh * tempo_price last_month = month - self.db.set_stat( - self.usage_point_id, + DatabaseStatistique(self.usage_point_id).set( f"price_{self.measurement_direction}", json.dumps(result), ) @@ -947,7 +1119,7 @@ def get_daily(self, specific_date, mesure_type): begin = 
datetime.combine(specific_date, datetime.min.time()) end = datetime.combine(specific_date, datetime.max.time()) value = 0 - for item in self.db.get_detail_range(self.usage_point_id, begin, end): + for item in DatabaseDetail(self.usage_point_id, mesure_type).get_range(begin, end): if self.get_mesure_type(item.date).upper() == mesure_type.upper(): value += item.value / (60 / item.interval) return value diff --git a/src/routers/data.py b/src/routers/data.py index ae3493e0..8034ccef 100644 --- a/src/routers/data.py +++ b/src/routers/data.py @@ -1,16 +1,51 @@ +"""Return data from cache.""" + import ast from datetime import datetime from fastapi import APIRouter, HTTPException, Path, Request from fastapi.responses import HTMLResponse +from database.contracts import DatabaseContracts +from database.addresses import DatabaseAddresses +from database.daily import DatabaseDaily +from database.detail import DatabaseDetail +from database.max_power import DatabaseMaxPower +from database.usage_points import DatabaseUsagePoints from doc import DOCUMENTATION -from init import DB from models.ajax import Ajax ROUTER = APIRouter(tags=["Données"]) +@ROUTER.get("/contract/{usage_point_id}") +@ROUTER.get("/contract/{usage_point_id}/", include_in_schema=False) +def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): + """Renvoie les information du contrat remonter par Enedis.""" + if DatabaseUsagePoints(usage_point_id).get() is not None: + data = DatabaseContracts(usage_point_id).get().__dict__ + return dict(sorted(data.items())) + else: + raise HTTPException( + status_code=404, + detail=f"Le point de livraison '{usage_point_id}' est inconnu!", + ) + + +@ROUTER.get("/addresse/{usage_point_id}") +@ROUTER.get("/addresse/{usage_point_id}/", include_in_schema=False) +def get_contract(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): + """Renvoie les information postal remonter par Enedis.""" + if 
DatabaseUsagePoints(usage_point_id).get() is not None: + data = DatabaseAddresses(usage_point_id).get().__dict__ + return dict(sorted(data.items())) + else: + raise HTTPException( + status_code=404, + detail=f"Le point de livraison '{usage_point_id}' est inconnu!", + ) + + @ROUTER.put("/tempo", include_in_schema=False) @ROUTER.put("/tempo/", include_in_schema=False) def put_tempo(): @@ -27,7 +62,7 @@ def tempo(): @ROUTER.put("/ecowatt", include_in_schema=False) @ROUTER.put("/ecowatt/", include_in_schema=False) -def ecowatt(): +def put_ecowatt(): return Ajax().fetch_ecowatt() @@ -47,7 +82,7 @@ def ecowatt(): def fetch_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Mise à jour le cache local du comparateur d'abonnement.""" usage_point_id = usage_point_id.strip() - if DB.get_usage_point(usage_point_id) is not None: + if DatabaseUsagePoints(usage_point_id).get() is not None: return ast.literal_eval(Ajax(usage_point_id).generate_price()) else: raise HTTPException( @@ -64,7 +99,7 @@ def fetch_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage def get_price(usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"])): """Retourne les données du cache local du comparateur d'abonnement.""" usage_point_id = usage_point_id.strip() - if DB.get_usage_point(usage_point_id) is not None: + if DatabaseUsagePoints(usage_point_id).get() is not None: return Ajax(usage_point_id).get_price() else: raise HTTPException( @@ -94,9 +129,9 @@ def get_data_daily( if measurement_direction not in ["consumption", "production"]: raise HTTPException( status_code=404, - detail=f"'measurement_direction' inconnu, valeur possible consumption/production", + detail="'measurement_direction' inconnu, valeur possible consumption/production", ) - data = DB.get_daily_range(usage_point_id, begin, end, measurement_direction) + data = DatabaseDaily(usage_point_id, measurement_direction).get_range(begin=begin, end=end) output = {"unit": 
"w", "data": {}} if data is not None: for d in data: @@ -125,9 +160,9 @@ def get_data_detail( if measurement_direction not in ["consumption", "production"]: raise HTTPException( status_code=404, - detail=f"'measurement_direction' inconnu, valeur possible consumption/production", + detail="'measurement_direction' inconnu, valeur possible consumption/production", ) - data = DB.get_detail_range(usage_point_id, begin, end, measurement_direction) + data = DatabaseDetail(usage_point_id, measurement_direction).get_range(begin=begin, end=end) output = {"unit": "w", "data": {}} if data is not None: for d in data: @@ -135,6 +170,32 @@ def get_data_detail( return output +@ROUTER.get( + "/max_power/{usage_point_id}/{begin}/{end}", + summary="Retourne la puissance maximun.", +) +@ROUTER.get( + "/max_power/{usage_point_id}/{begin}/{end}/", + include_in_schema=False, +) +def get_max_power( + usage_point_id: str = Path(..., description=DOCUMENTATION["usage_point_id"]), + begin: str = Path(..., description=DOCUMENTATION["begin"]), + end: str = Path(..., description=DOCUMENTATION["end"]), +): + """Retourne les données du cache local de puissance maximal.""" + usage_point_id = usage_point_id.strip() + begin = datetime.strptime(begin, "%Y-%m-%d") + end = datetime.strptime(end, "%Y-%m-%d") + data = DatabaseMaxPower(usage_point_id).get_range(begin=begin, end=end) + print(data) + output = {"unit": "w", "data": {}} + if data is not None: + for d in data: + output["data"][d.event_date] = d.value + return output + + @ROUTER.get( "/get/{usage_point_id}/{measurement_direction}", response_class=HTMLResponse, @@ -151,7 +212,7 @@ def get_data( measurement_direction: str = Path(..., description=DOCUMENTATION["measurement_direction"]), ): usage_point_id = usage_point_id.strip() - if DB.get_usage_point(usage_point_id) is not None: + if DatabaseUsagePoints(usage_point_id).get() is not None: return Ajax(usage_point_id).datatable(measurement_direction, request) else: raise HTTPException( diff --git 
a/src/routers/html.py b/src/routers/html.py index de81f33d..755220fc 100644 --- a/src/routers/html.py +++ b/src/routers/html.py @@ -1,8 +1,9 @@ from fastapi import APIRouter, Request from fastapi.responses import FileResponse, HTMLResponse +from database import DB from dependencies import APPLICATION_PATH -from init import CONFIG, DB +from init import CONFIG from models.ajax import Ajax from templates.index import Index from templates.usage_point import UsagePoint @@ -12,8 +13,7 @@ @ROUTER.get("/favicon.ico") async def favicon(): - """ - This function handles the endpoint '/favicon.ico' and returns the favicon.ico file as a response. + """This function handles the endpoint '/favicon.ico' and returns the favicon.ico file as a response. Returns: - FileResponse: The favicon.ico file as a response. @@ -23,7 +23,7 @@ async def favicon(): @ROUTER.get("/", response_class=HTMLResponse) def main(): - """This function handles the root endpoint '/' and returns the HTML response generated by the 'display' method of the 'Index' class. + """Handle the root endpoint '/' and return the HTML generated by the 'display' method of the 'Index' class. Returns: - HTMLResponse: The HTML response generated by the 'display' method of the 'Index' class. @@ -34,7 +34,7 @@ def main(): @ROUTER.get("/usage_point_id/{usage_point_id}", response_class=HTMLResponse) @ROUTER.get("/usage_point_id/{usage_point_id}/", response_class=HTMLResponse) def usage_point_id(usage_point_id): - """This function handles the endpoint '/usage_point_id/{usage_point_id}' and '/usage_point_id/{usage_point_id}/' and returns the HTML response generated by the 'display' method of the 'UsagePoint' class. + """Handle the endpoint '/usage_point_id/{usage_point_id}' and '/usage_point_id/{usage_point_id}/'. Parameters: - usage_point_id (str): The ID of the usage point. 
@@ -62,30 +62,3 @@ def datatable(request: Request, usage_point_id, measurement_direction): datatable(request, "usage_point_id", "measurement_direction") """ return Ajax(usage_point_id).datatable(measurement_direction, request) - - -# ######################################################################################################################## -# # SWAGGER -# @ROUTER.get(f"/swagger", response_class=HTMLResponse, include_in_schema=False) -# def swagger(): -# data = '' -# html_content = html_return_fullscreen(body=data, footer_type="consent") -# return html_content -# -# -# ######################################################################################################################## -# # REDOC -# @ROUTER.get(f"/redocs", response_class=HTMLResponse, include_in_schema=False) -# def swagger(): -# data = '' -# html_content = html_return_fullscreen(body=data, footer_type="consent") -# return html_content -# -# from jinja2 import Template -# def html_return_fullscreen(body, footer_type="donation"): -# with open(f'/app/templates/html/index.html') as file_: -# index_template = Template(file_.read()) -# html = index_template.render( -# body=body, -# ) -# return html diff --git a/src/routers/info.py b/src/routers/info.py index 2bcffe00..d7a1897e 100644 --- a/src/routers/info.py +++ b/src/routers/info.py @@ -1,10 +1,12 @@ +"""Routers pour les informations générales.""" + from typing import Optional from fastapi import APIRouter from fastapi.responses import HTMLResponse from pydantic import BaseModel -from init import CONFIG, DB +from database import DB from models.ajax import Ajax ROUTER = APIRouter(tags=["Infos"]) diff --git a/src/templates/loading.py b/src/templates/loading.py index a8482a8e..e5213e99 100644 --- a/src/templates/loading.py +++ b/src/templates/loading.py @@ -2,7 +2,6 @@ from jinja2 import Template from dependencies import APPLICATION_PATH -from init import DB from templates.models.configuration import Configuration diff --git 
a/src/templates/models/configuration.py b/src/templates/models/configuration.py index 38af4d1d..a6465229 100644 --- a/src/templates/models/configuration.py +++ b/src/templates/models/configuration.py @@ -1,14 +1,23 @@ +"""Configuration of usage point.""" + import datetime +from pathlib import Path +import pytz from jinja2 import Template from mergedeep import Strategy, merge +from database.contracts import DatabaseContracts +from database.usage_points import DatabaseUsagePoints from dependencies import APPLICATION_PATH, str2bool +TIMEZONE = pytz.timezone("Europe/Paris") class Configuration: - def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False): - self.db = db + """Represents the configuration settings for the application.""" + + def __init__(self, title="", usage_point_id=0, display_usage_point_id=False): self.application_path = APPLICATION_PATH self.title = title self.usage_point_id = usage_point_id @@ -83,13 +92,13 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement du quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "consumption_detail": { "title": "Consommation détaillée", - "help": "Active/Désactive la récupération de la consommation détaillée.

ATTENTION, pour " - "fonctionner il vous faut activer le relevé de consommation horaire sur le site d'Enedis" + "help": "Active/Désactive la récupération de la consommation détaillée.

ATTENTION, " + "pour fonctionner il vous faut activer le relevé de consommation horaire sur le site d'Enedis" "Plus d'informations sont disponibles ici", "type": True, "default": True, @@ -101,7 +110,7 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement du quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "consumption_price_hc": { @@ -159,14 +168,14 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement de quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "production_detail": { "title": "Production détaillée", "help": "Active/Désactive la récupération de la production détaillée via vos panneaux solaires." - "

ATTENTION, pour fonctionner il vous faut activer le relevé de consommation horaire" - "sur le site d'Enedis
Plus d'informations sont disponibles " + "

ATTENTION, pour fonctionner il vous faut activer le relevé de consommation " + "horaire sur le site d'Enedis
Plus d'informations sont disponibles " "ici", "type": True, "default": False, @@ -178,7 +187,7 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) "par les API d'Enedis." "

ATTENTION, si cette valeur n'est pas correctement définie vous risquez de ne pas " "récupérer la totalité de vos données ou encore d'avoir un dépassement de quota", - "type": datetime.datetime.now(), + "type": datetime.datetime.now(tz=TIMEZONE), "default": "", }, "production_price": { @@ -206,16 +215,17 @@ def __init__(self, db, title="", usage_point_id=0, display_usage_point_id=False) strategy=Strategy.ADDITIVE, ) - def html(self): + def html(self): # noqa: PLR0912, PLR0912, PLR0915, C901 + """Generate the HTML representation of the configuration.""" current_cat = "" if self.usage_point_id != 0: configuration = f"""