Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
dd801a8
Initial plan
Copilot Apr 9, 2026
b51de05
Add delete sensor data filtering by source/start/until + UI delete da…
Copilot Apr 9, 2026
32fbab9
chore: added changelog entry
joshuaunity Apr 9, 2026
473f329
chore: Add detailed description and request body for delete sensor da…
joshuaunity Apr 9, 2026
e2b9bc5
Reuse stats data to populate delete source dropdown instead of extra …
Copilot Apr 10, 2026
ddbb4af
refactor: Refactor deleteData function for improved readability and s…
joshuaunity Apr 10, 2026
14af7ed
fix: pass fresh input values to deleteData to avoid stale page-load s…
Copilot Apr 10, 2026
0ff9702
fix: remove redundant btn-close listener causing toast.js classList n…
Copilot Apr 13, 2026
b07ec8c
fix: Prevents instance replacement/dispose race on already visible to…
joshuaunity Apr 13, 2026
6be0e99
feat: initialize delete-data inputs to the graph's current start/end …
Copilot Apr 13, 2026
fdaa8c5
feat: add 'Select all data' link to delete-data panel to set inputs t…
Copilot Apr 13, 2026
d58c513
fix: add sensor resolution to 'Select all data' end time; refresh sta…
Copilot Apr 13, 2026
9857109
fix: remove double-counted resolution, add no-cache to stats fetch, r…
Copilot Apr 13, 2026
344f5ed
fix: add Cache-Control: no-store to stats endpoint response
Copilot Apr 13, 2026
c00fd0c
explicit cache buster, so new and deleted data gets fresh stats
nhoening Apr 14, 2026
0200334
black
nhoening Apr 14, 2026
08ec702
small api specs update
nhoening Apr 14, 2026
5774afe
black
nhoening Apr 14, 2026
96bbb71
fix failing test - new function only should live on sensor page
nhoening Apr 14, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions documentation/changelog.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ New features
* Show sensor attributes on sensor page, if not empty [see `PR #2015 <https://www.github.com/FlexMeasures/flexmeasures/pull/2015>`_]
* Separate the ``StorageScheduler``'s tie-breaking preference for a full :abbr:`SoC (state of charge)` from its reported energy costs [see `PR #2023 <https://www.github.com/FlexMeasures/flexmeasures/pull/2023>`_]
* Improve asset audit log messages for JSON field edits (especially ``sensors_to_show`` and nested flex-config values) [see `PR #2055 <https://www.github.com/FlexMeasures/flexmeasures/pull/2055>`_]
* Added a form on the UI for deleting sensor data, with optional filtering by source and time range [see `PR #2095 <https://www.github.com/FlexMeasures/flexmeasures/pull/2095>`_]

Infrastructure / Support
----------------------
Expand Down
79 changes: 71 additions & 8 deletions flexmeasures/api/v3_0/sensors.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@
DurationField,
PlanningDurationField,
)
from flexmeasures.data.schemas import AssetIdField
from flexmeasures.data.schemas import AssetIdField, SourceIdField
from flexmeasures.api.common.schemas.search import SearchFilterField
from flexmeasures.data.schemas.scheduling import GetScheduleSchema
from flexmeasures.data.schemas.units import UnitField
Expand Down Expand Up @@ -1367,15 +1367,33 @@ def delete(self, id: int, sensor: Sensor):

@route("/<id>/data", methods=["DELETE"])
@use_kwargs({"sensor": SensorIdField(data_key="id")}, location="path")
@use_kwargs(
{
"source": SourceIdField(load_default=None),
"start": AwareDateTimeField(load_default=None),
"until": AwareDateTimeField(load_default=None),
},
location="json",
)
@permission_required_for_context("delete", ctx_arg_name="sensor")
@as_json
def delete_data(self, id: int, sensor: Sensor):
def delete_data(
self,
id: int,
sensor: Sensor,
source=None,
start=None,
until=None,
):
"""
.. :quickref: Sensors; Delete sensor data
---
delete:
summary: Delete sensor data
description: This endpoint deletes all data for a sensor.
description: >
This endpoint deletes data for a sensor.
Optionally, filter by source, start time and/or until time.
A missing source means all sources are deleted.
security:
- ApiKeyAuth: []
parameters:
Expand All @@ -1384,6 +1402,24 @@ def delete_data(self, id: int, sensor: Sensor):
description: ID of the sensor to delete data for.
required: true
schema: SensorId
requestBody:
required: false
content:
application/json:
schema:
type: object
properties:
source:
type: integer
description: ID of the data source to delete data for. If not provided, data from all sources is deleted.
start:
type: string
format: date-time
description: Only delete data with event start at or after this datetime (ISO 8601).
until:
type: string
format: date-time
description: Only delete data with event start before this datetime (ISO 8601).
responses:
204:
description: SENSOR_DATA_DELETED
Expand All @@ -1398,10 +1434,25 @@ def delete_data(self, id: int, sensor: Sensor):
tags:
- Sensors
"""
db.session.execute(delete(TimedBelief).filter_by(sensor_id=sensor.id))
query = delete(TimedBelief).where(TimedBelief.sensor_id == sensor.id)
if source is not None:
query = query.where(TimedBelief.source_id == source.id)
if start is not None:
query = query.where(TimedBelief.event_start >= start)
if until is not None:
query = query.where(TimedBelief.event_start < until)
db.session.execute(query)

audit_message = f"Deleted data for sensor '{sensor.name}': {sensor.id}"
if source is not None:
audit_message += f", source: {source.id}"
if start is not None:
audit_message += f", from: {start}"
if until is not None:
audit_message += f", until: {until}"
AssetAuditLog.add_record(
sensor.generic_asset,
f"Deleted data for sensor '{sensor.name}': {sensor.id}",
audit_message,
)
db.session.commit()

Expand All @@ -1414,6 +1465,7 @@ def delete_data(self, id: int, sensor: Sensor):
"sort_keys": fields.Boolean(data_key="sort", load_default=True),
"event_start_time": fields.Str(load_default=None),
"event_end_time": fields.Str(load_default=None),
"fresh": fields.Boolean(load_default=False),
},
location="query",
)
Expand All @@ -1426,6 +1478,7 @@ def get_stats(
event_start_time: str,
event_end_time: str,
sort_keys: bool,
fresh: bool,
):
"""
.. :quickref: Sensors; Get sensor stats
Expand Down Expand Up @@ -1454,7 +1507,12 @@ def get_stats(
format: date-time
- in: query
name: sort_keys
description: Whether to sort the stats by keys.
description: Whether to sort the stats by keys (defaults to true).
schema:
type: boolean
- in: query
name: fresh
description: Whether to compute fresh data, bypassing any cached results (defaults to false).
schema:
type: boolean
responses:
Expand Down Expand Up @@ -1487,9 +1545,14 @@ def get_stats(
tags:
- Sensors
"""

return (
get_sensor_stats(sensor, event_start_time, event_end_time, sort_keys),
get_sensor_stats(
sensor,
event_start_time,
event_end_time,
sort_keys,
from_cache=not fresh,
),
200,
)

Expand Down
152 changes: 152 additions & 0 deletions flexmeasures/api/v3_0/tests/test_sensors_delete_data_fresh_db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
"""Tests for DELETE /api/v3_0/sensors/<id>/data with source, start and until filters.

These tests use fresh_db (function-scoped) to ensure data isolation between tests,
since each test mutates the sensor data.
"""

from __future__ import annotations

import pytest

from flask import url_for
from sqlalchemy import select

from flexmeasures.data.models.time_series import TimedBelief
from flexmeasures import Sensor
from flexmeasures.api.v3_0.tests.utils import check_audit_log_event


@pytest.mark.parametrize("requesting_user", ["test_admin_user@seita.nl"], indirect=True)
def test_delete_sensor_data_by_source(
client, setup_api_fresh_test_data, requesting_user, fresh_db
):
"""Deleting sensor data with a source filter only removes beliefs from that source."""
existing_sensor = setup_api_fresh_test_data["some gas sensor"]
existing_sensor_id = existing_sensor.id

# Collect distinct source ids for this sensor
all_beliefs = fresh_db.session.scalars(
select(TimedBelief).filter(TimedBelief.sensor_id == existing_sensor_id)
).all()
assert len(all_beliefs) > 0
source_ids = list({b.source_id for b in all_beliefs})
assert len(source_ids) >= 2, "Need at least two sources for this test"

# Pick one source to delete
source_id_to_delete = source_ids[0]

# Delete sensor data for that source only
delete_data_response = client.delete(
url_for("SensorAPI:delete_data", id=existing_sensor_id),
json={"source": source_id_to_delete},
)
assert delete_data_response.status_code == 204

remaining_beliefs = fresh_db.session.scalars(
select(TimedBelief).filter(TimedBelief.sensor_id == existing_sensor_id)
).all()

# Beliefs from the deleted source should be gone
deleted_source_beliefs = [
b for b in remaining_beliefs if b.source_id == source_id_to_delete
]
assert deleted_source_beliefs == []

# Beliefs from other sources should remain
other_beliefs = [b for b in remaining_beliefs if b.source_id != source_id_to_delete]
assert len(other_beliefs) > 0

deleted_sensor = fresh_db.session.get(Sensor, existing_sensor_id)
assert deleted_sensor is not None, "Sensor itself should not be deleted"

check_audit_log_event(
db=fresh_db,
event=f"Deleted data for sensor '{existing_sensor.name}': {existing_sensor.id}, source: {source_id_to_delete}",
user=requesting_user,
asset=existing_sensor.generic_asset,
)


@pytest.mark.parametrize("requesting_user", ["test_admin_user@seita.nl"], indirect=True)
def test_delete_sensor_data_by_start(
client, setup_api_fresh_test_data, requesting_user, fresh_db
):
"""Deleting sensor data with a start filter only removes beliefs at or after that time."""
existing_sensor = setup_api_fresh_test_data["some gas sensor"]
existing_sensor_id = existing_sensor.id

all_beliefs = fresh_db.session.scalars(
select(TimedBelief).filter(TimedBelief.sensor_id == existing_sensor_id)
).all()
assert len(all_beliefs) >= 2

# Use the second distinct event_start as the cutoff: beliefs at or after it should be deleted
event_starts = sorted({b.event_start for b in all_beliefs})
cutoff = event_starts[1]

delete_data_response = client.delete(
url_for("SensorAPI:delete_data", id=existing_sensor_id),
json={"start": cutoff.isoformat()},
)
assert delete_data_response.status_code == 204

remaining_beliefs = fresh_db.session.scalars(
select(TimedBelief).filter(TimedBelief.sensor_id == existing_sensor_id)
).all()

# All remaining beliefs should have event_start < cutoff
for b in remaining_beliefs:
assert b.event_start < cutoff

deleted_sensor = fresh_db.session.get(Sensor, existing_sensor_id)
assert deleted_sensor is not None, "Sensor itself should not be deleted"

check_audit_log_event(
db=fresh_db,
event=f"Deleted data for sensor '{existing_sensor.name}': {existing_sensor.id}, from: {cutoff}",
user=requesting_user,
asset=existing_sensor.generic_asset,
)


@pytest.mark.parametrize("requesting_user", ["test_admin_user@seita.nl"], indirect=True)
def test_delete_sensor_data_by_until(
client, setup_api_fresh_test_data, requesting_user, fresh_db
):
"""Deleting sensor data with an until filter only removes beliefs before that time."""
existing_sensor = setup_api_fresh_test_data["some gas sensor"]
existing_sensor_id = existing_sensor.id

all_beliefs = fresh_db.session.scalars(
select(TimedBelief).filter(TimedBelief.sensor_id == existing_sensor_id)
).all()
assert len(all_beliefs) >= 2

# Use the last distinct event_start as the until cutoff:
# beliefs strictly before it should be deleted
event_starts = sorted({b.event_start for b in all_beliefs})
cutoff = event_starts[-1]

delete_data_response = client.delete(
url_for("SensorAPI:delete_data", id=existing_sensor_id),
json={"until": cutoff.isoformat()},
)
assert delete_data_response.status_code == 204

remaining_beliefs = fresh_db.session.scalars(
select(TimedBelief).filter(TimedBelief.sensor_id == existing_sensor_id)
).all()

# All remaining beliefs should have event_start >= cutoff
for b in remaining_beliefs:
assert b.event_start >= cutoff

deleted_sensor = fresh_db.session.get(Sensor, existing_sensor_id)
assert deleted_sensor is not None, "Sensor itself should not be deleted"

check_audit_log_event(
db=fresh_db,
event=f"Deleted data for sensor '{existing_sensor.name}': {existing_sensor.id}, until: {cutoff}",
user=requesting_user,
asset=existing_sensor.generic_asset,
)
8 changes: 6 additions & 2 deletions flexmeasures/data/services/sensors.py
Original file line number Diff line number Diff line change
Expand Up @@ -580,7 +580,11 @@ def _get_sensor_stats(


def get_sensor_stats(
sensor: Sensor, event_start_time: str, event_end_time: str, sort_keys: bool = True
sensor: Sensor,
event_start_time: str,
event_end_time: str,
sort_keys: bool = True,
from_cache: bool = True,
) -> dict:
"""Get stats for a sensor.

Expand All @@ -591,7 +595,7 @@ def get_sensor_stats(
bucket = round(time.time() / _SENSOR_STATS_TTL)
key = (sensor.id, event_end_time, event_start_time, sort_keys, bucket)

if key in _sensor_stats_cache:
if from_cache and key in _sensor_stats_cache:
return _sensor_stats_cache[key]

result = _get_sensor_stats(sensor, event_end_time, event_start_time, sort_keys)
Expand Down
41 changes: 39 additions & 2 deletions flexmeasures/ui/static/js/flexmeasures.js
Original file line number Diff line number Diff line change
Expand Up @@ -649,7 +649,7 @@ function updateStatsTable(stats, tableBody) {
});
}

function loadSensorStats(sensor_id, event_start_time="", event_end_time="") {
function loadSensorStats(sensor_id, event_start_time="", event_end_time="", fresh=false) {
const spinner = document.getElementById('spinner-run-simulation');
const dropdownContainer = document.getElementById('sourceKeyDropdownContainer');
const tableBody = document.getElementById('statsTableBody');
Expand All @@ -664,7 +664,11 @@ function loadSensorStats(sensor_id, event_start_time="", event_end_time="") {
if (toggleStatsCheckbox.checked) {
queryParams = `?sort=false&event_start_time=${event_start_time}&event_end_time=${event_end_time}`
}

//add a cache buster to ensure we get the latest data after an upload
if (fresh === true) {
queryParams += `&fresh=true`;
}

// Enable all the default behaviors on every API call.
dropdownMenu.innerHTML = '';
noDataWarning.classList.add('d-none');
Expand Down Expand Up @@ -708,6 +712,39 @@ function loadSensorStats(sensor_id, event_start_time="", event_end_time="") {
const firstSourceKey = getLatestBeliefName(data);
dropdownButton.textContent = firstSourceKey;
updateStatsTable(data[firstSourceKey], tableBody);

// Populate the "Delete data" source dropdown if it exists on the page,
// re-using the stats data already fetched to avoid a duplicate API call.
const deleteSourceSelect = document.getElementById('deleteDataSource');
if (deleteSourceSelect) {
// Keep only the "All sources" placeholder option, then add sources from stats
deleteSourceSelect.innerHTML = '<option value="">All sources</option>';
Object.keys(data).forEach(sourceKey => {
const idMatch = sourceKey.match(/\(ID:\s*(\d+)\)$/);
if (!idMatch) { return; }
const option = document.createElement('option');
option.value = idMatch[1];
option.textContent = sourceKey;
deleteSourceSelect.appendChild(option);
});
}

// Notify the "Delete data" panel of the overall first/last event times
// across all sources so the "Select all data" link can populate the inputs.
const firstEventDates = Object.values(data)
.map(d => new Date(d["First event start"]))
.filter(d => !isNaN(d.getTime()));
const lastEventDates = Object.values(data)
.map(d => new Date(d["Last event end"]))
.filter(d => !isNaN(d.getTime()));
if (firstEventDates.length > 0 && lastEventDates.length > 0) {
document.dispatchEvent(new CustomEvent('sensorDataRangeAvailable', {
detail: {
firstEventStart: new Date(Math.min(...firstEventDates)),
lastEventEnd: new Date(Math.max(...lastEventDates))
}
}));
}
} else {
// If the stats table is empty, make the properties table full width
noDataWarning.classList.remove('d-none');
Expand Down
Loading
Loading