Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions dojo/db_migrations/0262_system_settings_max_import_history.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the optional ``max_import_history`` limit to System Settings."""

    dependencies = [
        ("dojo", "0261_remove_url_insert_insert_remove_url_update_update_and_more"),
    ]

    new_field = models.IntegerField(
        default=None,
        null=True,
        blank=True,
        verbose_name="Max Import History",
        help_text="When set, the oldest import history records will be deleted when a test exceeds this number of imports. Leave empty to keep all history.",
    )

    operations = [
        migrations.AddField(
            model_name="system_settings",
            name="max_import_history",
            field=new_field,
        ),
    ]
3 changes: 3 additions & 0 deletions dojo/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,6 +325,9 @@ class System_Settings(models.Model):
"issue reaches the maximum "
"number of duplicates, the "
"oldest will be deleted. Duplicate will not be deleted when left empty. A value of 0 will remove all duplicates."))
# Optional per-test cap on Test_Import history; None (the default) means
# the cleanup task skips this feature entirely (see dojo/tasks.py).
max_import_history = models.IntegerField(blank=True, null=True, default=None,
verbose_name=_("Max Import History"),
help_text=_("When set, the oldest import history records will be deleted when a test exceeds this number of imports. Leave empty to keep all history."))

email_from = models.CharField(max_length=200, default="no-reply@example.com", blank=True)

Expand Down
2 changes: 2 additions & 0 deletions dojo/settings/settings.dist.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,7 @@
# we limit the amount of duplicates that can be deleted in a single run of that job
# to prevent overlapping runs of that job from occurrring
DD_DUPE_DELETE_MAX_PER_RUN=(int, 200),
DD_IMPORT_HISTORY_MAX_PER_OBJECT=(int, 200),
# when enabled 'mitigated date' and 'mitigated by' of a finding become editable
DD_EDITABLE_MITIGATED_DATA=(bool, False),
# new feature that tracks history across multiple reimports for the same test
Expand Down Expand Up @@ -1773,6 +1774,7 @@ def saml2_attrib_map_format(din):
DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value

# Upper bound on duplicates deleted in a single run of the dedupe-delete job.
DUPE_DELETE_MAX_PER_RUN = env("DD_DUPE_DELETE_MAX_PER_RUN")
# Bounds the work done per run of the import-history cleanup task
# (default 200 via DD_IMPORT_HISTORY_MAX_PER_OBJECT).
IMPORT_HISTORY_MAX_PER_OBJECT = env("DD_IMPORT_HISTORY_MAX_PER_OBJECT")

DISABLE_FINDING_MERGE = env("DD_DISABLE_FINDING_MERGE")

Expand Down
40 changes: 39 additions & 1 deletion dojo/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,45 @@ def jira_status_reconciliation_task(*args, **kwargs):
return jira_status_reconciliation(*args, **kwargs)


@app.task
def async_import_history_cleanup(*args, **kwargs):
    """
    Celery task: prune excess Test_Import history records.

    Runs the cleanup inside a pghistory context so the deletions are
    attributed to this task in the audit trail.
    """
    # Single plain @app.task decoration: the previous stacked @app.task on top
    # of @app.task(bind=True) double-wrapped the task, and bind=True was wrong
    # anyway — this function takes no `self`, so Celery's injected task
    # instance would have been silently swallowed by *args.
    with pghistory.context(source="import_history_cleanup_task"):
        _async_import_history_cleanup_impl()


def _async_import_history_cleanup_impl():
"""Delete oldest Test_Import records when a test exceeds max_import_history."""
try:
system_settings = System_Settings.objects.get()
max_history = system_settings.max_import_history
max_per_run = settings.IMPORT_HISTORY_MAX_PER_OBJECT
except System_Settings.DoesNotExist:
logger.error('System_Settings not found, skipping import history cleanup')
return
Comment on lines +223 to +225
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

please log something or just let the exception bobble up


if max_history is None:
logger.info("skipping import history cleanup: max_import_history not configured")
return

logger.info("cleaning up import history (max per test: %s, max deletes per run: %s)", max_history, max_per_run)

tests_with_excess = Test_Import.objects \
.values("test") \
.annotate(import_count=Count("id")) \
.filter(import_count__gt=max_history)[:max_per_run]

total_deleted_count = 0
for entry in tests_with_excess:
test_id = entry["test"]
imports = Test_Import.objects.filter(test_id=test_id).order_by("created")
excess_count = entry["import_count"] - max_history
for test_import in imports[:excess_count]:
logger.debug("deleting Test_Import id %s for test %s", test_import.id, test_id)
test_import.delete()
total_deleted_count += 1

logger.info("total import history records deleted: %s", total_deleted_count)
def fix_loop_duplicates_task(*args, **kwargs):
# Wrap with pghistory context for audit trail
with pghistory.context(source="fix_loop_duplicates"):
Expand Down
Loading