
Commit 73af855
Authored Feb 18, 2025

Merge pull request #11848 from DefectDojo/release/2.43.3
Release: Merge release into master from: release/2.43.3

2 parents (31f0be8 + ba42d83), commit 73af855

15 files changed, +752 -72 lines

components/package.json (+1 -1)

@@ -1,6 +1,6 @@
 {
   "name": "defectdojo",
-  "version": "2.43.2",
+  "version": "2.43.3",
   "license" : "BSD-3-Clause",
   "private": true,
   "dependencies": {

dojo/__init__.py (+1 -1)

@@ -4,6 +4,6 @@
 # Django starts so that shared_task will use this app.
 from .celery import app as celery_app  # noqa: F401
 
-__version__ = "2.43.2"
+__version__ = "2.43.3"
 __url__ = "https://github.com/DefectDojo/django-DefectDojo"
 __docs__ = "https://documentation.defectdojo.com"

dojo/api_v2/serializers.py (+26 -27)

@@ -167,7 +167,7 @@ class ImportStatisticsSerializer(serializers.Serializer):
     )
     delta = DeltaStatisticsSerializer(
         required=False,
-        help_text="Finding statistics of modifications made by the reimport. Only available when TRACK_IMPORT_HISTORY hass not disabled.",
+        help_text="Finding statistics of modifications made by the reimport. Only available when TRACK_IMPORT_HISTORY has not been disabled.",
     )
     after = SeverityStatusStatisticsSerializer(
         help_text="Finding statistics as stored in Defect Dojo after the import",
@@ -1920,43 +1920,42 @@ class Meta:
 
     # Overriding this to push add Push to JIRA functionality
     def create(self, validated_data):
-        # remove tags from validated data and store them seperately
+        # Pop off of some fields that should not be sent to the model at this time
         to_be_tagged, validated_data = self._pop_tags(validated_data)
-
-        # pop push_to_jira so it won't get send to the model as a field
-        push_to_jira = validated_data.pop("push_to_jira")
-
-        # Save vulnerability ids and pop them
-        if "vulnerability_id_set" in validated_data:
-            vulnerability_id_set = validated_data.pop("vulnerability_id_set")
-        else:
-            vulnerability_id_set = None
-
-        # first save, so we have an instance to get push_all_to_jira from
-        new_finding = super(TaggitSerializer, self).create(validated_data)
-
-        if vulnerability_id_set:
-            vulnerability_ids = []
-            for vulnerability_id in vulnerability_id_set:
-                vulnerability_ids.append(vulnerability_id["vulnerability_id"])
-            validated_data["cve"] = vulnerability_ids[0]
-            save_vulnerability_ids(new_finding, vulnerability_ids)
-            new_finding.save()
-
+        push_to_jira = validated_data.pop("push_to_jira", False)
+        notes = validated_data.pop("notes", None)
+        found_by = validated_data.pop("found_by", None)
+        reviewers = validated_data.pop("reviewers", None)
+        # Process the vulnerability IDs specially
+        parsed_vulnerability_ids = []
+        if (vulnerability_ids := validated_data.pop("vulnerability_id_set", None)):
+            for vulnerability_id in vulnerability_ids:
+                parsed_vulnerability_ids.append(vulnerability_id["vulnerability_id"])
+            validated_data["cve"] = parsed_vulnerability_ids[0]
+        # Create a findings in memory so that we have access to unsaved_vulnerability_ids
+        new_finding = Finding(**validated_data)
+        new_finding.unsaved_vulnerability_ids = parsed_vulnerability_ids
+        new_finding.save()
+        # Deal with all of the many to many things
+        if notes:
+            new_finding.notes.set(notes)
+        if found_by:
+            new_finding.found_by.set(found_by)
+        if reviewers:
+            new_finding.reviewers.set(reviewers)
+        if parsed_vulnerability_ids:
+            save_vulnerability_ids(new_finding, parsed_vulnerability_ids)
         # TODO: JIRA can we remove this is_push_all_issues, already checked in
         # apiv2 viewset?
         push_to_jira = push_to_jira or jira_helper.is_push_all_issues(
             new_finding,
         )
-
         # If we need to push to JIRA, an extra save call is needed.
         # TODO: try to combine create and save, but for now I'm just fixing a
         # bug and don't want to change to much
         if push_to_jira or new_finding:
             new_finding.save(push_to_jira=push_to_jira)
-
-        # not sure why we are returning a tag_object, but don't want to change
-        # too much now as we're just fixing a bug
+        # This final call will save the finding again and return it
         return self._save_tags(new_finding, to_be_tagged)
 
     def validate(self, data):
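
Note on the serializer change above: many-to-many fields (notes, found_by, reviewers) cannot be passed to a Django model constructor, so the rewritten create() pops them first, saves the Finding to obtain a primary key, and only then wires them up with .set(). A minimal, self-contained sketch of that pattern; Reviewer, Report, and create_report are hypothetical names, not DefectDojo code:

```python
from django.db import models


class Reviewer(models.Model):
    name = models.CharField(max_length=100)


class Report(models.Model):
    title = models.CharField(max_length=200)
    reviewers = models.ManyToManyField(Reviewer)


def create_report(validated_data: dict) -> Report:
    # M2M values must be popped before instantiating the model directly.
    reviewers = validated_data.pop("reviewers", None)
    report = Report(**validated_data)
    report.save()  # a primary key must exist before .set() can be called
    if reviewers:
        report.reviewers.set(reviewers)
    return report
```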

dojo/importers/base_importer.py (+49 -14)

@@ -5,6 +5,7 @@
 from django.core.exceptions import ValidationError
 from django.core.files.base import ContentFile
 from django.core.files.uploadedfile import TemporaryUploadedFile
+from django.db import IntegrityError
 from django.urls import reverse
 from django.utils.timezone import make_aware
 
@@ -262,6 +263,13 @@ def determine_process_method(
             **kwargs,
         )
 
+    def determine_deduplication_algorithm(self) -> str:
+        """
+        Determines what dedupe algorithm to use for the Test being processed.
+        :return: A string representing the dedupe algorithm to use.
+        """
+        return self.test.deduplication_algorithm
+
     def update_test_meta(self):
         """
         Update the test with some values stored in the kwargs dict. The common
@@ -357,53 +365,80 @@ def update_import_history(
             commit_hash=self.commit_hash,
             type=self.import_type,
         )
-        # Define all of the respective import finding actions for the test import object
-        test_import_finding_action_list = []
+
+        # Create a history record for each finding
         for finding in closed_findings:
-            logger.debug(f"preparing Test_Import_Finding_Action for closed finding: {finding.id}")
-            test_import_finding_action_list.append(Test_Import_Finding_Action(
+            self.create_import_history_record_safe(Test_Import_Finding_Action(
                 test_import=test_import,
                 finding=finding,
                 action=IMPORT_CLOSED_FINDING,
             ))
         for finding in new_findings:
-            logger.debug(f"preparing Test_Import_Finding_Action for created finding: {finding.id}")
-            test_import_finding_action_list.append(Test_Import_Finding_Action(
+            self.create_import_history_record_safe(Test_Import_Finding_Action(
                 test_import=test_import,
                 finding=finding,
                 action=IMPORT_CREATED_FINDING,
             ))
         for finding in reactivated_findings:
-            logger.debug(f"preparing Test_Import_Finding_Action for reactivated finding: {finding.id}")
-            test_import_finding_action_list.append(Test_Import_Finding_Action(
+            self.create_import_history_record_safe(Test_Import_Finding_Action(
                 test_import=test_import,
                 finding=finding,
                 action=IMPORT_REACTIVATED_FINDING,
             ))
         for finding in untouched_findings:
-            logger.debug(f"preparing Test_Import_Finding_Action for untouched finding: {finding.id}")
-            test_import_finding_action_list.append(Test_Import_Finding_Action(
+            self.create_import_history_record_safe(Test_Import_Finding_Action(
                 test_import=test_import,
                 finding=finding,
                 action=IMPORT_UNTOUCHED_FINDING,
             ))
-        # Bulk create all the defined objects
-        Test_Import_Finding_Action.objects.bulk_create(test_import_finding_action_list)
 
         # Add any tags to the findings imported if necessary
         if self.apply_tags_to_findings and self.tags:
            for finding in test_import.findings_affected.all():
                for tag in self.tags:
-                    finding.tags.add(tag)
+                    self.add_tags_safe(finding, tag)
         # Add any tags to any endpoints of the findings imported if necessary
         if self.apply_tags_to_endpoints and self.tags:
             for finding in test_import.findings_affected.all():
                 for endpoint in finding.endpoints.all():
                     for tag in self.tags:
-                        endpoint.tags.add(tag)
+                        self.add_tags_safe(endpoint, tag)
 
         return test_import
 
+    def create_import_history_record_safe(
+        self,
+        test_import_finding_action,
+    ):
+        """Creates an import history record, while catching any IntegrityErrors that might happen because of the background job having deleted a finding"""
+        logger.debug(f"creating Test_Import_Finding_Action for finding: {test_import_finding_action.finding.id} action: {test_import_finding_action.action}")
+        try:
+            test_import_finding_action.save()
+        except IntegrityError as e:
+            # This try catch makes us look we don't know what we're doing, but in https://github.com/DefectDojo/django-DefectDojo/issues/6217 we decided that for now this is the best solution
+            logger.warning("Error creating Test_Import_Finding_Action: %s", e)
+            logger.debug("Error creating Test_Import_Finding_Action, finding marked as duplicate and deleted ?")
+
+    def add_tags_safe(
+        self,
+        finding_or_endpoint,
+        tag,
+    ):
+        """Adds tags to a finding or endpoint, while catching any IntegrityErrors that might happen because of the background job having deleted a finding"""
+        if not isinstance(finding_or_endpoint, Finding) and not isinstance(finding_or_endpoint, Endpoint):
+            msg = "finding_or_endpoint must be a Finding or Endpoint object"
+            raise TypeError(msg)
+
+        msg = "finding" if isinstance(finding_or_endpoint, Finding) else "endpoint" if isinstance(finding_or_endpoint, Endpoint) else "unknown"
+        logger.debug(f"  adding tag: {tag} to " + msg + f"{finding_or_endpoint.id}")
+
+        try:
+            finding_or_endpoint.tags.add(tag)
+        except IntegrityError as e:
+            # This try catch makes us look we don't know what we're doing, but in https://github.com/DefectDojo/django-DefectDojo/issues/6217 we decided that for now this is the best solution
+            logger.warning("Error adding tag: %s", e)
+            logger.debug("Error adding tag, finding marked as duplicate and deleted ?")
+
     def construct_imported_message(
         self,
         finding_count: int = 0,
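
Note on the importer change above: swapping the single bulk_create for per-record saves costs one INSERT per action, but a row that raises IntegrityError (for example, because the dedupe background job deleted its finding mid-import) no longer aborts the whole history update. A standalone sketch of the catch-and-log pattern, assuming only a generic unsaved Django model instance:

```python
import logging

from django.db import IntegrityError

logger = logging.getLogger(__name__)


def save_record_safe(record) -> bool:
    """Save a model instance, tolerating rows whose related object vanished."""
    try:
        record.save()
    except IntegrityError as e:
        # A referenced row may have been deleted by a concurrent job
        # between the time it was looked up and the time of this save.
        logger.warning("Could not save %s: %s", type(record).__name__, e)
        return False
    return True
```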

dojo/importers/default_importer.py (+33 -18)

@@ -254,29 +254,44 @@ def close_old_findings(
         # First check if close old findings is desired
         if not self.close_old_findings_toggle:
             return []
-        logger.debug("REIMPORT_SCAN: Closing findings no longer present in scan report")
-        # Close old active findings that are not reported by this scan.
-        # Refactoring this to only call test.finding_set.values() once.
-        findings = findings.values()
-        mitigated_hash_codes = []
+
+        logger.debug("IMPORT_SCAN: Closing findings no longer present in scan report")
+        # Remove all the findings that are coming from the report already mitigated
         new_hash_codes = []
-        for finding in findings:
-            new_hash_codes.append(finding["hash_code"])
-            if finding.get("is_mitigated", None):
-                mitigated_hash_codes.append(finding["hash_code"])
-                for hash_code in new_hash_codes:
-                    if hash_code == finding["hash_code"]:
-                        new_hash_codes.remove(hash_code)
+        new_unique_ids_from_tool = []
+        for finding in findings.values():
+            # Do not process closed findings in the report
+            if finding.get("is_mitigated", False):
+                continue
+            # Grab the hash code
+            if (hash_code := finding.get("hash_code")) is not None:
+                new_hash_codes.append(hash_code)
+            if (unique_id_from_tool := finding.get("unique_id_from_tool")) is not None:
+                new_unique_ids_from_tool.append(unique_id_from_tool)
         # Get the initial filtered list of old findings to be closed without
         # considering the scope of the product or engagement
-        old_findings = Finding.objects.exclude(
-            test=self.test,
-        ).exclude(
-            hash_code__in=new_hash_codes,
-        ).filter(
+        old_findings = Finding.objects.filter(
             test__test_type=self.test.test_type,
             active=True,
-        )
+        ).exclude(test=self.test)
+        # Filter further based on the deduplication algorithm set on the test
+        self.deduplication_algorithm = self.determine_deduplication_algorithm()
+        if self.deduplication_algorithm in ["hash_code", "legacy"]:
+            old_findings = old_findings.exclude(
+                hash_code__in=new_hash_codes,
+            )
+        if self.deduplication_algorithm == "unique_id_from_tool":
+            old_findings = old_findings.exclude(
+                unique_id_from_tool__in=new_unique_ids_from_tool,
+            )
+        if self.deduplication_algorithm == "unique_id_from_tool_or_hash_code":
+            old_findings = old_findings.exclude(
+                (Q(hash_code__isnull=False) & Q(hash_code__in=new_hash_codes))
+                | (
+                    Q(unique_id_from_tool__isnull=False)
+                    & Q(unique_id_from_tool__in=new_unique_ids_from_tool)
+                ),
+            )
         # Accommodate for product scope or engagement scope
         if self.close_old_findings_product_scope:
             old_findings = old_findings.filter(test__engagement__product=self.test.engagement.product)
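
Note on the change above: close_old_findings() now excludes findings according to the test's deduplication algorithm instead of always keying on hash_code. The three if-branches are mutually exclusive, so they could equally be written as a single dispatch; a condensed sketch under that assumption (exclude_matches is a hypothetical helper, not DefectDojo code):

```python
from django.db.models import Q, QuerySet


def exclude_matches(old_findings: QuerySet, algorithm: str,
                    new_hash_codes: list, new_unique_ids: list) -> QuerySet:
    # Keep only the old findings that do NOT match anything in the new report.
    if algorithm in ["hash_code", "legacy"]:
        return old_findings.exclude(hash_code__in=new_hash_codes)
    if algorithm == "unique_id_from_tool":
        return old_findings.exclude(unique_id_from_tool__in=new_unique_ids)
    if algorithm == "unique_id_from_tool_or_hash_code":
        # The isnull guards make explicit that only populated fields can match.
        return old_findings.exclude(
            (Q(hash_code__isnull=False) & Q(hash_code__in=new_hash_codes))
            | (Q(unique_id_from_tool__isnull=False)
               & Q(unique_id_from_tool__in=new_unique_ids)),
        )
    return old_findings
```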

dojo/importers/default_reimporter.py (-7)

@@ -147,13 +147,6 @@ def process_scan(
             test_import_history,
         )
 
-    def determine_deduplication_algorithm(self) -> str:
-        """
-        Determines what dedupe algorithm to use for the Test being processed.
-        :return: A string representing the dedupe algorithm to use.
-        """
-        return self.test.deduplication_algorithm
-
     def process_findings(
         self,
         parsed_findings: list[Finding],

dojo/product/views.py (+1)

@@ -1381,6 +1381,7 @@ def process_finding_form(self, request: HttpRequest, test: Test, context: dict):
         finding.reporter = request.user
         finding.numerical_severity = Finding.get_numerical_severity(finding.severity)
         finding.tags = context["form"].cleaned_data["tags"]
+        finding.unsaved_vulnerability_ids = context["form"].cleaned_data["vulnerability_ids"].split()
         finding.save()
         # Save and add new endpoints
         finding_helper.add_endpoints(finding, context["form"])
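
Note on the one-line change above: the form's vulnerability_ids field holds one ID per line, and str.split() with no argument splits on any run of whitespace while discarding empty strings, so trailing newlines are harmless. A quick illustration with hypothetical values:

```python
raw = "CVE-2021-0001\nCVE-2021-0002\n"  # hypothetical form input

# No-argument split() never yields empty strings, unlike split("\n"):
assert raw.split() == ["CVE-2021-0001", "CVE-2021-0002"]
assert raw.split("\n") == ["CVE-2021-0001", "CVE-2021-0002", ""]
```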

dojo/templates/base.html (+1 -1)

@@ -126,7 +126,7 @@
         <div class="custom-search-form">
             <form id="custom_search_form" role="search" method="get" action="{% url 'simple_search' %}">
                 <div class="input-group">
-                    <input id="simple_search" label="simple_search" aria_label="simple_search" type="text" name="query" class="form-control"
+                    <input id="simple_search" label="simple_search" aria-label="simple_search" type="text" name="query" class="form-control"
                         placeholder="{% trans "Search" %}..." value="{{clean_query}}">
                    <span class="input-group-btn">
                        <button id="simple_search_submit" class="btn btn-primary" type="submit" aria-label="Search">

dojo/test/views.py (+1)

@@ -538,6 +538,7 @@ def process_finding_form(self, request: HttpRequest, test: Test, context: dict):
         finding.reporter = request.user
         finding.numerical_severity = Finding.get_numerical_severity(finding.severity)
         finding.tags = context["form"].cleaned_data["tags"]
+        finding.unsaved_vulnerability_ids = context["form"].cleaned_data["vulnerability_ids"].split()
         finding.save()
         # Save and add new endpoints
         finding_helper.add_endpoints(finding, context["form"])

helm/defectdojo/Chart.yaml (+2 -2)

@@ -1,8 +1,8 @@
 apiVersion: v2
-appVersion: "2.43.2"
+appVersion: "2.43.3"
 description: A Helm chart for Kubernetes to install DefectDojo
 name: defectdojo
-version: 1.6.173
+version: 1.6.174
 icon: https://www.defectdojo.org/img/favicon.ico
 maintainers:
 - name: madchap

requirements.txt (+1 -1)

@@ -32,7 +32,7 @@ Markdown==3.7
 openpyxl==3.1.5
 Pillow==11.1.0 # required by django-imagekit
 psycopg[c]==3.2.4
-cryptography==44.0.0
+cryptography==44.0.1
 python-dateutil==2.9.0.post0
 pytz==2025.1
 redis==5.2.1
