6 changes: 3 additions & 3 deletions dojo/api_v2/serializers.py
@@ -219,7 +219,7 @@ def to_internal_value(self, data):
except ValueError:
self.fail("invalid_json")

logger.debug(f"data as json: {data}")
logger.debug("data as json: %s", data)

if not isinstance(data, list):
self.fail("not_a_list", input_type=type(data).__name__)
@@ -238,7 +238,7 @@ def to_internal_value(self, data):
tag_validator(sub, exception_class=RestFrameworkValidationError)
data_safe.extend(substrings)

logger.debug(f"result after rendering tags: {data_safe}")
logger.debug("result after rendering tags: %s", data_safe)
return data_safe

def to_representation(self, value):
@@ -1863,7 +1863,7 @@ class Meta:

# Overriding this to push add Push to JIRA functionality
def create(self, validated_data):
logger.debug(f"Creating finding with validated data: {validated_data}")
logger.debug("Creating finding with validated data: %s", validated_data)
push_to_jira = validated_data.pop("push_to_jira", False)
notes = validated_data.pop("notes", None)
found_by = validated_data.pop("found_by", None)
2 changes: 1 addition & 1 deletion dojo/api_v2/views.py
@@ -2648,7 +2648,7 @@ def perform_create(self, serializer):
jira_driver = test or (engagement or (product or None))
if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None):
push_to_jira = push_to_jira or jira_project.push_all_issues
logger.debug(f"push_to_jira: {push_to_jira}")
logger.debug("push_to_jira: %s", push_to_jira)
serializer.save(push_to_jira=push_to_jira)


4 changes: 2 additions & 2 deletions dojo/finding/views.py
@@ -486,7 +486,7 @@ def get_request_response(self, finding: Finding):
burp_request = base64.b64decode(request_response.burpRequestBase64)
burp_response = base64.b64decode(request_response.burpResponseBase64)
except Exception as e:
logger.debug(f"unsuspected error: {e}")
logger.debug("unsuspected error: %s", e)

return {
"burp_request": burp_request,
@@ -1597,7 +1597,7 @@ def request_finding_review(request, fid):
reviewers = Dojo_User.objects.filter(id__in=form.cleaned_data["reviewers"])
reviewers_string = ", ".join([f"{user} ({user.id})" for user in reviewers])
reviewers_usernames = [user.username for user in reviewers]
logger.debug(f"Asking {reviewers_string} for review")
logger.debug("Asking %s for review", reviewers_string)

create_notification(
event="review_requested", # TODO: - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces
2 changes: 1 addition & 1 deletion dojo/importers/endpoint_manager.py
@@ -111,7 +111,7 @@ def clean_unsaved_endpoints(
try:
endpoint.clean()
except ValidationError as e:
logger.warning(f"DefectDojo is storing broken endpoint because cleaning wasn't successful: {e}")
logger.warning("DefectDojo is storing broken endpoint because cleaning wasn't successful: %s", e)

def chunk_endpoints_and_reactivate(
self,
2 changes: 1 addition & 1 deletion dojo/jira_link/helper.py
@@ -204,7 +204,7 @@ def can_be_pushed_to_jira(obj, form=None):
return False, f"Finding below the minimum JIRA severity threshold ({System_Settings.objects.get().jira_minimum_severity}).", "error_below_minimum_threshold"
elif isinstance(obj, Finding_Group):
finding_group_status = _safely_get_obj_status_for_jira(obj)
logger.error(f"Finding group status: {finding_group_status}")
logger.error("Finding group status: %s", finding_group_status)
if "Empty" in finding_group_status:
return False, f"{to_str_typed(obj)} cannot be pushed to jira as it contains no findings above minimum treshold.", "error_empty"

10 changes: 5 additions & 5 deletions dojo/management/commands/import_all_unittest_scans.py
@@ -120,7 +120,7 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1,
return self.import_scan(payload, expected_http_status_code)

def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engagement=10, engagements_per_product=50, products_per_product_type=15, *, include_very_big_scans=False, **kwargs):
logger.info(f"product_name_prefix: {product_name_prefix}, tests_per_engagement: {tests_per_engagement}, engagements_per_product: {engagements_per_product}, products_per_product_type: {products_per_product_type}")
logger.info("product_name_prefix: %s, tests_per_engagement: %s, engagements_per_product: %s, products_per_product_type: %s", product_name_prefix, tests_per_engagement, engagements_per_product, products_per_product_type)
product_type_prefix = "Sample scans " + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
product_type_index = 1

@@ -159,7 +159,7 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme
for attribute_name in dir(module):
attribute = getattr(module, attribute_name)
if isclass(attribute) and attribute_name.lower() == module_name.replace("_", "") + "parser":
logger.debug(f"Loading {module_name} parser")
logger.debug("Loading %s parser", module_name)
scan_dir = Path("unittests") / "scans" / module_name
for scan_file in scan_dir.glob("*.json"):
if include_very_big_scans or scan_file.name != "very_many_vulns.json": # jfrog_xray file is huge and takes too long to import
@@ -183,12 +183,12 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme
error_messages[module_name + "/" + scan_file.name] = result.get("message", str(e))

except:
logger.exception(f"failed to load {module_name}")
logger.exception("failed to load %s", module_name)
raise

logger.error(f"Error count: {error_count}")
logger.error("Error count: %s", error_count)
for scan, message in error_messages.items():
logger.error(f"Error importing scan {scan}: {message}")
logger.error("Error importing scan %s: %s", scan, message)

def handle(self, *args, **options):
logger.info("EXPERIMENTAL: This command may be changed/deprecated/removed without prior notice.")
4 changes: 2 additions & 2 deletions dojo/management/commands/import_github_languages.py
@@ -43,7 +43,7 @@ def handle(self, *args, **options):
try:
language_type, created = Language_Type.objects.get_or_create(language=name)
except Language_Type.MultipleObjectsReturned:
logger.warning(f"Language_Type {name} exists multiple times")
logger.warning("Language_Type %s exists multiple times", name)
continue

if created:
@@ -52,4 +52,4 @@ def handle(self, *args, **options):
language_type.color = element.get("color", 0)
language_type.save()

logger.info(f"Finished importing languages from GitHub, added {new_language_types} Language_Types")
logger.info("Finished importing languages from GitHub, added %s Language_Types", new_language_types)
6 changes: 3 additions & 3 deletions dojo/models.py
@@ -1866,7 +1866,7 @@ def clean(self):
action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
for remove_str in null_char_list:
self.path = self.path.replace(remove_str, "%00")
- logger.error(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+ logger.error('Path "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
if self.path == "":
self.path = None

@@ -1879,7 +1879,7 @@ def clean(self):
action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
for remove_str in null_char_list:
self.query = self.query.replace(remove_str, "%00")
- logger.error(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+ logger.error('Query "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
if self.query == "":
self.query = None

@@ -1892,7 +1892,7 @@ def clean(self):
action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
for remove_str in null_char_list:
self.fragment = self.fragment.replace(remove_str, "%00")
- logger.error(f'Fragment "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+ logger.error('Fragment "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
if self.fragment == "":
self.fragment = None

8 changes: 4 additions & 4 deletions dojo/notifications/helper.py
@@ -234,7 +234,7 @@ def send_slack_notification(
elif self.system_settings.slack_channel is not None:
channel = self.system_settings.slack_channel
logger.info(
f"Sending system notification to system channel {channel}.",
"Sending system notification to system channel %s.", channel,
)
self._post_slack_message(event, user, channel, **kwargs)
else:
@@ -272,11 +272,11 @@ def _get_slack_user_id(self, user_email: str) -> str:
if user_email == user["user"]["profile"]["email"]:
if "id" in user["user"]:
user_id = user["user"]["id"]
logger.debug(f"Slack user ID is {user_id}")
logger.debug("Slack user ID is %s", user_id)
slack_user_is_found = True
else:
logger.warning(
f"A user with email {user_email} could not be found in this Slack workspace.",
"A user with email %s could not be found in this Slack workspace.", user_email,
)

if not slack_user_is_found:
@@ -496,7 +496,7 @@ def _get_webhook_endpoints(
if not endpoints.exists():
if user:
logger.info(
f"URLs for Webhooks not configured for user '{user}': skipping user notification",
"URLs for Webhooks not configured for user '%s': skipping user notification", user,
)
else:
logger.info(
2 changes: 1 addition & 1 deletion dojo/pipeline.py
@@ -99,7 +99,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs):
logger.debug("Skipping group " + group_name + " due to AZUREAD_TENANT_OAUTH2_GROUPS_FILTER " + settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER)
continue
except Exception as e:
logger.error(f"Could not call microsoft graph API or save groups to member: {e}")
logger.error("Could not call microsoft graph API or save groups to member: %s", e)
if len(group_names) > 0:
assign_user_to_groups(user, group_names, Dojo_Group.AZURE)
if settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS:
8 changes: 4 additions & 4 deletions dojo/product/helpers.py
@@ -18,16 +18,16 @@ def propagate_tags_on_product(product_id, *args, **kwargs):

def propagate_tags_on_product_sync(product):
# enagagements
logger.debug(f"Propagating tags from {product} to all engagements")
logger.debug("Propagating tags from %s to all engagements", product)
propagate_tags_on_object_list(Engagement.objects.filter(product=product))
# tests
logger.debug(f"Propagating tags from {product} to all tests")
logger.debug("Propagating tags from %s to all tests", product)
propagate_tags_on_object_list(Test.objects.filter(engagement__product=product))
# findings
logger.debug(f"Propagating tags from {product} to all findings")
logger.debug("Propagating tags from %s to all findings", product)
propagate_tags_on_object_list(Finding.objects.filter(test__engagement__product=product))
# endpoints
logger.debug(f"Propagating tags from {product} to all endpoints")
logger.debug("Propagating tags from %s to all endpoints", product)
propagate_tags_on_object_list(Endpoint.objects.filter(product=product))


6 changes: 3 additions & 3 deletions dojo/search/views.py
@@ -422,9 +422,9 @@ def parse_search_query(clean_query):
else:
keywords.append(vulnerability_id_fix(query_part))

logger.debug(f"query: {clean_query}")
logger.debug(f"operators: {operators}")
logger.debug(f"keywords: {keywords}")
logger.debug("query: %s", clean_query)
logger.debug("operators: %s", operators)
logger.debug("keywords: %s", keywords)

return operators, keywords

4 changes: 2 additions & 2 deletions dojo/settings/settings.dist.py
@@ -1364,7 +1364,7 @@ def saml2_attrib_map_format(din):
logger.info(f"Replacing {key} with value {value} (previously set to {HASHCODE_FIELDS_PER_SCANNER[key]}) from env var DD_HASHCODE_FIELDS_PER_SCANNER")
HASHCODE_FIELDS_PER_SCANNER[key] = value
if key not in HASHCODE_FIELDS_PER_SCANNER:
logger.info(f"Adding {key} with value {value} from env var DD_HASHCODE_FIELDS_PER_SCANNER")
logger.info("Adding %s with value %s from env var DD_HASHCODE_FIELDS_PER_SCANNER", key, value)
HASHCODE_FIELDS_PER_SCANNER[key] = value


@@ -1618,7 +1618,7 @@ def saml2_attrib_map_format(din):
logger.info(f"Replacing {key} with value {value} (previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER")
DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value
if key not in DEDUPLICATION_ALGORITHM_PER_PARSER:
logger.info(f"Adding {key} with value {value} from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER")
logger.info("Adding %s with value %s from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER", key, value)
DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value

DUPE_DELETE_MAX_PER_RUN = env("DD_DUPE_DELETE_MAX_PER_RUN")
4 changes: 2 additions & 2 deletions dojo/sla_config/helpers.py
@@ -21,7 +21,7 @@ def update_sla_expiration_dates_product_async(product, sla_config, *args, **kwar


def update_sla_expiration_dates_sla_config_sync(sla_config, products, severities=None):
logger.info(f"Updating finding SLA expiration dates within the {sla_config} SLA configuration")
logger.info("Updating finding SLA expiration dates within the %s SLA configuration", sla_config)
# update each finding that is within the SLA configuration that was saved
findings = Finding.objects.filter(test__engagement__product__sla_configuration_id=sla_config.id)
if products:
@@ -49,4 +49,4 @@ def update_sla_expiration_dates_sla_config_sync(sla_config, products, severities
# reset the async updating flag to false for this sla config
sla_config.async_updating = False
super(SLA_Configuration, sla_config).save()
logger.info(f"DONE Updating finding SLA expiration dates within the {sla_config} SLA configuration")
logger.info("DONE Updating finding SLA expiration dates within the %s SLA configuration", sla_config)
12 changes: 6 additions & 6 deletions dojo/tools/anchore_grype/parser.py
@@ -30,9 +30,9 @@ def get_description_for_scan_types(self, scan_type):
)

def get_findings(self, file, test):
logger.debug(f"file: {file}")
logger.debug("file: %s", file)
data = json.load(file)
logger.debug(f"data: {data}")
logger.debug("data: %s", data)
dupes = {}
for item in data.get("matches", []):
vulnerability = item["vulnerability"]
@@ -223,22 +223,22 @@ def get_cvss(self, cvss):

def get_epss_values(self, vuln_id, epss_list):
if not isinstance(epss_list, list):
logger.debug(f"epss_list is not a list: {epss_list}")
logger.debug("epss_list is not a list: %s", epss_list)
return None, None

if isinstance(epss_list, list):
logger.debug(f"epss_list: {epss_list}")
logger.debug("epss_list: %s", epss_list)
for epss_data in epss_list:
if epss_data.get("cve") != vuln_id:
continue
try:
epss_score = float(epss_data.get("epss"))
epss_percentile = float(epss_data.get("percentile"))
except (TypeError, ValueError):
logger.debug(f"epss_data is not a float: {epss_data}")
logger.debug("epss_data is not a float: %s", epss_data)
else:
return epss_score, epss_percentile
logger.debug(f"epss not found for vuln_id: {vuln_id} in epss_list: {epss_list}")
logger.debug("epss not found for vuln_id: %s in epss_list: %s", vuln_id, epss_list)
return None, None

def get_vulnerability_ids(self, vuln_id, related_vulnerabilities):
2 changes: 1 addition & 1 deletion dojo/tools/api_bugcrowd/importer.py
@@ -29,7 +29,7 @@ def get_findings(self, test):
for page in submissions_paged:
submissions += page
counter += 1
logger.debug(f"{counter} Bugcrowd submissions pages fetched")
logger.debug("%s Bugcrowd submissions pages fetched", counter)

return submissions, config

2 changes: 1 addition & 1 deletion dojo/tools/api_bugcrowd/parser.py
@@ -155,7 +155,7 @@ def get_findings(self, file, test):
finding.unsaved_endpoints = [bug_endpoint]
except Exception as e:
logger.error(
f"{bug_endpoint} bug url from bugcrowd failed to parse to endpoint, error= {e}",
"%s bug url from bugcrowd failed to parse to endpoint, error= %s", bug_endpoint, e,
)
except ValidationError:
logger.error(
6 changes: 3 additions & 3 deletions dojo/tools/api_sonarqube/updater.py
@@ -120,7 +120,7 @@ def update_sonarqube_finding(self, finding):
return

logger.debug(
f"Checking if finding '{finding}' needs to be updated in SonarQube",
"Checking if finding '%s' needs to be updated in SonarQube", finding,
)

client, _ = SonarQubeApiImporter.prepare_client(finding.test)
Expand All @@ -141,15 +141,15 @@ def update_sonarqube_finding(self, finding):
current_status = issue.get("status")

logger.debug(
f"--> SQ Current status: {current_status}. Current target status: {target_status}",
"--> SQ Current status: %s. Current target status: %s", current_status, target_status,
)

transitions = self.get_sonarqube_required_transitions_for(
current_status, target_status,
)
if transitions:
logger.info(
f"Updating finding '{finding}' in SonarQube",
"Updating finding '%s' in SonarQube", finding,
)

for transition in transitions:
4 changes: 2 additions & 2 deletions dojo/tools/api_sonarqube/updater_from_source.py
@@ -42,14 +42,14 @@ def update(self, finding):
current_status = issue.get("resolution") or issue.get("status")
current_finding_status = self.get_sonarqube_status_for(finding)
logger.debug(
f"--> SQ Current status: {current_status}. Finding status: {current_finding_status}",
"--> SQ Current status: %s. Finding status: %s", current_status, current_finding_status,
)

if (
current_status not in {"OPEN", current_finding_status}
):
logger.info(
f"Original SonarQube issue '{sonarqube_issue}' has changed. Updating DefectDojo finding '{finding}'...",
"Original SonarQube issue '%s' has changed. Updating DefectDojo finding '%s'...", sonarqube_issue, finding,
)
self.update_finding_status(finding, current_status)

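Every hunk in this PR makes the same change: logging calls that eagerly built their message with an f-string now pass a %s template plus arguments, so the logging module renders the message only when the record is actually emitted. A minimal, self-contained sketch of the difference (illustrative only, not code from the PR):

import logging

logging.basicConfig(level=logging.INFO)  # DEBUG records are filtered out
logger = logging.getLogger(__name__)

class Expensive:
    """Stand-in for an argument that is costly to render as a string."""

    def __str__(self):
        print("__str__ ran")  # visible side effect for the demo
        return "payload"

# Eager: the f-string is evaluated before logger.debug() is even called,
# so Expensive.__str__ runs although the DEBUG record is then discarded.
logger.debug(f"data: {Expensive()}")

# Lazy: logger.debug() stores the template and the argument; formatting
# happens only if a handler actually emits the record, so __str__ never
# runs here.
logger.debug("data: %s", Expensive())

Deferred %-formatting also keeps the raw arguments attached to the LogRecord, where filters and log aggregators can still read them; it is the pattern that lint rules such as flake8-logging-format's G004 (also enforced by Ruff) push toward by flagging f-strings inside logging calls.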