Commit 218a29a

Ruff: Preparation for G004 (#13076)
1 parent 37caf24 commit 218a29a

File tree

38 files changed: +94 −94 lines changed

38 files changed

+94
-94
lines changed
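
For context: Ruff's G004 rule ("logging-f-string", from the flake8-logging-format ruleset) flags f-strings passed to logging calls. An f-string is formatted eagerly, before the logger checks whether the record will actually be emitted; with %-style placeholders, the logging framework defers interpolation until the record is known to be handled. A minimal sketch of the difference (illustrative names, not code from this commit):

import logging

logging.basicConfig(level=logging.INFO)  # DEBUG records are discarded
logger = logging.getLogger(__name__)

class Expensive:
    def __str__(self):
        print("str() was called")  # visible side effect for the demo
        return "value"

# Eager: the f-string is built before logger.debug runs, so
# Expensive.__str__ executes even though the record is dropped.
logger.debug(f"data: {Expensive()}")

# Lazy: the disabled logger returns before any formatting happens,
# so Expensive.__str__ never executes.
logger.debug("data: %s", Expensive())

The argument objects are still constructed in both cases; what the rewrite avoids is the string formatting itself, which matters for large values such as the validated_data dicts logged below.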

dojo/api_v2/serializers.py

Lines changed: 3 additions & 3 deletions

@@ -219,7 +219,7 @@ def to_internal_value(self, data):
         except ValueError:
             self.fail("invalid_json")

-        logger.debug(f"data as json: {data}")
+        logger.debug("data as json: %s", data)

         if not isinstance(data, list):
             self.fail("not_a_list", input_type=type(data).__name__)
@@ -238,7 +238,7 @@ def to_internal_value(self, data):
                 tag_validator(sub, exception_class=RestFrameworkValidationError)
             data_safe.extend(substrings)

-        logger.debug(f"result after rendering tags: {data_safe}")
+        logger.debug("result after rendering tags: %s", data_safe)
         return data_safe

     def to_representation(self, value):
@@ -1863,7 +1863,7 @@ class Meta:

     # Overriding this to push add Push to JIRA functionality
     def create(self, validated_data):
-        logger.debug(f"Creating finding with validated data: {validated_data}")
+        logger.debug("Creating finding with validated data: %s", validated_data)
         push_to_jira = validated_data.pop("push_to_jira", False)
         notes = validated_data.pop("notes", None)
         found_by = validated_data.pop("found_by", None)

dojo/api_v2/views.py

Lines changed: 1 addition & 1 deletion

@@ -2648,7 +2648,7 @@ def perform_create(self, serializer):
         jira_driver = test or (engagement or (product or None))
         if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None):
             push_to_jira = push_to_jira or jira_project.push_all_issues
-        logger.debug(f"push_to_jira: {push_to_jira}")
+        logger.debug("push_to_jira: %s", push_to_jira)
         serializer.save(push_to_jira=push_to_jira)

dojo/finding/views.py

Lines changed: 2 additions & 2 deletions

@@ -486,7 +486,7 @@ def get_request_response(self, finding: Finding):
             burp_request = base64.b64decode(request_response.burpRequestBase64)
             burp_response = base64.b64decode(request_response.burpResponseBase64)
         except Exception as e:
-            logger.debug(f"unsuspected error: {e}")
+            logger.debug("unsuspected error: %s", e)

         return {
             "burp_request": burp_request,
@@ -1597,7 +1597,7 @@ def request_finding_review(request, fid):
         reviewers = Dojo_User.objects.filter(id__in=form.cleaned_data["reviewers"])
         reviewers_string = ", ".join([f"{user} ({user.id})" for user in reviewers])
         reviewers_usernames = [user.username for user in reviewers]
-        logger.debug(f"Asking {reviewers_string} for review")
+        logger.debug("Asking %s for review", reviewers_string)

         create_notification(
             event="review_requested",  # TODO: - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces

dojo/importers/endpoint_manager.py

Lines changed: 1 addition & 1 deletion

@@ -111,7 +111,7 @@ def clean_unsaved_endpoints(
             try:
                 endpoint.clean()
             except ValidationError as e:
-                logger.warning(f"DefectDojo is storing broken endpoint because cleaning wasn't successful: {e}")
+                logger.warning("DefectDojo is storing broken endpoint because cleaning wasn't successful: %s", e)

     def chunk_endpoints_and_reactivate(
         self,

dojo/jira_link/helper.py

Lines changed: 1 addition & 1 deletion

@@ -204,7 +204,7 @@ def can_be_pushed_to_jira(obj, form=None):
             return False, f"Finding below the minimum JIRA severity threshold ({System_Settings.objects.get().jira_minimum_severity}).", "error_below_minimum_threshold"
     elif isinstance(obj, Finding_Group):
         finding_group_status = _safely_get_obj_status_for_jira(obj)
-        logger.error(f"Finding group status: {finding_group_status}")
+        logger.error("Finding group status: %s", finding_group_status)
         if "Empty" in finding_group_status:
             return False, f"{to_str_typed(obj)} cannot be pushed to jira as it contains no findings above minimum treshold.", "error_empty"


dojo/management/commands/import_all_unittest_scans.py

Lines changed: 5 additions & 5 deletions

@@ -120,7 +120,7 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1,
         return self.import_scan(payload, expected_http_status_code)

     def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engagement=10, engagements_per_product=50, products_per_product_type=15, *, include_very_big_scans=False, **kwargs):
-        logger.info(f"product_name_prefix: {product_name_prefix}, tests_per_engagement: {tests_per_engagement}, engagements_per_product: {engagements_per_product}, products_per_product_type: {products_per_product_type}")
+        logger.info("product_name_prefix: %s, tests_per_engagement: %s, engagements_per_product: %s, products_per_product_type: %s", product_name_prefix, tests_per_engagement, engagements_per_product, products_per_product_type)
         product_type_prefix = "Sample scans " + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         product_type_index = 1

@@ -159,7 +159,7 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme
                 for attribute_name in dir(module):
                     attribute = getattr(module, attribute_name)
                     if isclass(attribute) and attribute_name.lower() == module_name.replace("_", "") + "parser":
-                        logger.debug(f"Loading {module_name} parser")
+                        logger.debug("Loading %s parser", module_name)
                         scan_dir = Path("unittests") / "scans" / module_name
                         for scan_file in scan_dir.glob("*.json"):
                             if include_very_big_scans or scan_file.name != "very_many_vulns.json":  # jfrog_xray file is huge and takes too long to import
@@ -183,12 +183,12 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme
                                     error_messages[module_name + "/" + scan_file.name] = result.get("message", str(e))

             except:
-                logger.exception(f"failed to load {module_name}")
+                logger.exception("failed to load %s", module_name)
                 raise

-        logger.error(f"Error count: {error_count}")
+        logger.error("Error count: %s", error_count)
         for scan, message in error_messages.items():
-            logger.error(f"Error importing scan {scan}: {message}")
+            logger.error("Error importing scan %s: %s", scan, message)

     def handle(self, *args, **options):
         logger.info("EXPERIMENTAL: This command may be changed/deprecated/removed without prior notice.")

dojo/management/commands/import_github_languages.py

Lines changed: 2 additions & 2 deletions

@@ -43,7 +43,7 @@ def handle(self, *args, **options):
             try:
                 language_type, created = Language_Type.objects.get_or_create(language=name)
             except Language_Type.MultipleObjectsReturned:
-                logger.warning(f"Language_Type {name} exists multiple times")
+                logger.warning("Language_Type %s exists multiple times", name)
                 continue

             if created:
@@ -52,4 +52,4 @@ def handle(self, *args, **options):
             language_type.color = element.get("color", 0)
             language_type.save()

-        logger.info(f"Finished importing languages from GitHub, added {new_language_types} Language_Types")
+        logger.info("Finished importing languages from GitHub, added %s Language_Types", new_language_types)

dojo/models.py

Lines changed: 3 additions & 3 deletions

@@ -1866,7 +1866,7 @@ def clean(self):
                 action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
                 for remove_str in null_char_list:
                     self.path = self.path.replace(remove_str, "%00")
-                logger.error(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+                logger.error('Path "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
             if self.path == "":
                 self.path = None

@@ -1879,7 +1879,7 @@ def clean(self):
                 action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
                 for remove_str in null_char_list:
                     self.query = self.query.replace(remove_str, "%00")
-                logger.error(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+                logger.error('Query "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
             if self.query == "":
                 self.query = None

@@ -1892,7 +1892,7 @@ def clean(self):
                 action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
                 for remove_str in null_char_list:
                     self.fragment = self.fragment.replace(remove_str, "%00")
-                logger.error(f'Fragment "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+                logger.error('Fragment "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
             if self.fragment == "":
                 self.fragment = None

dojo/notifications/helper.py

Lines changed: 4 additions & 4 deletions

@@ -234,7 +234,7 @@ def send_slack_notification(
         elif self.system_settings.slack_channel is not None:
             channel = self.system_settings.slack_channel
             logger.info(
-                f"Sending system notification to system channel {channel}.",
+                "Sending system notification to system channel %s.", channel,
             )
             self._post_slack_message(event, user, channel, **kwargs)
         else:
@@ -272,11 +272,11 @@ def _get_slack_user_id(self, user_email: str) -> str:
                 if user_email == user["user"]["profile"]["email"]:
                     if "id" in user["user"]:
                         user_id = user["user"]["id"]
-                        logger.debug(f"Slack user ID is {user_id}")
+                        logger.debug("Slack user ID is %s", user_id)
                         slack_user_is_found = True
                     else:
                         logger.warning(
-                            f"A user with email {user_email} could not be found in this Slack workspace.",
+                            "A user with email %s could not be found in this Slack workspace.", user_email,
                         )

         if not slack_user_is_found:
@@ -496,7 +496,7 @@ def _get_webhook_endpoints(
         if not endpoints.exists():
             if user:
                 logger.info(
-                    f"URLs for Webhooks not configured for user '{user}': skipping user notification",
+                    "URLs for Webhooks not configured for user '%s': skipping user notification", user,
                )
            else:
                logger.info(

dojo/pipeline.py

Lines changed: 1 addition & 1 deletion

@@ -99,7 +99,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs):
                 logger.debug("Skipping group " + group_name + " due to AZUREAD_TENANT_OAUTH2_GROUPS_FILTER " + settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER)
                 continue
         except Exception as e:
-            logger.error(f"Could not call microsoft graph API or save groups to member: {e}")
+            logger.error("Could not call microsoft graph API or save groups to member: %s", e)
         if len(group_names) > 0:
             assign_user_to_groups(user, group_names, Dojo_Group.AZURE)
         if settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS:
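
With the call sites above converted, the rule itself can eventually be enabled so new f-string logging calls fail lint. A sketch of what enabling it could look like in pyproject.toml (illustrative; DefectDojo's actual Ruff configuration may differ):

[tool.ruff.lint]
extend-select = ["G004"]

Until it is enabled, the cleanup can be verified ad hoc with: ruff check --select G004 .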
