Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

dev: Add E, PLC, and PLE ruff rules #505

Merged
merged 1 commit into from
Jun 20, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions database/models/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -297,9 +297,9 @@ def report(self):
db_session.query(CommitReport)
.filter(
(CommitReport.commit_id == self.id_)
& (CommitReport.code == None)
& (CommitReport.code == None) # noqa: E711
& (
(CommitReport.report_type == None)
(CommitReport.report_type == None) # noqa: E711
| (CommitReport.report_type == ReportType.COVERAGE.value)
)
)
Expand All @@ -316,7 +316,7 @@ def commit_report(self, report_type: ReportType):
db_session.query(CommitReport)
.filter(
(CommitReport.commit_id == self.id_)
& (CommitReport.code == None)
& (CommitReport.code == None) # noqa: E711
& (CommitReport.report_type == report_type.value)
)
.first()
Expand Down Expand Up @@ -483,7 +483,7 @@ def is_first_coverage_pull(self):
self.repository.pulls.with_entities(
Pull.id_, Pull.commentid, Pull.bundle_analysis_commentid
)
.filter(Pull.commentid != None)
.filter(Pull.commentid != None) # noqa: E711
.order_by(Pull.id_)
.first()
)
Expand Down
10 changes: 5 additions & 5 deletions database/tests/unit/test_model_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def test_archive_getter_archive_field_set(self, db, mocker):
commit = CommitFactory()
test_class = self.ClassWithArchiveField(commit, None, "gcs_path")

assert test_class._archive_field == None
assert test_class._archive_field is None
assert test_class._archive_field_storage_path == "gcs_path"
assert test_class.archive_field == some_json
mock_read_file.assert_called_with("gcs_path")
Expand All @@ -93,9 +93,9 @@ def test_archive_getter_file_not_in_storage(self, db, mocker):
commit = CommitFactory()
test_class = self.ClassWithArchiveField(commit, None, "gcs_path")

assert test_class._archive_field == None
assert test_class._archive_field is None
assert test_class._archive_field_storage_path == "gcs_path"
assert test_class.archive_field == None
assert test_class.archive_field is None
mock_read_file.assert_called_with("gcs_path")
mock_archive_service.assert_called_with(repository=commit.repository)

Expand All @@ -122,7 +122,7 @@ def test_archive_setter_archive_field(self, db, mocker):
mock_archive_service.return_value.write_json_data_to_storage = mock_write_file

assert test_class._archive_field == "db_value"
assert test_class._archive_field_storage_path == None
assert test_class._archive_field_storage_path is None
assert test_class.archive_field == "db_value"
assert mock_read_file.call_count == 0

Expand All @@ -132,7 +132,7 @@ def test_archive_setter_archive_field(self, db, mocker):

# Now we write to the property
test_class.archive_field = some_json
assert test_class._archive_field == None
assert test_class._archive_field is None
assert test_class._archive_field_storage_path == "path/to/written/object"
assert test_class.archive_field == some_json
# Cache is updated on write
Expand Down
4 changes: 3 additions & 1 deletion helpers/checkpoint_logger/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,9 @@ def _subflows() -> TSubflows:
# We get our subflows in the form: [(metric, begin, end)]
# We want them in the form: {end: [(metric, begin)]}
# The first step of munging is to group by end
def key_on_end(x):
    """Group key: the `end` element of a (metric, begin, end) subflow tuple."""
    return x[2]

sorted_by_end = sorted(args, key=key_on_end)
grouped_by_end = itertools.groupby(args, key=key_on_end)

Expand Down
2 changes: 1 addition & 1 deletion helpers/tests/pathmap/test_tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def test_drill_multiple_possible_paths(self):

branch = self.tree.instance.get("list.rs")
results = []
assert self.tree._drill(branch, results) == None
assert self.tree._drill(branch, results) is None

def test_recursive_lookup(self):
path = "one/two/three.py"
Expand Down
7 changes: 4 additions & 3 deletions ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,10 @@ indent-width = 4
target-version = "py312"

[lint]
# Currently only enabled for most F (Pyflakes) and I (isort) rules: https://docs.astral.sh/ruff/rules/
select = ["F", "I"]
ignore = ["F841", "F405", "F403"]
# Currently only enabled for most F (Pyflakes), Pycodestyle (Error),
# PyLint (Convention, Error), and I (isort) rules: https://docs.astral.sh/ruff/rules/
select = ["F", "E", "I", "PLC", "PLE"]
ignore = ["F841", "F405", "F403", "E501", "E712"]

# Allow fix for all enabled rules (when `--fix`) is provided.
# The preferred method (for now) w.r.t. fixable rules is to manually update the makefile
Expand Down
2 changes: 1 addition & 1 deletion services/commit_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ def _ci_providers() -> List[str]:
providers = get_config("services", "ci_providers")
if not providers:
return []
elif type(providers) is list:
elif isinstance(providers, list):
return providers
else:
return map(lambda p: p.strip(), providers.split(","))
Expand Down
5 changes: 4 additions & 1 deletion services/github.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,10 @@ def get_github_integration_token(
raise RepositoryWithoutValidBotError()


def COMMIT_GHAPP_KEY_NAME(commit_id):
    """Return the Redis key caching which GitHub app to use for a commit.

    Name kept UPPER_CASE for backward compatibility with existing callers
    that treated this as a module-level constant (formerly a lambda).
    """
    return f"app_to_use_for_commit_{commit_id}"


GHAPP_KEY_EXPIRY_SECONDS = 60 * 60 * 2 # 2h


Expand Down
8 changes: 6 additions & 2 deletions services/notification/notifiers/mixins/message/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,9 @@ async def write_section_to_msg(
write("")

def get_middle_layout_section_names(self, settings):
sections = map(lambda l: l.strip(), (settings["layout"] or "").split(","))
sections = map(
lambda layout: layout.strip(), (settings["layout"] or "").split(",")
)
return [
section
for section in sections
Expand All @@ -258,7 +260,9 @@ def get_middle_layout_section_names(self, settings):
]

def get_upper_section_names(self, settings):
sections = list(map(lambda l: l.strip(), (settings["layout"] or "").split(",")))
sections = list(
map(lambda layout: layout.strip(), (settings["layout"] or "").split(","))
)
headers = ["newheader", "header", "condensed_header"]
if all(x not in sections for x in headers):
sections.insert(0, "condensed_header")
Expand Down
4 changes: 2 additions & 2 deletions services/notification/notifiers/mixins/message/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,9 @@ def make_metrics(before, after, relative, show_complexity, yaml, pull_url=None):
complexity = " |" if show_complexity else ""

else:
if type(before) is list:
if isinstance(before, list):
before = ReportTotals(*before)
if type(after) is list:
if isinstance(after, list):
after = ReportTotals(*after)

layout = " `{absolute} <{relative}> ({impact})` |"
Expand Down
6 changes: 3 additions & 3 deletions services/notification/notifiers/tests/unit/test_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -297,7 +297,7 @@ async def test_checks_403_failure(
assert fallback_notifier.title == "title"
assert fallback_notifier.is_enabled() == True
assert fallback_notifier.notification_type.value == "checks_patch"
assert fallback_notifier.decoration_type == None
assert fallback_notifier.decoration_type is None

res = await fallback_notifier.notify(sample_comparison)
fallback_notifier.store_results(sample_comparison, res)
Expand All @@ -306,7 +306,7 @@ async def test_checks_403_failure(
assert fallback_notifier.title == "title"
assert fallback_notifier.is_enabled() == True
assert fallback_notifier.notification_type.value == "checks_patch"
assert fallback_notifier.decoration_type == None
assert fallback_notifier.decoration_type is None
assert res == "success"

@pytest.mark.asyncio
Expand Down Expand Up @@ -341,7 +341,7 @@ async def test_checks_failure(self, sample_comparison, mocker, mock_repo_provide
assert fallback_notifier.title == "title"
assert fallback_notifier.is_enabled() == True
assert fallback_notifier.notification_type.value == "checks_patch"
assert fallback_notifier.decoration_type == None
assert fallback_notifier.decoration_type is None

res = await fallback_notifier.notify(sample_comparison)
assert res.notification_successful == False
Expand Down
2 changes: 1 addition & 1 deletion services/notification/notifiers/tests/unit/test_status.py
Original file line number Diff line number Diff line change
Expand Up @@ -718,7 +718,7 @@ def test_determine_status_check_behavior_to_apply(self, sample_comparison):
notifier.determine_status_check_behavior_to_apply(
comparison, "flag_coverage_not_uploaded_behavior"
)
== None
is None
)

def test_flag_coverage_was_uploaded_when_none_uploaded(
Expand Down
2 changes: 1 addition & 1 deletion services/report/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ async def initialize_and_save_report(
db_session.query(CommitReport)
.filter_by(commit_id=commit.id_, code=report_code)
.filter(
(CommitReport.report_type == None)
(CommitReport.report_type == None) # noqa: E711
| (CommitReport.report_type == ReportType.COVERAGE.value)
)
.first()
Expand Down
6 changes: 3 additions & 3 deletions services/report/languages/cobertura.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def from_xml(xml, report_builder_session: ReportBuilderSession) -> Report:
("parsers", "cobertura", "handle_missing_conditions"),
False,
):
if type(coverage) is str:
if isinstance(coverage, str):
covered_conditions, total_conditions = coverage.split("/")
if len(conditions) < int(total_conditions):
# <line number="23" hits="0" branch="true" condition-coverage="0% (0/2)">
Expand All @@ -150,7 +150,7 @@ def from_xml(xml, report_builder_session: ReportBuilderSession) -> Report:
)
else: # previous behaviour
if (
type(coverage) is str
isinstance(coverage, str)
and coverage[0] == "0"
and len(conditions) < int(coverage.split("/")[1])
):
Expand All @@ -168,7 +168,7 @@ def from_xml(xml, report_builder_session: ReportBuilderSession) -> Report:
if conditions:
missing_branches = conditions
if (
type(coverage) is str
isinstance(coverage, str)
and not coverage[0] == "0"
and read_yaml_field(
repo_yaml,
Expand Down
2 changes: 1 addition & 1 deletion services/report/languages/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def get_location(node):


def must_be_dict(value):
    """Return `value` if it is a dict, otherwise an empty dict.

    Used to defensively normalize parsed report nodes whose shape is not
    guaranteed (e.g. malformed JSON coverage input).
    """
    if not isinstance(value, dict):
        return {}
    return value
Expand Down
2 changes: 1 addition & 1 deletion services/report/languages/pycoverage.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def matches_content(self, content, first_line, name) -> bool:
)

def _normalize_label(self, testname) -> str:
if type(testname) == int or type(testname) == float:
if isinstance(testname, int) or isinstance(testname, float):
# This is from a compressed report.
# Pull label from the labels_table
# But the labels_table keys are strings, because of JSON format
Expand Down
2 changes: 1 addition & 1 deletion services/report/languages/tests/unit/test_go.py
Original file line number Diff line number Diff line change
Expand Up @@ -624,7 +624,7 @@ def test_combine_partials(self):
[2, 24, 1],
[24, None, 0],
]
assert go.combine_partials([(2, 2, 1), (2, 2, 0)]) == None
assert go.combine_partials([(2, 2, 1), (2, 2, 0)]) is None
assert go.combine_partials([(0, None, 28), (0, None, 0)]) == [[0, None, 28]]
assert go.combine_partials([(2, 35, 1), (35, None, 1)]) == [[2, None, 1]]
assert go.combine_partials([(2, 35, "1/2"), (35, None, "1/2")]) == [
Expand Down
2 changes: 1 addition & 1 deletion services/report/languages/tests/unit/test_xcode.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
| 1|line
1k| 2|line
warning: The file '/Users/Jack/Documents/Coupgon/sdk-ios/Source/CPGCoupgonsViewController.swift' isn't covered.
\033/file:\033[0m
\033\x1b[0;36m/file:\033[0m
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This and the following two files contained some broken escape characters

1m| 3|line
1| 4| }

Expand Down
2 changes: 1 addition & 1 deletion services/report/languages/tests/unit/test_xcode2.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
1| |line
2| 1k|line
warning: The file '/Users/Jack/Documents/Coupgon/sdk-ios/Source/CPGCoupgonsViewController.swift' isn't covered.
\033/file:\033[0m
\033\x1b[0;36m/file:\033[0m
3| 1m|line
4| 1| }

Expand Down
4 changes: 2 additions & 2 deletions services/report/languages/v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def _list_to_dict(lines):
in: [None, 1] || {"1": 1}
out: {"1": 1}
"""
if type(lines) is list:
if isinstance(lines, list):
if len(lines) > 1:
return dict(
[
Expand All @@ -53,7 +53,7 @@ def _list_to_dict(lines):


def from_json(json, report_builder_session: ReportBuilderSession) -> Report:
if type(json["coverage"]) is dict:
if isinstance(json["coverage"], dict):
# messages = json.get('messages', {})
for fn, lns in json["coverage"].items():
fn = report_builder_session.path_fixer(fn)
Expand Down
2 changes: 1 addition & 1 deletion services/report/languages/xcode.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

START_PARTIAL = "\033[0;41m"
END_PARTIAL = "\033[0m"
NAME_COLOR = "\033"
NAME_COLOR = "\033\x1b[0;36m"


class XCodeProcessor(BaseLanguageProcessor):
Expand Down
10 changes: 6 additions & 4 deletions services/report/raw_upload_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,12 @@
SpecialLabelsEnum.CODECOV_ALL_LABELS_PLACEHOLDER.corresponding_label
)


# This is a lambda function to return different objects
DEFAULT_LABEL_INDEX = lambda: {
SpecialLabelsEnum.CODECOV_ALL_LABELS_PLACEHOLDER.corresponding_index: SpecialLabelsEnum.CODECOV_ALL_LABELS_PLACEHOLDER.corresponding_label
}
def DEFAULT_LABEL_INDEX():
    """Return a fresh default label index mapping.

    This is a function (formerly a lambda) so each caller gets a distinct
    dict object rather than sharing one mutable mapping.
    """
    return {
        SpecialLabelsEnum.CODECOV_ALL_LABELS_PLACEHOLDER.corresponding_index: SpecialLabelsEnum.CODECOV_ALL_LABELS_PLACEHOLDER.corresponding_label
    }


def invert_pattern(string: str) -> str:
Expand Down Expand Up @@ -240,7 +242,7 @@
] = SpecialLabelsEnum.CODECOV_ALL_LABELS_PLACEHOLDER.corresponding_label

def possibly_translate_label(label_or_id: typing.Union[str, int]) -> int:
if type(label_or_id) == int:
if isinstance(label_or_id, int):
return label_or_id
if label_or_id in reverse_index_cache:
return reverse_index_cache[label_or_id]
Expand Down
6 changes: 3 additions & 3 deletions services/tests/test_github.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,17 +75,17 @@ def test_get_app_for_commit(self, mock_redis, dbsession):
mock_redis.get.side_effect = lambda key: redis_keys.get(key)
assert get_github_app_for_commit(fake_commit_12) == "1200"
assert get_github_app_for_commit(fake_commit_10) == "1000"
assert get_github_app_for_commit(fake_commit_50) == None
assert get_github_app_for_commit(fake_commit_50) is None
# This feature is Github-exclusive, so we skip checking for commits that are in repos of other providers
assert get_github_app_for_commit(fake_commit_gitlab) == None
assert get_github_app_for_commit(fake_commit_gitlab) is None

def test_get_app_for_commit_error(self, mock_redis):
repo_github = RepositoryFactory(owner__service="github")
mock_redis.get.side_effect = RedisError
fake_commit_12 = MagicMock(
name="fake_commit", **{"id": 12, "repository": repo_github}
)
assert get_github_app_for_commit(fake_commit_12) == None
assert get_github_app_for_commit(fake_commit_12) is None
mock_redis.get.assert_called_with("app_to_use_for_commit_12")

@pytest.mark.integration
Expand Down
4 changes: 2 additions & 2 deletions services/tests/test_timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -552,7 +552,7 @@ def test_commit_measurement_insert_components(
)
.one_or_none()
)
assert path_not_found_measurements == None
assert path_not_found_measurements is None

empty_path_measurements = (
dbsession.query(Measurement)
Expand All @@ -564,7 +564,7 @@ def test_commit_measurement_insert_components(
)
.one_or_none()
)
assert empty_path_measurements == None
assert empty_path_measurements is None

def test_commit_measurement_update_component(
self, dbsession, sample_report_for_components, repository, mocker
Expand Down
2 changes: 1 addition & 1 deletion tasks/backfill_commit_data_to_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def handle_all_report_rows(
db_session.query(CommitReport)
.filter_by(commit_id=commit.id_)
.filter(
(CommitReport.report_type == None)
(CommitReport.report_type == None) # noqa: E711
| (CommitReport.report_type == ReportType.COVERAGE.value)
)
.all()
Expand Down
2 changes: 1 addition & 1 deletion tasks/backfill_existing_gh_app_installations.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@
extra=dict(ownerid=ownerid, parent_id=self.request.parent_id),
)
return {"successful": True, "reason": "backfill task finished"}
except:
except Exception:

Check warning on line 119 in tasks/backfill_existing_gh_app_installations.py

View check run for this annotation

Codecov Notifications / codecov/patch

tasks/backfill_existing_gh_app_installations.py#L119

Added line #L119 was not covered by tests

Check warning on line 119 in tasks/backfill_existing_gh_app_installations.py

View check run for this annotation

Codecov - QA / codecov/patch

tasks/backfill_existing_gh_app_installations.py#L119

Added line #L119 was not covered by tests

Check warning on line 119 in tasks/backfill_existing_gh_app_installations.py

View check run for this annotation

Codecov Public QA / codecov/patch

tasks/backfill_existing_gh_app_installations.py#L119

Added line #L119 was not covered by tests
log.info(
"Backfill unsuccessful for this owner",
extra=dict(ownerid=ownerid, parent_id=self.request.parent_id),
Expand Down
Loading
Loading