Merge pull request #13 from zytedata/modernize
Add Python 3.13, drop Python 3.8, add mypy and pre-commit
wRAR authored Nov 4, 2024
2 parents 08ecded + 11d565a commit ab365ed
Showing 11 changed files with 77 additions and 13 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/publish.yml
@@ -17,7 +17,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.12'
+          python-version: '3.13'
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
7 changes: 3 additions & 4 deletions .github/workflows/test.yml
@@ -16,12 +16,11 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - python-version: "3.8"
           - python-version: "3.9"
           - python-version: "3.10"
           - python-version: "3.11"
           - python-version: "3.12"
-          - python-version: "3.12"
+          - python-version: "3.13"

     steps:
       - uses: actions/checkout@v4
@@ -42,8 +41,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.12"]
-        tox-job: ["twine-check"]
+        python-version: ["3.13"]
+        tox-job: ["linters", "mypy", "twine-check"]

     steps:
       - uses: actions/checkout@v4
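
Besides dropping 3.8, the include list apparently carried a duplicate "3.12" entry, which accounts for the second deletion in the first hunk.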
22 changes: 22 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,22 @@
+repos:
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.13.2
+    hooks:
+      - id: isort
+  - repo: https://github.com/psf/black
+    rev: 24.10.0
+    hooks:
+      - id: black
+  - repo: https://github.com/pycqa/flake8
+    rev: 7.1.1
+    hooks:
+      - id: flake8
+        additional_dependencies:
+          - flake8-docstrings
+          - flake8-print
+  - repo: https://github.com/adamchainz/blacken-docs
+    rev: 1.19.1
+    hooks:
+      - id: blacken-docs
+        additional_dependencies:
+          - black==24.10.0
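
With this configuration, running `pre-commit install` once sets up the git hook, and `pre-commit run --all-files` reproduces the CI `linters` job locally; the new tox environment below runs the same command.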
2 changes: 1 addition & 1 deletion duplicate_url_discarder_rules/__init__.py
@@ -16,7 +16,7 @@
 RULE_PATHS_ARTICLE: List[str] = []
 RULE_PATHS_PRODUCT: List[str] = []

-for path in RULE_PATHS:
+for path in RULE_PATHS or []:
     filename = Path(path).name
     if filename == "article.json":
         RULE_PATHS_ARTICLE.append(path)
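
The `or []` guard is what lets this loop pass the new mypy strict checks: judging by the `assert RULE_PATHS is not None` lines added to the tests, `RULE_PATHS` is typed as Optional, and strict mypy refuses to iterate an Optional value directly. A minimal sketch of the idiom (the name and the None default are illustrative, not the package's real definition):

    from typing import List, Optional

    # Hypothetical stand-in for the package attribute; the real definition
    # lives in duplicate_url_discarder_rules/__init__.py.
    RULE_PATHS: Optional[List[str]] = None

    found: List[str] = []
    for path in RULE_PATHS or []:  # falls back to an empty list when None
        found.append(path)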
23 changes: 21 additions & 2 deletions pyproject.toml
@@ -15,14 +15,14 @@ classifiers = [
     "Natural Language :: English",
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
     "Programming Language :: Python :: Implementation :: CPython",
 ]
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 dynamic = ["version"]

 [tool.setuptools.dynamic]
@@ -33,3 +33,22 @@ Source = "https://github.com/zytedata/duplicate-url-discarder-rules"

 [tool.setuptools.package-data]
 'duplicate_url_discarder_rules' = ['**/*.json']
+
+[tool.black]
+target-version = ["py39", "py310", "py311", "py312", "py313"]
+
+[tool.isort]
+profile = "black"
+multi_line_output = 3
+
+[tool.mypy]
+strict = true
+implicit_reexport = true
+
+[[tool.mypy.overrides]]
+module = "tests.*.*"
+disallow_untyped_defs = false
+
+[[tool.mypy.overrides]]
+module = "url_matcher"
+ignore_missing_imports = true
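
The two overrides carve out exceptions to strict mode: modules matching `tests.*.*` may keep unannotated test functions, and `url_matcher`, which ships no type stubs, can be imported without a missing-imports error. A hypothetical nested test module that would type-check cleanly under this configuration:

    # tests/example/test_example.py (hypothetical; matches the tests.*.* override)
    from url_matcher import Patterns  # no stubs needed: ignore_missing_imports

    def test_patterns():  # unannotated def: allowed by disallow_untyped_defs = false
        patterns = Patterns(include=["example.com"])
        assert patterns.include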
4 changes: 4 additions & 0 deletions setup.cfg
@@ -0,0 +1,4 @@
+[flake8]
+ignore = E203, E266, E501, W503, C901
+max-line-length = 88
+select = B,C,E,F,W,T4
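
These are the usual Black-compatibility settings for flake8: `max-line-length = 88` matches Black's default, and `E203` (whitespace before `:`) and `W503` (line break before a binary operator) are ignored because Black's output intentionally violates them.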
3 changes: 2 additions & 1 deletion tests/normalizer/test_normalizer.py
@@ -1,10 +1,11 @@
-from duplicate_url_discarder.url_canonicalizer import UrlCanonicalizer
 from duplicate_url_discarder.processors import NormalizerProcessor
+from duplicate_url_discarder.url_canonicalizer import UrlCanonicalizer

 from duplicate_url_discarder_rules import RULE_PATHS


 def test_normalizer_main_rules():
+    assert RULE_PATHS is not None
     rule_path = [path for path in RULE_PATHS if path.endswith("normalizer/main.json")]
     assert len(rule_path) == 1
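
The added `assert RULE_PATHS is not None` is a narrowing idiom for mypy: after the assertion, the Optional type is narrowed to `List[str]`, so the comprehension type-checks under strict mode. The same one-line guard appears in the other test modules below.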
3 changes: 2 additions & 1 deletion tests/queryRemovalExcept/test_query_removal_except.py
@@ -1,10 +1,11 @@
-from duplicate_url_discarder.url_canonicalizer import UrlCanonicalizer
 from duplicate_url_discarder.processors import QueryRemovalExceptProcessor
+from duplicate_url_discarder.url_canonicalizer import UrlCanonicalizer

 from duplicate_url_discarder_rules import RULE_PATHS


 def test_query_removal_except_product_rules():
+    assert RULE_PATHS is not None
     rule_path = [
         path for path in RULE_PATHS if path.endswith("queryRemovalExcept/product.json")
     ]
3 changes: 2 additions & 1 deletion tests/subpathRemoval/test_subpath_removal.py
@@ -1,10 +1,11 @@
-from duplicate_url_discarder.url_canonicalizer import UrlCanonicalizer
 from duplicate_url_discarder.processors import SubpathRemovalProcessor
+from duplicate_url_discarder.url_canonicalizer import UrlCanonicalizer

 from duplicate_url_discarder_rules import RULE_PATHS


 def test_subpath_removal_product_rules():
+    assert RULE_PATHS is not None
     rule_path = [
         path for path in RULE_PATHS if path.endswith("subpathRemoval/product.json")
     ]
9 changes: 8 additions & 1 deletion tests/test_rules.py
@@ -2,10 +2,16 @@

 from url_matcher import Patterns

-from duplicate_url_discarder_rules import RULE_PATHS, RULE_PATHS_ARTICLE, RULE_PATHS_PRODUCT, RULE_PATHS_COMMON
+from duplicate_url_discarder_rules import (
+    RULE_PATHS,
+    RULE_PATHS_ARTICLE,
+    RULE_PATHS_COMMON,
+    RULE_PATHS_PRODUCT,
+)


 def test_rule_validity():
+    assert RULE_PATHS is not None
     for path in RULE_PATHS:
         try:
             with open(path, "r") as f:
@@ -40,6 +46,7 @@ def test_rule_validity():


 def test_rules_concat():
+    assert RULE_PATHS is not None
     all_rules = RULE_PATHS_COMMON + RULE_PATHS_ARTICLE + RULE_PATHS_PRODUCT
     assert isinstance(all_rules, list)
     for path in all_rules:
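
The reflowed import block is isort output under the new `profile = "black"` / `multi_line_output = 3` ("vertical hanging indent") settings, with the imported names alphabetized.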
12 changes: 11 additions & 1 deletion tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py38,py39,py310,py311,py312,twine-check
+envlist = py39,py310,py311,py312,py313,linters,mypy,twine-check

 [testenv]
 deps =
@@ -11,6 +11,16 @@ commands =
     py.test \
     {posargs:duplicate_url_discarder_rules tests}

+[testenv:linters]
+deps = pre-commit
+commands = pre-commit run --all-files --show-diff-on-failure
+
+[testenv:mypy]
+deps =
+    mypy==1.13.0
+    duplicate-url-discarder
+commands = mypy duplicate_url_discarder_rules tests
+
 [testenv:twine-check]
 deps =
     build
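
Locally, `tox -e linters` and `tox -e mypy` reproduce the two new CI jobs, and plain `tox` now runs the py39–py313 test matrix plus the three auxiliary environments.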
