From f2066507f3103213c68b7de5046029a17d11aeaa Mon Sep 17 00:00:00 2001 From: Jelmer Draaijer Date: Sat, 5 Oct 2024 21:20:29 +0200 Subject: [PATCH 01/12] Adopt uv as package manager --- README.md | 4 +- .../developing-locally-docker.rst | 24 ++- .../developing-locally.rst | 30 ++-- docs/Makefile | 2 +- hooks/post_gen_project.py | 15 +- tests/test_bare.sh | 8 +- tests/test_cookiecutter_generation.py | 45 +++++- tests/test_docker.sh | 19 +++ uv.lock | 102 +++++++------ {{cookiecutter.project_slug}}/.drone.yml | 12 +- .../.github/workflows/ci.yml | 23 ++- {{cookiecutter.project_slug}}/.gitlab-ci.yml | 12 +- .../.pre-commit-config.yaml | 7 + {{cookiecutter.project_slug}}/.travis.yml | 5 +- {{cookiecutter.project_slug}}/README.md | 18 +-- .../compose/local/django/Dockerfile | 58 +++---- .../compose/local/docs/Dockerfile | 27 ++-- .../compose/production/django/Dockerfile | 52 +++---- .../docker-compose.docs.yml | 2 - .../docker-compose.local.yml | 5 +- {{cookiecutter.project_slug}}/docs/howto.rst | 4 +- {{cookiecutter.project_slug}}/pyproject.toml | 142 ++++++++++-------- .../requirements.txt | 3 - .../utility/install_python_dependencies.sh | 2 +- {{cookiecutter.project_slug}}/uv.lock | 2 + 25 files changed, 348 insertions(+), 275 deletions(-) delete mode 100644 {{cookiecutter.project_slug}}/requirements.txt create mode 100644 {{cookiecutter.project_slug}}/uv.lock diff --git a/README.md b/README.md index 2d8f4af1bc..c642f7d21f 100644 --- a/README.md +++ b/README.md @@ -84,11 +84,11 @@ and then editing the results to include your name, email, and various configurat First, get Cookiecutter. Trust me, it's awesome: - $ pip install "cookiecutter>=1.7.0" + $ uv tool install "cookiecutter>=1.7.0" Now run it against this repo: - $ cookiecutter https://github.com/cookiecutter/cookiecutter-django + $ uvx cookiecutter https://github.com/cookiecutter/cookiecutter-django You'll be prompted for some values. Provide them, then a Django project will be created for you. 
diff --git a/docs/2-local-development/developing-locally-docker.rst b/docs/2-local-development/developing-locally-docker.rst index 07969e5b6c..aa6ef34877 100644 --- a/docs/2-local-development/developing-locally-docker.rst +++ b/docs/2-local-development/developing-locally-docker.rst @@ -36,6 +36,24 @@ This can take a while, especially the first time you run this particular command Generally, if you want to emulate production environment use ``docker-compose.production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it! +After we have created our initial image we need to generate a lockfile for our dependencies. +Docker cannot write to the host system during builds, so we have to run the command to generate the lockfile in the container. +This is important for reproducible builds and to ensure that the dependencies are installed correctly in the container. +Updating the lockfile manually is normally not necessary when you add packages through `uv add `. + + $ docker compose -f docker-compose.local.yml run --rm django uv lock + +This is done by running the following command: :: + + $ docker compose -f docker-compose.local.yml run --rm django uv lock + +To be sure we are on the right track we need to build our image again: :: + + $ docker compose -f docker-compose.local.yml build + + + + Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then:: $ git init @@ -154,10 +172,10 @@ This tells our computer that all future commands are specifically for the dev1 m Add 3rd party python packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To install a new 3rd party python package, you cannot use ``pip install ``, that would only add the package to the container. The container is ephemeral, so that new library won't be persisted if you run another container. 
Instead, you should modify the Docker image: -You have to modify the relevant requirement file: base, local or production by adding: :: +To install a new 3rd party python package, you cannot use ``uv add ``, that would only add the package to the container. The container is ephemeral, so that new library won't be persisted if you run another container. Instead, you should modify the Docker image: +You have to modify pyproject.toml and either add it to project.dependencies or to tool.uv.dev-dependencies by adding: :: - == + "==" To get this change picked up, you'll need to rebuild the image(s) and restart the running container: :: diff --git a/docs/2-local-development/developing-locally.rst b/docs/2-local-development/developing-locally.rst index b8484bfe0f..85abf6b4df 100644 --- a/docs/2-local-development/developing-locally.rst +++ b/docs/2-local-development/developing-locally.rst @@ -1,7 +1,7 @@ Getting Up and Running Locally ============================== -.. index:: pip, virtualenv, PostgreSQL +.. index:: PostgreSQL Setting Up Development Environment @@ -9,29 +9,19 @@ Setting Up Development Environment Make sure to have the following on your host: -* Python 3.12 +* uv https://docs.astral.sh/uv/getting-started/installation/ * PostgreSQL_. * Redis_, if using Celery * Cookiecutter_ -First things first. - -#. Create a virtualenv: :: - - $ python3.12 -m venv - -#. Activate the virtualenv you have just created: :: - - $ source /bin/activate - #. .. include:: generate-project-block.rst #. Install development requirements: :: $ cd - $ pip install -r requirements/local.txt + $ uv sync $ git init # A git repo is required for pre-commit to install - $ pre-commit install + $ uv run pre-commit install .. note:: @@ -71,15 +61,15 @@ First things first. #. Apply migrations: :: - $ python manage.py migrate + $ uv run python manage.py migrate #. 
If you're running synchronously, see the application being served through Django development server: :: - $ python manage.py runserver 0.0.0.0:8000 + $ uv run python manage.py runserver 0.0.0.0:8000 or if you're running asynchronously: :: - $ uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html' + $ uv run uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html' If you've opted for Webpack or Gulp as frontend pipeline, please see the :ref:`dedicated section ` below. @@ -136,7 +126,7 @@ Following this structured approach, here's how to add a new app: #. **Create the app** using Django's ``startapp`` command, replacing ```` with your desired app name: :: - $ python manage.py startapp + $ uv run python manage.py startapp #. **Move the app** to the Django Project Root, maintaining the project's two-tier structure: :: @@ -203,14 +193,14 @@ Next, make sure `redis-server` is installed (per the `Getting started with Redis Start the Celery worker by running the following command in another terminal:: - $ celery -A config.celery_app worker --loglevel=info + $ uv run celery -A config.celery_app worker --loglevel=info That Celery worker should be running whenever your app is running, typically as a background process, so that it can pick up any tasks that get queued. Learn more from the `Celery Workers Guide`_. The project comes with a simple task for manual testing purposes, inside `/users/tasks.py`. To queue that task locally, start the Django shell, import the task, and call `delay()` on it:: - $ python manage.py shell + $ uv run python manage.py shell >> from .users.tasks import get_users_count >> get_users_count.delay() diff --git a/docs/Makefile b/docs/Makefile index 722f50c7c8..fec1fc9656 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = sphinx-build +SPHINXBUILD = uv run sphinx-build SOURCEDIR = . 
BUILDDIR = _build diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 318c5beb7e..81762fac0b 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -76,7 +76,7 @@ def remove_utility_files(): def remove_heroku_files(): - file_names = ["Procfile", "requirements.txt"] + file_names = ["Procfile"] for file_name in file_names: if file_name == "requirements.txt" and "{{ cookiecutter.ci_tool }}".lower() == "travis": # don't remove the file if we are using travisci but not using heroku @@ -188,20 +188,24 @@ def handle_js_runner(choice, use_docker, use_async): def remove_prettier_pre_commit(): - pre_commit_yaml = Path(".pre-commit-config.yaml") - content = pre_commit_yaml.read_text().splitlines() + remove_repo_from_pre_commit_config("mirrors-prettier") + + +def remove_repo_from_pre_commit_config(repo_to_remove: str): + pre_commit_config = Path(".pre-commit-config.yaml") + content = pre_commit_config.read_text().splitlines(True) removing = False new_lines = [] for line in content: if removing and "- repo:" in line: removing = False - if "mirrors-prettier" in line: + if repo_to_remove in line: removing = True if not removing: new_lines.append(line) - pre_commit_yaml.write_text("\n".join(new_lines)) + pre_commit_config.write_text("\n".join(new_lines)) def remove_celery_files(): @@ -438,6 +442,7 @@ def main(): if "{{ cookiecutter.use_heroku }}".lower() == "n": remove_heroku_files() + remove_repo_from_pre_commit_config("uv-pre-commit") if "{{ cookiecutter.use_docker }}".lower() == "n" and "{{ cookiecutter.use_heroku }}".lower() == "n": if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y": diff --git a/tests/test_bare.sh b/tests/test_bare.sh index f38c9357e7..58ac3d4578 100755 --- a/tests/test_bare.sh +++ b/tests/test_bare.sh @@ -18,13 +18,13 @@ cd my_awesome_project sudo utility/install_os_dependencies.sh install # Install Python deps -pip install -r requirements/local.txt +uv sync # run the project's tests -pytest +uv run pytest # 
Make sure the check doesn't raise any warnings -python manage.py check --fail-level WARNING +uv run python manage.py check --fail-level WARNING # Run npm build script if package.json is present if [ -f "package.json" ] @@ -34,4 +34,4 @@ then fi # Generate the HTML for the documentation -cd docs && make html +cd docs && uv run make html diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py index 76d2f4b98a..b646a1fe7a 100755 --- a/tests/test_cookiecutter_generation.py +++ b/tests/test_cookiecutter_generation.py @@ -2,6 +2,7 @@ import os import re import sys +import tomllib from collections.abc import Iterable from pathlib import Path @@ -275,7 +276,7 @@ def test_djlint_check_passes(cookies, context_override): @pytest.mark.parametrize( ("use_docker", "expected_test_script"), [ - ("n", "pytest"), + ("n", "uv run pytest"), ("y", "docker compose -f docker-compose.local.yml run django pytest"), ], ) @@ -300,7 +301,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip @pytest.mark.parametrize( ("use_docker", "expected_test_script"), [ - ("n", "pytest"), + ("n", "uv run pytest"), ("y", "docker compose -f docker-compose.local.yml run django pytest"), ], ) @@ -317,7 +318,7 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec try: gitlab_config = yaml.safe_load(gitlab_yml) assert gitlab_config["precommit"]["script"] == [ - "pre-commit run --show-diff-on-failure --color=always --all-files", + "uv run pre-commit run --show-diff-on-failure --color=always --all-files", ] assert gitlab_config["pytest"]["script"] == [expected_test_script] except yaml.YAMLError as e: @@ -327,7 +328,7 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec @pytest.mark.parametrize( ("use_docker", "expected_test_script"), [ - ("n", "pytest"), + ("n", "uv run pytest"), ("y", "docker compose -f docker-compose.local.yml run django pytest"), ], ) @@ -414,3 +415,39 @@ def 
test_trim_domain_email(cookies, context): base_settings = result.project_path / "config" / "settings" / "base.py" assert '"me@example.com"' in base_settings.read_text() + + +def test_pyproject_toml(cookies, context): + author_name = "Project Author" + author_email = "me@example.com" + context.update( + { + "description": "DESCRIPTION", + "domain_name": "example.com", + "email": author_email, + "author_name": author_name, + } + ) + result = cookies.bake(extra_context=context) + assert result.exit_code == 0 + + pyproject_toml = result.project_path / "pyproject.toml" + + data = tomllib.loads(pyproject_toml.read_text()) + + assert data + assert data["project"]["authors"][0]["email"] == author_email + assert data["project"]["authors"][0]["name"] == author_name + assert data["project"]["name"] == context["project_slug"] + + +def test_pre_commit_without_heroku(cookies, context): + context.update({"use_heroku": "n"}) + result = cookies.bake(extra_context=context) + assert result.exit_code == 0 + + pre_commit_config = result.project_path / ".pre-commit-config.yaml" + + data = pre_commit_config.read_text() + + assert "uv-pre-commit" not in data diff --git a/tests/test_docker.sh b/tests/test_docker.sh index 171e0c8097..c551ad2bc6 100755 --- a/tests/test_docker.sh +++ b/tests/test_docker.sh @@ -5,11 +5,22 @@ set -o errexit set -x +set -e + +finish() { + # Your cleanup code here + docker compose -f docker-compose.local.yml down --remove-orphans + docker volume rm my_awesome_project_my_awesome_project_local_postgres_data + +} +trap finish EXIT # create a cache directory mkdir -p .cache/docker cd .cache/docker +sudo rm -rf my_awesome_project + # create the project using the default settings in cookiecutter.json uv run cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@" cd my_awesome_project @@ -17,9 +28,15 @@ cd my_awesome_project # make sure all images build docker compose -f docker-compose.local.yml build +docker compose -f docker-compose.local.yml run django 
uv lock + +docker compose -f docker-compose.local.yml build + # run the project's type checks docker compose -f docker-compose.local.yml run --rm django mypy my_awesome_project + + # run the project's tests docker compose -f docker-compose.local.yml run --rm django pytest @@ -44,6 +61,8 @@ docker compose -f docker-compose.local.yml run --rm \ # Generate the HTML for the documentation docker compose -f docker-compose.docs.yml run --rm docs make html +docker build -f ./compose/production/django/Dockerfile . + # Run npm build script if package.json is present if [ -f "package.json" ] then diff --git a/uv.lock b/uv.lock index 90c8637042..60e10011ba 100644 --- a/uv.lock +++ b/uv.lock @@ -12,16 +12,16 @@ wheels = [ [[package]] name = "anyio" -version = "4.7.0" +version = "4.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f6/40/318e58f669b1a9e00f5c4453910682e2d9dd594334539c7b7817dabb765f/anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48", size = 177076 } +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/7a/4daaf3b6c08ad7ceffea4634ec206faeff697526421c20f07628c7372156/anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352", size = 93052 }, + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, ] [[package]] @@ -39,11 +39,11 @@ wheels = [ [[package]] name = "babel" 
-version = "2.16.0" +version = "2.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537 }, ] [[package]] @@ -526,7 +526,7 @@ wheels = [ [[package]] name = "myst-parser" -version = "4.0.0" +version = "4.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docutils" }, @@ -536,9 +536,9 @@ dependencies = [ { name = "pyyaml" }, { name = "sphinx" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/55/6d1741a1780e5e65038b74bce6689da15f620261c490c3511eb4c12bac4b/myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531", size = 93858 } +sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ca/b4/b036f8fdb667587bb37df29dc6644681dd78b7a2a6321a34684b79412b28/myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d", size = 84563 }, + { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579 }, ] [[package]] @@ -828,6 +828,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/d9/c2a126eeae791e90ea099d05cb0515feea3688474b978343f3cdcfe04523/rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc", size = 241597 }, ] +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742 }, +] + [[package]] name = "ruff" version = "0.11.11" @@ -900,7 +909,7 @@ wheels = [ [[package]] name = "sphinx" -version = "8.1.3" +version = "8.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alabaster" }, @@ -912,6 +921,7 @@ dependencies = [ { name = "packaging" }, { name = "pygments" }, { name = "requests" }, + { name = "roman-numerals-py" }, { name = "snowballstemmer" }, { name = "sphinxcontrib-applehelp" }, { name = "sphinxcontrib-devhelp" }, @@ -920,9 +930,9 @@ dependencies = [ { name = "sphinxcontrib-qthelp" }, { name = 
"sphinxcontrib-serializinghtml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611 } +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876 } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 }, + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741 }, ] [[package]] @@ -1024,14 +1034,14 @@ wheels = [ [[package]] name = "starlette" -version = "0.45.1" +version = "0.46.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c1/be/b398217eb35b356d2d9bb84ec67071ea2842e02950fcf38b33df9d5b24ba/starlette-0.45.1.tar.gz", hash = "sha256:a8ae1fa3b1ab7ca83a4abd77871921a13fb5aeaf4874436fb96c29dfcd4ecfa3", size = 2573953 } +sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/2c/a50484b035ee0e13ebb7a42391e391befbfc1b6a9ad5503e83badd182ada/starlette-0.45.1-py3-none-any.whl", hash = "sha256:5656c0524f586e9148d9a3c1dd5257fb42a99892fb0dc6877dd76ef4d184aac3", 
size = 71488 }, + { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 }, ] [[package]] @@ -1178,46 +1188,46 @@ wheels = [ [[package]] name = "watchfiles" -version = "1.0.3" +version = "1.0.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/7e/4569184ea04b501840771b8fcecee19b2233a8b72c196061263c0ef23c0b/watchfiles-1.0.3.tar.gz", hash = "sha256:f3ff7da165c99a5412fe5dd2304dd2dbaaaa5da718aad942dcb3a178eaa70c56", size = 38185 } +sdist = { url = "https://files.pythonhosted.org/packages/f5/26/c705fc77d0a9ecdb9b66f1e2976d95b81df3cae518967431e7dbf9b5e219/watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205", size = 94625 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/a9/c8b5ab33444306e1a324cb2b51644f8458dd459e30c3841f925012893e6a/watchfiles-1.0.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:93436ed550e429da007fbafb723e0769f25bae178fbb287a94cb4ccdf42d3af3", size = 391395 }, - { url = "https://files.pythonhosted.org/packages/ad/d3/403af5f07359863c03951796ddab265ee8cce1a6147510203d0bf43950e7/watchfiles-1.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c18f3502ad0737813c7dad70e3e1cc966cc147fbaeef47a09463bbffe70b0a00", size = 381432 }, - { url = "https://files.pythonhosted.org/packages/f6/5f/921f2f2beabaf24b1ad81ac22bb69df8dd5771fdb68d6f34a5912a420941/watchfiles-1.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5bc3ca468bb58a2ef50441f953e1f77b9a61bd1b8c347c8223403dc9b4ac9a", size = 441448 }, - { url = 
"https://files.pythonhosted.org/packages/63/d7/67d0d750b246f248ccdb400a85a253e93e419ea5b6cbe968fa48b97a5f30/watchfiles-1.0.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0d1ec043f02ca04bf21b1b32cab155ce90c651aaf5540db8eb8ad7f7e645cba8", size = 446852 }, - { url = "https://files.pythonhosted.org/packages/53/7c/d7cd94c7d0905f1e2f1c2232ea9bc39b1a48affd007e09c547ead96edb8f/watchfiles-1.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f58d3bfafecf3d81c15d99fc0ecf4319e80ac712c77cf0ce2661c8cf8bf84066", size = 471662 }, - { url = "https://files.pythonhosted.org/packages/26/81/738f8e66f7525753996b8aa292f78dcec1ef77887d62e6cdfb04cc2f352f/watchfiles-1.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1df924ba82ae9e77340101c28d56cbaff2c991bd6fe8444a545d24075abb0a87", size = 493765 }, - { url = "https://files.pythonhosted.org/packages/d2/50/78e21f5da24ab39114e9b24f7b0945ea1c6fc7bc9ae86cd87f8eaeb47325/watchfiles-1.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:632a52dcaee44792d0965c17bdfe5dc0edad5b86d6a29e53d6ad4bf92dc0ff49", size = 490558 }, - { url = "https://files.pythonhosted.org/packages/a8/93/1873fea6354b2858eae8970991d64e9a449d87726d596490d46bf00af8ed/watchfiles-1.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bf4b459d94a0387617a1b499f314aa04d8a64b7a0747d15d425b8c8b151da0", size = 442808 }, - { url = "https://files.pythonhosted.org/packages/4f/b4/2fc4c92fb28b029f66d04a4d430fe929284e9ff717b04bb7a3bb8a7a5605/watchfiles-1.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca94c85911601b097d53caeeec30201736ad69a93f30d15672b967558df02885", size = 615287 }, - { url = "https://files.pythonhosted.org/packages/1e/d4/93da24db39257e440240d338b617c5153ad11d361c34108f5c0e1e0743eb/watchfiles-1.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65ab1fb635476f6170b07e8e21db0424de94877e4b76b7feabfe11f9a5fc12b5", size = 
612812 }, - { url = "https://files.pythonhosted.org/packages/c6/67/9fd3661c2dc0309abd6021876653d91e8b64fb279529e2cadaa3520ef3e3/watchfiles-1.0.3-cp312-cp312-win32.whl", hash = "sha256:49bc1bc26abf4f32e132652f4b3bfeec77d8f8f62f57652703ef127e85a3e38d", size = 271642 }, - { url = "https://files.pythonhosted.org/packages/ae/aa/8c887edb78cd67f5d4d6a35c3aeb46d748643ebf962163130fb1871e2ee0/watchfiles-1.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:48681c86f2cb08348631fed788a116c89c787fdf1e6381c5febafd782f6c3b44", size = 285505 }, - { url = "https://files.pythonhosted.org/packages/7b/31/d212fa6390f0e73a91913ada0b925b294a78d67794795371208baf73f0b5/watchfiles-1.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:9e080cf917b35b20c889225a13f290f2716748362f6071b859b60b8847a6aa43", size = 277263 }, + { url = "https://files.pythonhosted.org/packages/5b/1a/8f4d9a1461709756ace48c98f07772bc6d4519b1e48b5fa24a4061216256/watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2", size = 391345 }, + { url = "https://files.pythonhosted.org/packages/bc/d2/6750b7b3527b1cdaa33731438432e7238a6c6c40a9924049e4cebfa40805/watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9", size = 381515 }, + { url = "https://files.pythonhosted.org/packages/4e/17/80500e42363deef1e4b4818729ed939aaddc56f82f4e72b2508729dd3c6b/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712", size = 449767 }, + { url = "https://files.pythonhosted.org/packages/10/37/1427fa4cfa09adbe04b1e97bced19a29a3462cc64c78630787b613a23f18/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12", size = 455677 }, + { url = 
"https://files.pythonhosted.org/packages/c5/7a/39e9397f3a19cb549a7d380412fd9e507d4854eddc0700bfad10ef6d4dba/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844", size = 482219 }, + { url = "https://files.pythonhosted.org/packages/45/2d/7113931a77e2ea4436cad0c1690c09a40a7f31d366f79c6f0a5bc7a4f6d5/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733", size = 518830 }, + { url = "https://files.pythonhosted.org/packages/f9/1b/50733b1980fa81ef3c70388a546481ae5fa4c2080040100cd7bf3bf7b321/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af", size = 497997 }, + { url = "https://files.pythonhosted.org/packages/2b/b4/9396cc61b948ef18943e7c85ecfa64cf940c88977d882da57147f62b34b1/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a", size = 452249 }, + { url = "https://files.pythonhosted.org/packages/fb/69/0c65a5a29e057ad0dc691c2fa6c23b2983c7dabaa190ba553b29ac84c3cc/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff", size = 614412 }, + { url = "https://files.pythonhosted.org/packages/7f/b9/319fcba6eba5fad34327d7ce16a6b163b39741016b1996f4a3c96b8dd0e1/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e", size = 611982 }, + { url = "https://files.pythonhosted.org/packages/f1/47/143c92418e30cb9348a4387bfa149c8e0e404a7c5b0585d46d2f7031b4b9/watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94", size = 271822 }, + { url = 
"https://files.pythonhosted.org/packages/ea/94/b0165481bff99a64b29e46e07ac2e0df9f7a957ef13bec4ceab8515f44e3/watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c", size = 285441 }, + { url = "https://files.pythonhosted.org/packages/11/de/09fe56317d582742d7ca8c2ca7b52a85927ebb50678d9b0fa8194658f536/watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90", size = 277141 }, ] [[package]] name = "websockets" -version = "14.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/1b/380b883ce05bb5f45a905b61790319a28958a9ab1e4b6b95ff5464b60ca1/websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8", size = 162840 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/64/55698544ce29e877c9188f1aee9093712411a8fc9732cca14985e49a8e9c/websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed", size = 161957 }, - { url = "https://files.pythonhosted.org/packages/a2/b1/b088f67c2b365f2c86c7b48edb8848ac27e508caf910a9d9d831b2f343cb/websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d", size = 159620 }, - { url = "https://files.pythonhosted.org/packages/c1/89/2a09db1bbb40ba967a1b8225b07b7df89fea44f06de9365f17f684d0f7e6/websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707", size = 159852 }, - { url = "https://files.pythonhosted.org/packages/ca/c1/f983138cd56e7d3079f1966e81f77ce6643f230cd309f73aa156bb181749/websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a", size = 169675 }, - { url = 
"https://files.pythonhosted.org/packages/c1/c8/84191455d8660e2a0bdb33878d4ee5dfa4a2cedbcdc88bbd097303b65bfa/websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45", size = 168619 }, - { url = "https://files.pythonhosted.org/packages/8d/a7/62e551fdcd7d44ea74a006dc193aba370505278ad76efd938664531ce9d6/websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58", size = 169042 }, - { url = "https://files.pythonhosted.org/packages/ad/ed/1532786f55922c1e9c4d329608e36a15fdab186def3ca9eb10d7465bc1cc/websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058", size = 169345 }, - { url = "https://files.pythonhosted.org/packages/ea/fb/160f66960d495df3de63d9bcff78e1b42545b2a123cc611950ffe6468016/websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4", size = 168725 }, - { url = "https://files.pythonhosted.org/packages/cf/53/1bf0c06618b5ac35f1d7906444b9958f8485682ab0ea40dee7b17a32da1e/websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05", size = 168712 }, - { url = "https://files.pythonhosted.org/packages/e5/22/5ec2f39fff75f44aa626f86fa7f20594524a447d9c3be94d8482cd5572ef/websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0", size = 162838 }, - { url = "https://files.pythonhosted.org/packages/74/27/28f07df09f2983178db7bf6c9cccc847205d2b92ced986cd79565d68af4f/websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f", size = 163277 }, - { url = 
"https://files.pythonhosted.org/packages/b0/0b/c7e5d11020242984d9d37990310520ed663b942333b83a033c2f20191113/websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e", size = 156277 }, +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = 
"sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, ] [[package]] diff --git a/{{cookiecutter.project_slug}}/.drone.yml b/{{cookiecutter.project_slug}}/.drone.yml index 20d6fb1bbd..f8b654b12e 100644 --- a/{{cookiecutter.project_slug}}/.drone.yml +++ b/{{cookiecutter.project_slug}}/.drone.yml @@ -13,7 +13,7 @@ environment: steps: - name: lint pull: if-not-exists - image: python:3.12 + image: ghcr.io/astral-sh/uv:python3.12 environment: PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit volumes: @@ -21,8 +21,8 @@ steps: path: ${PRE_COMMIT_HOME} commands: - export PRE_COMMIT_HOME=$CI_PROJECT_DIR/.cache/pre-commit - - pip install -q pre-commit - - pre-commit run --show-diff-on-failure --color=always --all-files + - uv pip install -q pre-commit pre-commit-uv + - uv run pre-commit run --show-diff-on-failure --color=always --all-files - name: test pull: if-not-exists @@ -37,10 +37,10 @@ steps: - docker-compose -f docker-compose.local.yml up -d - docker-compose -f docker-compose.local.yml run django pytest {%- else %} - image: python:3.12 + image: ghcr.io/astral-sh/uv:python3.12 commands: - - pip install -r requirements/local.txt - - pytest + - uv sync --frozen + - uv run pytest {%- endif%} volumes: diff --git a/{{cookiecutter.project_slug}}/.github/workflows/ci.yml b/{{cookiecutter.project_slug}}/.github/workflows/ci.yml index cc762a8303..2679a01243 100644 --- a/{{cookiecutter.project_slug}}/.github/workflows/ci.yml +++ b/{{cookiecutter.project_slug}}/.github/workflows/ci.yml @@ -107,26 +107,25 @@ jobs: run: docker compose -f docker-compose.local.yml down {%- else %} + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + enable-cache: "true" + - name: Set up Python uses: actions/setup-python@v5 with: - python-version-file: '.python-version' - cache: pip - cache-dependency-path: | - requirements/base.txt - requirements/local.txt + python-version-file: ".python-version" - - name: Install Dependencies - run: | - python -m pip 
install --upgrade pip - pip install -r requirements/local.txt + - name: Install dependencies + run: uv sync - name: Check DB Migrations - run: python manage.py makemigrations --check + run: uv run python manage.py makemigrations --check - name: Run DB Migrations - run: python manage.py migrate + run: uv run python manage.py migrate - name: Test with pytest - run: pytest + run: uv run pytest {%- endif %} diff --git a/{{cookiecutter.project_slug}}/.gitlab-ci.yml b/{{cookiecutter.project_slug}}/.gitlab-ci.yml index 9c7cd53672..c881e22146 100644 --- a/{{cookiecutter.project_slug}}/.gitlab-ci.yml +++ b/{{cookiecutter.project_slug}}/.gitlab-ci.yml @@ -13,16 +13,16 @@ variables: precommit: stage: lint - image: python:3.12 + image: ghcr.io/astral-sh/uv:python3.12 variables: PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit cache: paths: - ${PRE_COMMIT_HOME} before_script: - - pip install -q pre-commit + - uv pip install -q pre-commit pre-commit-uv script: - - pre-commit run --show-diff-on-failure --color=always --all-files + - uv run pre-commit run --show-diff-on-failure --color=always --all-files pytest: stage: test @@ -39,13 +39,13 @@ pytest: script: - docker compose -f docker-compose.local.yml run django pytest {%- else %} - image: python:3.12 + image: ghcr.io/astral-sh/uv:python3.12 services: - postgres:{{ cookiecutter.postgresql_version }} variables: DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB before_script: - - pip install -r requirements/local.txt + - uv sync --frozen script: - - pytest + - uv run pytest {%- endif %} diff --git a/{{cookiecutter.project_slug}}/.pre-commit-config.yaml b/{{cookiecutter.project_slug}}/.pre-commit-config.yaml index 49d580d0a3..ebf4f9172c 100644 --- a/{{cookiecutter.project_slug}}/.pre-commit-config.yaml +++ b/{{cookiecutter.project_slug}}/.pre-commit-config.yaml @@ -50,6 +50,13 @@ repos: - id: djlint-reformat-django - id: djlint-django + - repo: https://github.com/astral-sh/uv-pre-commit + # uv 
version. + rev: 0.4.15 + hooks: + - id: uv-export + args: ['--frozen', '--no-dev'] + # sets up .pre-commit-ci.yaml to ensure pre-commit dependencies stay up to date ci: autoupdate_schedule: weekly diff --git a/{{cookiecutter.project_slug}}/.travis.yml b/{{cookiecutter.project_slug}}/.travis.yml index 97f9f60a27..942eea7651 100644 --- a/{{cookiecutter.project_slug}}/.travis.yml +++ b/{{cookiecutter.project_slug}}/.travis.yml @@ -40,7 +40,8 @@ jobs: python: - "3.12" install: - - pip install -r requirements/local.txt + - pip install uv + - uv sync script: - - pytest + - uv run pytest {%- endif %} diff --git a/{{cookiecutter.project_slug}}/README.md b/{{cookiecutter.project_slug}}/README.md index 9ee864afbd..dca38279ff 100644 --- a/{{cookiecutter.project_slug}}/README.md +++ b/{{cookiecutter.project_slug}}/README.md @@ -22,7 +22,7 @@ Moved to [settings](https://cookiecutter-django.readthedocs.io/en/latest/1-getti - To create a **superuser account**, use this command: - $ python manage.py createsuperuser + $ uv run python manage.py createsuperuser For convenience, you can keep your normal user logged in on Chrome and your superuser logged in on Firefox (or similar), so that you can see how the site behaves for both kinds of users. 
@@ -30,19 +30,19 @@ For convenience, you can keep your normal user logged in on Chrome and your supe Running type checks with mypy: - $ mypy {{cookiecutter.project_slug}} + $ uv run mypy {{cookiecutter.project_slug}} ### Test coverage To run the tests, check your test coverage, and generate an HTML coverage report: - $ coverage run -m pytest - $ coverage html - $ open htmlcov/index.html + $ uv run coverage run -m pytest + $ uv run coverage html + $ open htmlcov/index.html #### Running tests with pytest - $ pytest + $ uv run pytest ### Live reloading and Sass CSS compilation @@ -58,7 +58,7 @@ To run a celery worker: ```bash cd {{cookiecutter.project_slug}} -celery -A config.celery_app worker -l info +uv run celery -A config.celery_app worker -l info ``` Please note: For Celery's import magic to work, it is important _where_ the celery commands are run. If you are in the same folder with _manage.py_, you should be right. @@ -67,14 +67,14 @@ To run [periodic tasks](https://docs.celeryq.dev/en/stable/userguide/periodic-ta ```bash cd {{cookiecutter.project_slug}} -celery -A config.celery_app beat +uv run celery -A config.celery_app beat ``` or you can embed the beat service inside a worker with the `-B` option (not recommended for production use): ```bash cd {{cookiecutter.project_slug}} -celery -A config.celery_app worker -B -l info +uv run celery -A config.celery_app worker -B -l info ``` {%- endif %} diff --git a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile index fcf455047d..201f0d21be 100644 --- a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile @@ -1,37 +1,35 @@ # define an alias for the specific python version used in this file. 
-FROM docker.io/python:3.12.10-slim-bookworm AS python +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS python # Python build stage FROM python AS python-build-stage -ARG BUILD_ENVIRONMENT=local +ARG APP_HOME=/app + +WORKDIR ${APP_HOME} + +# we need to move the virtualenv outside of the $APP_HOME directory because it will be overridden by the docker compose mount +ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy # Install apt packages RUN apt-get update && apt-get install --no-install-recommends -y \ # dependencies for building Python packages build-essential \ # psycopg dependencies - libpq-dev + libpq-dev \ + gettext \ + wait-for-it # Requirements are installed here to ensure they will be cached. -COPY ./requirements . - -# Create Python Dependency and Sub-Dependency Wheels. -RUN pip wheel --wheel-dir /usr/src/app/wheels \ - -r ${BUILD_ENVIRONMENT}.txt - - -# Python 'run' stage -FROM python AS python-run-stage - -ARG BUILD_ENVIRONMENT=local -ARG APP_HOME=/app +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=uv.lock,target=uv.lock,rw \ + uv sync --no-install-project -ENV PYTHONUNBUFFERED=1 -ENV PYTHONDONTWRITEBYTECODE=1 -ENV BUILD_ENV=${BUILD_ENVIRONMENT} +COPY . ${APP_HOME} -WORKDIR ${APP_HOME} +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync {% if cookiecutter.use_docker == "y" %} # devcontainer dependencies and utils @@ -45,24 +43,7 @@ RUN groupadd --gid 1000 dev-user \ && chmod 0440 /etc/sudoers.d/dev-user {% endif %} -# Install required system dependencies -RUN apt-get update && apt-get install --no-install-recommends -y \ - # psycopg dependencies - libpq-dev \ - wait-for-it \ - # Translations dependencies - gettext \ - # cleaning up unused files - && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ - && rm -rf /var/lib/apt/lists/* - -# All absolute dir copies ignore workdir instruction. 
All relative dir copies are wrt to the workdir instruction -# copy python dependency wheels from python-build-stage -COPY --from=python-build-stage /usr/src/app/wheels /wheels/ - -# use wheels to install python dependencies -RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \ - && rm -rf /wheels/ +ENV PATH="${APP_HOME}/.venv/bin:$PATH" COPY ./compose/production/django/entrypoint /entrypoint RUN sed -i 's/\r$//g' /entrypoint @@ -86,7 +67,4 @@ RUN sed -i 's/\r$//g' /start-flower RUN chmod +x /start-flower {% endif %} -# copy application code to WORKDIR -COPY . ${APP_HOME} - ENTRYPOINT ["/entrypoint"] diff --git a/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile index cfcb43f3c1..c5dfa538c9 100644 --- a/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile @@ -1,11 +1,13 @@ # define an alias for the specific python version used in this file. -FROM docker.io/python:3.12.10-slim-bookworm AS python +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS python # Python build stage FROM python AS python-build-stage -ENV PYTHONDONTWRITEBYTECODE=1 +ARG APP_HOME=/app + +WORKDIR ${APP_HOME} RUN apt-get update && apt-get install --no-install-recommends -y \ # dependencies for building Python packages @@ -17,12 +19,15 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ && rm -rf /var/lib/apt/lists/* # Requirements are installed here to ensure they will be cached. -COPY ./requirements /requirements +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --no-install-project + +COPY . 
${APP_HOME} -# create python dependency wheels -RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \ - -r /requirements/local.txt -r /requirements/production.txt \ - && rm -rf /requirements +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync # Python 'run' stage @@ -49,14 +54,12 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ && rm -rf /var/lib/apt/lists/* # copy python dependency wheels from python-build-stage -COPY --from=python-build-stage /usr/src/app/wheels /wheels - -# use wheels to install python dependencies -RUN pip install --no-cache /wheels/* \ - && rm -rf /wheels +COPY --from=python-build-stage --chown=app:app /app /app COPY ./compose/local/docs/start /start-docs RUN sed -i 's/\r$//g' /start-docs RUN chmod +x /start-docs +ENV PATH="/app/.venv/bin:$PATH" + WORKDIR /docs diff --git a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile index 35f0114169..4a07f6ce10 100644 --- a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile @@ -1,3 +1,4 @@ +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS python {% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] -%} FROM docker.io/node:22.14-bookworm-slim AS client-builder @@ -22,7 +23,6 @@ ENV DJANGO_AZURE_ACCOUNT_NAME=${DJANGO_AZURE_ACCOUNT_NAME} {%- endif %} {%- endif %} RUN npm run build - {%- endif %} # define an alias for the specific python version used in this file. 
FROM docker.io/python:3.12.10-slim-bookworm AS python @@ -30,7 +30,11 @@ FROM docker.io/python:3.12.10-slim-bookworm AS python # Python build stage FROM python AS python-build-stage -ARG BUILD_ENVIRONMENT=production +ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy + +ARG APP_HOME=/app + +WORKDIR ${APP_HOME} # Install apt packages RUN apt-get update && apt-get install --no-install-recommends -y \ @@ -41,23 +45,24 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ # Requirements are installed here to ensure they will be cached. -COPY ./requirements . - -# Create Python Dependency and Sub-Dependency Wheels. -RUN pip wheel --wheel-dir /usr/src/app/wheels \ - -r ${BUILD_ENVIRONMENT}.txt +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --frozen --no-install-project --no-dev +{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %} +COPY --from=client-builder ${APP_HOME} ${APP_HOME} +{% else %} +COPY . ${APP_HOME} +{%- endif %} +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-dev # Python 'run' stage FROM python AS python-run-stage -ARG BUILD_ENVIRONMENT=production ARG APP_HOME=/app -ENV PYTHONUNBUFFERED=1 -ENV PYTHONDONTWRITEBYTECODE=1 -ENV BUILD_ENV=${BUILD_ENVIRONMENT} - WORKDIR ${APP_HOME} RUN addgroup --system django \ @@ -76,14 +81,6 @@ RUN apt-get update && apt-get install --no-install-recommends -y \ && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ && rm -rf /var/lib/apt/lists/* -# All absolute dir copies ignore workdir instruction. 
All relative dir copies are wrt to the workdir instruction -# copy python dependency wheels from python-build-stage -COPY --from=python-build-stage /usr/src/app/wheels /wheels/ - -# use wheels to install python dependencies -RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \ - && rm -rf /wheels/ - COPY --chown=django:django ./compose/production/django/entrypoint /entrypoint RUN sed -i 's/\r$//g' /entrypoint @@ -111,21 +108,16 @@ RUN sed -i 's/\r$//g' /start-flower RUN chmod +x /start-flower {%- endif %} +# Copy the application from the builder +COPY --from=python-build-stage --chown=django:django ${APP_HOME} ${APP_HOME} -# copy application code to WORKDIR -{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %} -COPY --from=client-builder --chown=django:django ${APP_HOME} ${APP_HOME} -{% else %} -COPY --chown=django:django . ${APP_HOME} -{%- endif %} - -{%- if cookiecutter.cloud_provider == 'None' %} +{%- if cookiecutter.cloud_provider == 'None' %} # explicitly create the media folder before changing ownership below RUN mkdir -p ${APP_HOME}/{{ cookiecutter.project_slug }}/media {%- endif %} -# make django owner of the WORKDIR directory as well. 
-RUN chown -R django:django ${APP_HOME} +# Place executables in the environment at the front of the path +ENV PATH="/app/.venv/bin:$PATH" USER django diff --git a/{{cookiecutter.project_slug}}/docker-compose.docs.yml b/{{cookiecutter.project_slug}}/docker-compose.docs.yml index 215b6c3b77..011356c8a3 100644 --- a/{{cookiecutter.project_slug}}/docker-compose.docs.yml +++ b/{{cookiecutter.project_slug}}/docker-compose.docs.yml @@ -9,8 +9,6 @@ services: - ./.envs/.local/.django volumes: - ./docs:/docs:z - - ./config:/app/config:z - - ./{{ cookiecutter.project_slug }}:/app/{{ cookiecutter.project_slug }}:z ports: - '9000:9000' command: /start-docs diff --git a/{{cookiecutter.project_slug}}/docker-compose.local.yml b/{{cookiecutter.project_slug}}/docker-compose.local.yml index 9a066ccf12..a3601ec2bc 100644 --- a/{{cookiecutter.project_slug}}/docker-compose.local.yml +++ b/{{cookiecutter.project_slug}}/docker-compose.local.yml @@ -10,6 +10,9 @@ services: dockerfile: ./compose/local/django/Dockerfile image: {{ cookiecutter.project_slug }}_local_django container_name: {{ cookiecutter.project_slug }}_local_django + volumes: + - /app/.venv + - .:/app:z depends_on: - postgres {%- if cookiecutter.use_celery == 'y' %} @@ -18,8 +21,6 @@ services: {%- if cookiecutter.use_mailpit == 'y' %} - mailpit {%- endif %} - volumes: - - .:/app:z env_file: - ./.envs/.local/.django - ./.envs/.local/.postgres diff --git a/{{cookiecutter.project_slug}}/docs/howto.rst b/{{cookiecutter.project_slug}}/docs/howto.rst index 944c2b7318..2ad20966b0 100644 --- a/{{cookiecutter.project_slug}}/docs/howto.rst +++ b/{{cookiecutter.project_slug}}/docs/howto.rst @@ -9,7 +9,7 @@ Documentation can be written as rst files in `{{cookiecutter.project_slug}}/docs {% if cookiecutter.use_docker == 'n' %} To build and serve docs, use the command:: - make livehtml + uv run make livehtml from inside the `{{cookiecutter.project_slug}}/docs` directory. 
{% else %} @@ -35,7 +35,7 @@ For an in-use example, see the `page source <_sources/users.rst.txt>`_ for :ref: To compile all docstrings automatically into documentation source files, use the command: :: - make apidocs + uv run make apidocs {% if cookiecutter.use_docker == 'y' %} This can be done in the docker container: diff --git a/{{cookiecutter.project_slug}}/pyproject.toml b/{{cookiecutter.project_slug}}/pyproject.toml index eaead446c2..0147262bf5 100644 --- a/{{cookiecutter.project_slug}}/pyproject.toml +++ b/{{cookiecutter.project_slug}}/pyproject.toml @@ -26,9 +26,9 @@ warn_redundant_casts = true warn_unused_configs = true plugins = [ "mypy_django_plugin.main", -{%- if cookiecutter.use_drf == "y" %} + {%- if cookiecutter.use_drf == "y" %} "mypy_drf_plugin.main", -{%- endif %} + {%- endif %} ] [[tool.mypy.overrides]] @@ -68,69 +68,69 @@ extend-exclude = [ [tool.ruff.lint] select = [ - "F", - "E", - "W", - "C90", - "I", - "N", - "UP", - "YTT", - # "ANN", # flake8-annotations: we should support this in the future but 100+ errors atm - "ASYNC", - "S", - "BLE", - "FBT", - "B", - "A", - "COM", - "C4", - "DTZ", - "T10", - "DJ", - "EM", - "EXE", - "FA", - 'ISC', - "ICN", - "G", - 'INP', - 'PIE', - "T20", - 'PYI', - 'PT', - "Q", - "RSE", - "RET", - "SLF", - "SLOT", - "SIM", - "TID", - "TC", - "INT", - # "ARG", # Unused function argument - "PTH", - "ERA", - "PD", - "PGH", - "PL", - "TRY", - "FLY", - # "NPY", - # "AIR", - "PERF", - # "FURB", - # "LOG", - "RUF", + "F", + "E", + "W", + "C90", + "I", + "N", + "UP", + "YTT", + # "ANN", # flake8-annotations: we should support this in the future but 100+ errors atm + "ASYNC", + "S", + "BLE", + "FBT", + "B", + "A", + "COM", + "C4", + "DTZ", + "T10", + "DJ", + "EM", + "EXE", + "FA", + 'ISC', + "ICN", + "G", + 'INP', + 'PIE', + "T20", + 'PYI', + 'PT', + "Q", + "RSE", + "RET", + "SLF", + "SLOT", + "SIM", + "TID", + "TC", + "INT", + # "ARG", # Unused function argument + "PTH", + "ERA", + "PD", + "PGH", + "PL", + "TRY", + "FLY", + 
# "NPY", + # "AIR", + "PERF", + # "FURB", + # "LOG", + "RUF", ] ignore = [ - "S101", # Use of assert detected https://docs.astral.sh/ruff/rules/assert/ - "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` - "SIM102", # sometimes it's better to nest - "UP038", # Checks for uses of isinstance/issubclass that take a tuple - # of types for comparison. - # Deactivated because it can make the code slow: - # https://github.com/astral-sh/ruff/issues/7871 + "S101", # Use of assert detected https://docs.astral.sh/ruff/rules/assert/ + "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` + "SIM102", # sometimes it's better to nest + "UP038", # Checks for uses of isinstance/issubclass that take a tuple + # of types for comparison. + # Deactivated because it can make the code slow: + # https://github.com/astral-sh/ruff/issues/7871 ] # The fixes in extend-unsafe-fixes will require # provide the `--unsafe-fixes` flag when fixing. @@ -140,3 +140,19 @@ extend-unsafe-fixes = [ [tool.ruff.lint.isort] force-single-line = true + +[dependency-groups] +dev = [] + +[project] +name = "{{ cookiecutter.project_slug }}" +version = "{{ cookiecutter.version }}" +description = "{{ cookiecutter.description }}" +readme = "README.md" + +license = { text = "{{ cookiecutter.open_source_license }}" } +authors = [ + { name = "{{ cookiecutter.author_name }}", email = "{{ cookiecutter.email }}" }, +] +requires-python = "==3.12.*" +dependencies = [] diff --git a/{{cookiecutter.project_slug}}/requirements.txt b/{{cookiecutter.project_slug}}/requirements.txt deleted file mode 100644 index c1b500c2b4..0000000000 --- a/{{cookiecutter.project_slug}}/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# This file is expected by Heroku. 
- --r requirements/production.txt diff --git a/{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh b/{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh index e09ebf6f85..d340bc454d 100755 --- a/{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh +++ b/{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh @@ -33,7 +33,7 @@ if [ -z "$VIRTUAL_ENV" ]; then echo >&2 -e "\n" exit 1; else - pip install -r $PROJECT_DIR/requirements/local.txt + uv sync --frozen {%- if cookiecutter.use_heroku == "y" -%} pip install -r $PROJECT_DIR/requirements.txt {%- endif %} diff --git a/{{cookiecutter.project_slug}}/uv.lock b/{{cookiecutter.project_slug}}/uv.lock new file mode 100644 index 0000000000..975be54ebb --- /dev/null +++ b/{{cookiecutter.project_slug}}/uv.lock @@ -0,0 +1,2 @@ +version = 1 +requires-python = "==3.12.*" From e4d87f2bf209453b471dcb7651552d11ecf20dcd Mon Sep 17 00:00:00 2001 From: Jelmer Draaijer Date: Thu, 10 Apr 2025 20:24:51 +0200 Subject: [PATCH 02/12] Add dependencies to uv in post_gen hook --- hooks/post_gen_project.py | 47 +++++++++++++++++++ tests/test_cookiecutter_generation.py | 9 +++- tox.ini | 2 +- uv.lock | 32 ++++++------- .../requirements/local.txt | 2 - 5 files changed, 72 insertions(+), 20 deletions(-) diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 81762fac0b..8b207e2ed9 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -1,7 +1,10 @@ import json +import os import random import shutil import string +import subprocess +import sys from pathlib import Path try: @@ -502,8 +505,52 @@ def main(): if "{{ cookiecutter.use_async }}".lower() == "n": remove_async_files() + setup_dependencies() + print(SUCCESS + "Project initialized, keep up the good work!" 
+ TERMINATOR) +def setup_dependencies(): + print("Installing python dependencies using uv...") + + if "{{ cookiecutter.use_docker }}".lower() == "y": + # Build the Docker service using Docker Compose + try: + subprocess.run(["docker", "compose", "-f", "docker-compose.local.yml", "build", "django"], check=True) + except subprocess.CalledProcessError as e: + print(f"Error building Docker service: {e}", file=sys.stderr) + sys.exit(1) + + # Use Docker to run the uv command + uv_cmd = ["docker", "compose", "-f", "docker-compose.local.yml", "run", "--rm", "django", "uv"] + else: + # Use uv command directly + uv_cmd = ["uv"] + + # Install production dependencies + try: + subprocess.run(uv_cmd + ["add", "-r", "requirements/production.txt"], check=True) + except subprocess.CalledProcessError as e: + print(f"Error installing production dependencies: {e}", file=sys.stderr) + sys.exit(1) + + # Install local (development) dependencies + try: + subprocess.run(uv_cmd + ["add", "--dev", "-r", "requirements/local.txt"], check=True) + except subprocess.CalledProcessError as e: + print(f"Error installing local dependencies: {e}", file=sys.stderr) + sys.exit(1) + + # Remove the requirements directory + if os.path.exists("requirements"): + try: + shutil.rmtree("requirements") + except Exception as e: + print(f"Error removing 'requirements' folder: {e}", file=sys.stderr) + sys.exit(1) + + print("Setup complete!") + + if __name__ == "__main__": main() diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py index b646a1fe7a..9160877645 100755 --- a/tests/test_cookiecutter_generation.py +++ b/tests/test_cookiecutter_generation.py @@ -151,7 +151,14 @@ def _fixture_id(ctx): def build_files_list(base_path: Path): """Build a list containing absolute paths to the generated files.""" - return [dirpath / file_path for dirpath, subdirs, files in base_path.walk() for file_path in files] + f = [] + for dirpath, subdirs, files in base_path.walk(): + if ".venv" in 
subdirs: + subdirs.remove(".venv") + + for file_path in files: + f.append(dirpath / file_path) + return f def check_paths(paths: Iterable[Path]): diff --git a/tox.ini b/tox.ini index 70cde339f4..9af0d70351 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ envlist = py312,black-template [testenv] passenv = AUTOFIXABLE_STYLES -commands = pytest -n auto {posargs:./tests} +commands = pytest --instafail -n auto {posargs:./tests} [testenv:black-template] deps = black diff --git a/uv.lock b/uv.lock index 60e10011ba..1c3007c401 100644 --- a/uv.lock +++ b/uv.lock @@ -1188,26 +1188,26 @@ wheels = [ [[package]] name = "watchfiles" -version = "1.0.4" +version = "1.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f5/26/c705fc77d0a9ecdb9b66f1e2976d95b81df3cae518967431e7dbf9b5e219/watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205", size = 94625 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/1a/8f4d9a1461709756ace48c98f07772bc6d4519b1e48b5fa24a4061216256/watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2", size = 391345 }, - { url = "https://files.pythonhosted.org/packages/bc/d2/6750b7b3527b1cdaa33731438432e7238a6c6c40a9924049e4cebfa40805/watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9", size = 381515 }, - { url = "https://files.pythonhosted.org/packages/4e/17/80500e42363deef1e4b4818729ed939aaddc56f82f4e72b2508729dd3c6b/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712", size = 449767 }, - { url = 
"https://files.pythonhosted.org/packages/10/37/1427fa4cfa09adbe04b1e97bced19a29a3462cc64c78630787b613a23f18/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12", size = 455677 }, - { url = "https://files.pythonhosted.org/packages/c5/7a/39e9397f3a19cb549a7d380412fd9e507d4854eddc0700bfad10ef6d4dba/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844", size = 482219 }, - { url = "https://files.pythonhosted.org/packages/45/2d/7113931a77e2ea4436cad0c1690c09a40a7f31d366f79c6f0a5bc7a4f6d5/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733", size = 518830 }, - { url = "https://files.pythonhosted.org/packages/f9/1b/50733b1980fa81ef3c70388a546481ae5fa4c2080040100cd7bf3bf7b321/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af", size = 497997 }, - { url = "https://files.pythonhosted.org/packages/2b/b4/9396cc61b948ef18943e7c85ecfa64cf940c88977d882da57147f62b34b1/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a", size = 452249 }, - { url = "https://files.pythonhosted.org/packages/fb/69/0c65a5a29e057ad0dc691c2fa6c23b2983c7dabaa190ba553b29ac84c3cc/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff", size = 614412 }, - { url = "https://files.pythonhosted.org/packages/7f/b9/319fcba6eba5fad34327d7ce16a6b163b39741016b1996f4a3c96b8dd0e1/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e", size = 
611982 }, - { url = "https://files.pythonhosted.org/packages/f1/47/143c92418e30cb9348a4387bfa149c8e0e404a7c5b0585d46d2f7031b4b9/watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94", size = 271822 }, - { url = "https://files.pythonhosted.org/packages/ea/94/b0165481bff99a64b29e46e07ac2e0df9f7a957ef13bec4ceab8515f44e3/watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c", size = 285441 }, - { url = "https://files.pythonhosted.org/packages/11/de/09fe56317d582742d7ca8c2ca7b52a85927ebb50678d9b0fa8194658f536/watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90", size = 277141 }, +sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511 }, + { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715 }, + { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138 }, + { url = 
"https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592 }, + { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532 }, + { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865 }, + { url = "https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887 }, + { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498 }, + { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663 }, + { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 
625410 }, + { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965 }, + { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693 }, + { url = "https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287 }, ] [[package]] diff --git a/{{cookiecutter.project_slug}}/requirements/local.txt b/{{cookiecutter.project_slug}}/requirements/local.txt index 21a85096fb..37f82144d6 100644 --- a/{{cookiecutter.project_slug}}/requirements/local.txt +++ b/{{cookiecutter.project_slug}}/requirements/local.txt @@ -1,5 +1,3 @@ --r production.txt - Werkzeug[watchdog]==3.1.3 # https://github.com/pallets/werkzeug ipdb==0.13.13 # https://github.com/gotcha/ipdb {%- if cookiecutter.use_docker == 'y' %} From 53748eb05ac63244116a6970594bd06feee675f9 Mon Sep 17 00:00:00 2001 From: jelmert Date: Tue, 15 Apr 2025 08:43:55 +0200 Subject: [PATCH 03/12] Add UV_PYTHON_DOWNLOADS=0 to local Dockerfile and add .venv/lib/python3.12/site-packages to site-packages --- {{cookiecutter.project_slug}}/compose/local/django/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile index 201f0d21be..b1ad21b6e4 100644 --- a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile @@ -9,7 +9,7 @@ ARG APP_HOME=/app WORKDIR 
${APP_HOME} # we need to move the virtualenv outside of the $APP_HOME directory because it will be overriden by the docker compose mount -ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy +ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy UV_PYTHON_DOWNLOADS=0 # Install apt packages RUN apt-get update && apt-get install --no-install-recommends -y \ @@ -44,6 +44,7 @@ RUN groupadd --gid 1000 dev-user \ {% endif %} ENV PATH="/${APP_HOME}/.venv/bin:$PATH" +ENV PYTHONPATH="${APP_HOME}/.venv/lib/python3.12/site-packages:$PYTHONPATH" COPY ./compose/production/django/entrypoint /entrypoint RUN sed -i 's/\r$//g' /entrypoint From d19f22967f1e86649add248c75d8c0ca0c47089a Mon Sep 17 00:00:00 2001 From: jelmert Date: Tue, 15 Apr 2025 09:29:35 +0200 Subject: [PATCH 04/12] Add mounts when syncing uv --- {{cookiecutter.project_slug}}/compose/local/django/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile index b1ad21b6e4..5153d42cfe 100644 --- a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile @@ -29,6 +29,8 @@ RUN --mount=type=cache,target=/root/.cache/uv \ COPY . 
${APP_HOME} RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=uv.lock,target=uv.lock,rw \ uv sync {% if cookiecutter.use_docker == "y" %} From 2bcf200ea84eb644761939d21e8c3b44beb64bf9 Mon Sep 17 00:00:00 2001 From: jelmert Date: Tue, 15 Apr 2025 09:35:21 +0200 Subject: [PATCH 05/12] Exclude __pycache__ dirs for project generation --- tests/test_cookiecutter_generation.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py index 9160877645..4872f3fcf7 100755 --- a/tests/test_cookiecutter_generation.py +++ b/tests/test_cookiecutter_generation.py @@ -151,10 +151,11 @@ def _fixture_id(ctx): def build_files_list(base_path: Path): """Build a list containing absolute paths to the generated files.""" + excluded_dirs = {".venv", "__pycache__"} + f = [] for dirpath, subdirs, files in base_path.walk(): - if ".venv" in subdirs: - subdirs.remove(".venv") + subdirs[:] = [d for d in subdirs if d not in excluded_dirs] for file_path in files: f.append(dirpath / file_path) From 10d5234c02faea332afa6eeccbaa7520fcccf4e0 Mon Sep 17 00:00:00 2001 From: jelmert Date: Tue, 15 Apr 2025 09:53:57 +0200 Subject: [PATCH 06/12] Add --no-sync to `uv add` commands --- hooks/post_gen_project.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 8b207e2ed9..59bb534ae5 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -529,14 +529,14 @@ def setup_dependencies(): # Install production dependencies try: - subprocess.run(uv_cmd + ["add", "-r", "requirements/production.txt"], check=True) + subprocess.run(uv_cmd + ["add", "--no-sync", "-r", "requirements/production.txt"], check=True) except subprocess.CalledProcessError as e: print(f"Error installing production dependencies: {e}", file=sys.stderr) sys.exit(1) # Install 
local (development) dependencies try: - subprocess.run(uv_cmd + ["add", "--dev", "-r", "requirements/local.txt"], check=True) + subprocess.run(uv_cmd + ["add", "--no-sync", "--dev", "-r", "requirements/local.txt"], check=True) except subprocess.CalledProcessError as e: print(f"Error installing local dependencies: {e}", file=sys.stderr) sys.exit(1) From 714fce9c1b905d593009c15ca646c7ed52ab53ca Mon Sep 17 00:00:00 2001 From: jelmert Date: Tue, 22 Apr 2025 09:59:29 +0200 Subject: [PATCH 07/12] Update production Dockerfile --- tests/test_docker.sh | 66 +++++++++++-------- .../compose/production/django/Dockerfile | 12 ++-- .../compose/production/django/start | 4 +- 3 files changed, 50 insertions(+), 32 deletions(-) diff --git a/tests/test_docker.sh b/tests/test_docker.sh index c551ad2bc6..2ea235a071 100755 --- a/tests/test_docker.sh +++ b/tests/test_docker.sh @@ -32,36 +32,50 @@ docker compose -f docker-compose.local.yml run django uv lock docker compose -f docker-compose.local.yml build -# run the project's type checks -docker compose -f docker-compose.local.yml run --rm django mypy my_awesome_project - - - -# run the project's tests -docker compose -f docker-compose.local.yml run --rm django pytest - -# return non-zero status code if there are migrations that have not been created -docker compose -f docker-compose.local.yml run --rm django python manage.py makemigrations --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; } - -# Test support for translations -docker compose -f docker-compose.local.yml run --rm django python manage.py makemessages --all - -# Make sure the check doesn't raise any warnings -docker compose -f docker-compose.local.yml run --rm \ - -e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \ - -e REDIS_URL=redis://redis:6379/0 \ - -e DJANGO_AWS_ACCESS_KEY_ID=x \ - -e DJANGO_AWS_SECRET_ACCESS_KEY=x \ - -e DJANGO_AWS_STORAGE_BUCKET_NAME=x \ - -e 
DJANGO_ADMIN_URL=x \ - -e MAILGUN_API_KEY=x \ - -e MAILGUN_DOMAIN=x \ - django python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING +## run the project's type checks +#docker compose -f docker-compose.local.yml run --rm django mypy my_awesome_project +# +# +# +## run the project's tests +#docker compose -f docker-compose.local.yml run --rm django pytest +# +## return non-zero status code if there are migrations that have not been created +#docker compose -f docker-compose.local.yml run --rm django python manage.py makemigrations --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; } +# +## Test support for translations +#docker compose -f docker-compose.local.yml run --rm django python manage.py makemessages --all +# +## Make sure the check doesn't raise any warnings +#docker compose -f docker-compose.local.yml run --rm \ +# -e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \ +# -e REDIS_URL=redis://redis:6379/0 \ +# -e DJANGO_AWS_ACCESS_KEY_ID=x \ +# -e DJANGO_AWS_SECRET_ACCESS_KEY=x \ +# -e DJANGO_AWS_STORAGE_BUCKET_NAME=x \ +# -e DJANGO_ADMIN_URL=x \ +# -e MAILGUN_API_KEY=x \ +# -e MAILGUN_DOMAIN=x \ +# django python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING # Generate the HTML for the documentation docker compose -f docker-compose.docs.yml run --rm docs make html -docker build -f ./compose/production/django/Dockerfile . +docker build -f ./compose/production/django/Dockerfile -t django-prod . 
+ +docker run --rm \ +--env-file .envs/.local/.django \ +--env-file .envs/.local/.postgres \ +--network my_awesome_project_default \ +-e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \ +-e REDIS_URL=redis://redis:6379/0 \ +-e DJANGO_AWS_ACCESS_KEY_ID=x \ +-e DJANGO_AWS_SECRET_ACCESS_KEY=x \ +-e DJANGO_AWS_STORAGE_BUCKET_NAME=x \ +-e DJANGO_ADMIN_URL=x \ +-e MAILGUN_API_KEY=x \ +-e MAILGUN_DOMAIN=x \ +django-prod python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING # Run npm build script if package.json is present if [ -f "package.json" ] diff --git a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile index 4a07f6ce10..1f78d45919 100644 --- a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile @@ -1,4 +1,3 @@ -FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS python {% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] -%} FROM docker.io/node:22.14-bookworm-slim AS client-builder @@ -28,9 +27,9 @@ RUN npm run build FROM docker.io/python:3.12.10-slim-bookworm AS python # Python build stage -FROM python AS python-build-stage +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS python-build-stage -ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy +ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy UV_PYTHON_DOWNLOADS=0 ARG APP_HOME=/app @@ -56,10 +55,12 @@ COPY . 
${APP_HOME} {%- endif %} RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ uv sync --frozen --no-dev # Python 'run' stage -FROM python AS python-run-stage +FROM python:3.12-slim-bookworm AS python-run-stage ARG APP_HOME=/app @@ -111,6 +112,9 @@ RUN chmod +x /start-flower # Copy the application from the builder COPY --from=python-build-stage --chown=django:django ${APP_HOME} ${APP_HOME} +# make django owner of the WORKDIR directory as well. +RUN chown django:django ${APP_HOME} + {%- if cookiecutter.cloud_provider == 'None' %} # explicitly create the media folder before changing ownership below RUN mkdir -p ${APP_HOME}/{{ cookiecutter.project_slug }}/media diff --git a/{{cookiecutter.project_slug}}/compose/production/django/start b/{{cookiecutter.project_slug}}/compose/production/django/start index 38fc29b552..f45e7b28d4 100644 --- a/{{cookiecutter.project_slug}}/compose/production/django/start +++ b/{{cookiecutter.project_slug}}/compose/production/django/start @@ -28,7 +28,7 @@ if compress_enabled; then fi {%- endif %} {%- if cookiecutter.use_async == 'y' %} -exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn_worker.UvicornWorker +exec gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn_worker.UvicornWorker {%- else %} -exec /usr/local/bin/gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app +exec gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app {%- endif %} From 86bfd48b6f0de7f5ac291a5b30643dd9c1b09307 Mon Sep 17 00:00:00 2001 From: Jelmer Draaijer Date: Fri, 30 May 2025 09:14:26 +0200 Subject: [PATCH 08/12] Set dependabot ecosystem to uv --- {{cookiecutter.project_slug}}/.github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/{{cookiecutter.project_slug}}/.github/dependabot.yml b/{{cookiecutter.project_slug}}/.github/dependabot.yml index 45f35a28e5..1bf321b980 
100644 --- a/{{cookiecutter.project_slug}}/.github/dependabot.yml +++ b/{{cookiecutter.project_slug}}/.github/dependabot.yml @@ -55,7 +55,7 @@ updates: {%- endif %} # Enable version updates for Python/Pip - Production - - package-ecosystem: 'pip' + - package-ecosystem: 'uv' # Look for a `requirements.txt` in the `root` directory # also 'setup.cfg', '.python-version' and 'requirements/*.txt' directory: '/' From fe1fde0d3e21a9790f8941b6486467fab3dd9eed Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Fri, 29 Aug 2025 09:36:52 +0100 Subject: [PATCH 09/12] Fix/ignore a few Ruff issues --- hooks/post_gen_project.py | 16 ++++++++-------- tests/test_cookiecutter_generation.py | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 25edecfcec..83c7d0e079 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -1,6 +1,5 @@ # ruff: noqa: PLR0133 import json -import os import random import shutil import string @@ -203,7 +202,7 @@ def remove_prettier_pre_commit(): def remove_repo_from_pre_commit_config(repo_to_remove: str): pre_commit_config = Path(".pre-commit-config.yaml") - content = pre_commit_config.read_text().splitlines(True) + content = pre_commit_config.read_text().splitlines(keepends=True) removing = False new_lines = [] @@ -518,7 +517,7 @@ def setup_dependencies(): if "{{ cookiecutter.use_docker }}".lower() == "y": # Build the Docker service using Docker Compose try: - subprocess.run(["docker", "compose", "-f", "docker-compose.local.yml", "build", "django"], check=True) + subprocess.run(["docker", "compose", "-f", "docker-compose.local.yml", "build", "django"], check=True) # noqa: S607 except subprocess.CalledProcessError as e: print(f"Error building Docker service: {e}", file=sys.stderr) sys.exit(1) @@ -531,23 +530,24 @@ def setup_dependencies(): # Install production dependencies try: - subprocess.run(uv_cmd + ["add", "--no-sync", "-r", "requirements/production.txt"], 
check=True) + subprocess.run([*uv_cmd, "add", "--no-sync", "-r", "requirements/production.txt"], check=True) # noqa: S603 except subprocess.CalledProcessError as e: print(f"Error installing production dependencies: {e}", file=sys.stderr) sys.exit(1) # Install local (development) dependencies try: - subprocess.run(uv_cmd + ["add", "--no-sync", "--dev", "-r", "requirements/local.txt"], check=True) + subprocess.run([*uv_cmd, "add", "--no-sync", "--dev", "-r", "requirements/local.txt"], check=True) # noqa: S603 except subprocess.CalledProcessError as e: print(f"Error installing local dependencies: {e}", file=sys.stderr) sys.exit(1) # Remove the requirements directory - if os.path.exists("requirements"): + requirements_dir = Path("requirements") + if requirements_dir.exists(): try: - shutil.rmtree("requirements") - except Exception as e: + shutil.rmtree(requirements_dir) + except Exception as e: # noqa: BLE001 print(f"Error removing 'requirements' folder: {e}", file=sys.stderr) sys.exit(1) diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py index 004e412f66..fce443a6d8 100755 --- a/tests/test_cookiecutter_generation.py +++ b/tests/test_cookiecutter_generation.py @@ -158,7 +158,7 @@ def build_files_list(base_path: Path): subdirs[:] = [d for d in subdirs if d not in excluded_dirs] for file_path in files: - f.append(dirpath / file_path) + f.append(dirpath / file_path) # noqa: PERF401 return f From f0ad07e7ad6c39565510515d768e8f9c7897c424 Mon Sep 17 00:00:00 2001 From: Bruno Alla Date: Fri, 29 Aug 2025 09:43:07 +0100 Subject: [PATCH 10/12] Remove leading $ signs in README to simplify copy/pasting --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 71fe889041..540b763c47 100644 --- a/README.md +++ b/README.md @@ -84,11 +84,11 @@ and then editing the results to include your name, email, and various configurat First, get Cookiecutter. 
Trust me, it's awesome: - $ uv tool install "cookiecutter>=1.7.0" + uv tool install "cookiecutter>=1.7.0" Now run it against this repo: - $ uvx cookiecutter https://github.com/cookiecutter/cookiecutter-django + uvx cookiecutter https://github.com/cookiecutter/cookiecutter-django You'll be prompted for some values. Provide them, then a Django project will be created for you. From 291fb7510b74a72068a1518e08de7756422614ee Mon Sep 17 00:00:00 2001 From: Baptiste Pereira Date: Tue, 2 Sep 2025 18:53:41 +0200 Subject: [PATCH 11/12] post_gen_project/pre-commit: remove extra line breaks when editing config --- hooks/post_gen_project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 83c7d0e079..07ea3dca14 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -202,7 +202,7 @@ def remove_prettier_pre_commit(): def remove_repo_from_pre_commit_config(repo_to_remove: str): pre_commit_config = Path(".pre-commit-config.yaml") - content = pre_commit_config.read_text().splitlines(keepends=True) + content = pre_commit_config.read_text().splitlines(keepends=False) removing = False new_lines = [] From aa0726d6f2366d14cfa40bf8e8c5b3bd4053ff0b Mon Sep 17 00:00:00 2001 From: Baptiste Pereira Date: Tue, 2 Sep 2025 21:40:59 +0200 Subject: [PATCH 12/12] post_gen_project/pre-commit: keep uv-pre-commit for heroku --- hooks/post_gen_project.py | 1 - 1 file changed, 1 deletion(-) diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 07ea3dca14..2be09cb6be 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -445,7 +445,6 @@ def main(): # noqa: C901, PLR0912, PLR0915 if "{{ cookiecutter.use_heroku }}".lower() == "n": remove_heroku_files() - remove_repo_from_pre_commit_config("uv-pre-commit") if "{{ cookiecutter.use_docker }}".lower() == "n" and "{{ cookiecutter.use_heroku }}".lower() == "n": if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":