diff --git a/Makefile b/Makefile index 2121ac7d04ef..3b92f72c3cde 100644 --- a/Makefile +++ b/Makefile @@ -91,6 +91,7 @@ initdb: .state/docker-build-web docker-compose run --rm web psql -h db -d warehouse -U postgres -c "UPDATE users SET name='Ee Durbin' WHERE username='ewdurbin'" docker-compose run --rm web python -m warehouse db upgrade head docker-compose run --rm web python -m warehouse sponsors populate-db + docker-compose run --rm web python -m warehouse classifiers sync $(MAKE) reindex reindex: .state/docker-build-web diff --git a/bin/release b/bin/release index 1759c65fe3d4..d0539a34a57e 100755 --- a/bin/release +++ b/bin/release @@ -8,3 +8,6 @@ python -m warehouse db upgrade head # Insert/upgrade malware checks. python -m warehouse malware sync-checks + +# Insert/upgrade classifiers. +python -m warehouse classifiers sync diff --git a/requirements/main.txt b/requirements/main.txt index b46bb77ea3d8..82e87ebab58f 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -1253,9 +1253,9 @@ translationstring==1.4 \ --hash=sha256:5f4dc4d939573db851c8d840551e1a0fb27b946afe3b95aafc22577eed2d6262 \ --hash=sha256:bf947538d76e69ba12ab17283b10355a9ecfbc078e6123443f43f2107f6376f3 # via pyramid -trove-classifiers==2022.6.26 \ - --hash=sha256:361d6e85bcea11b90be8b4c3ab4f23ddea0c6ee566ca4a82f5f2e4318d08c1b8 \ - --hash=sha256:97be455919ba5d0f715147e7f4e17f64c8d46645d9f1dbac92a68201ca2373d7 +trove-classifiers==2022.7.22 \ + --hash=sha256:0545d0e62af12c722578c9c99f3391b9ce81fa4fe1fd608d6c029b6a55da6456 \ + --hash=sha256:b5d23dbbaf3969c1b8d14e2f56cda4c3b6427c0f9917f367f179f9db393b8721 # via -r requirements/main.in typeguard==2.13.3 \ --hash=sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4 \ diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index a6432a5ed30b..ecd3444e8d26 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -43,6 +43,9 @@ class Meta: id = factory.Faker("uuid4", cast_to=None) name = factory.Faker("pystr", max_chars=12) + normalized_name = factory.LazyAttribute( + lambda o: packaging.utils.canonicalize_name(o.name) + ) class ProjectEventFactory(WarehouseFactory): diff --git a/tests/unit/cli/test_classifiers.py b/tests/unit/cli/test_classifiers.py new file mode 100644 index 000000000000..35a6344f5202 --- /dev/null +++ b/tests/unit/cli/test_classifiers.py @@ -0,0 +1,64 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pretend + +from warehouse import db +from warehouse.classifiers.models import Classifier +from warehouse.cli import classifiers + + +def test_classifiers_update(db_request, monkeypatch, cli): + engine = pretend.stub() + config = pretend.stub(registry={"sqlalchemy.engine": engine}) + session_cls = pretend.call_recorder(lambda bind: db_request.db) + monkeypatch.setattr(db, "Session", session_cls) + + cs = [ + c.classifier + for c in db_request.db.query(Classifier).order_by(Classifier.ordering).all() + ] + + monkeypatch.setattr(classifiers, "sorted_classifiers", ["C :: D", "A :: B"] + cs) + + db_request.db.add(Classifier(classifier="A :: B", ordering=0)) + assert db_request.db.query(Classifier).filter_by(classifier="C :: D").count() == 0 + cli.invoke(classifiers.sync, obj=config) + + c = db_request.db.query(Classifier).filter_by(classifier="C :: D").one() + + assert c.classifier == "C :: D" + assert c.ordering == 0 + + c = db_request.db.query(Classifier).filter_by(classifier="A :: B").one() + + assert c.classifier == "A :: B" + assert c.ordering == 1 + + +def test_classifiers_no_update(db_request, monkeypatch, cli): + engine = pretend.stub() + config = pretend.stub(registry={"sqlalchemy.engine": engine}) + session_cls = pretend.call_recorder(lambda bind: db_request.db) + monkeypatch.setattr(db, "Session", session_cls) + + original = db_request.db.query(Classifier).order_by(Classifier.ordering).all() + + monkeypatch.setattr( + classifiers, "sorted_classifiers", [c.classifier for c in original] + ) + + cli.invoke(classifiers.sync, obj=config) + + after = db_request.db.query(Classifier).order_by(Classifier.ordering).all() + + assert original == after diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index e045538d16db..28634e9605f4 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3134,77 +3134,6 @@ def test_upload_succeeds_creates_release( ), ] - def test_upload_succeeds_creates_classifier( - self, pyramid_config, db_request, metrics, monkeypatch - ): - pyramid_config.testing_securitypolicy(userid=1) - - user = UserFactory.create() - EmailFactory.create(user=user) - project = ProjectFactory.create() - RoleFactory.create(user=user, project=project) - - monkeypatch.setattr(legacy, "classifiers", {"AA :: BB", "CC :: DD"}) - - db_request.db.add(Classifier(classifier="AA :: BB")) - - filename = "{}-{}.tar.gz".format(project.name, "1.0") - - db_request.user = user - db_request.user_agent = "warehouse-tests/6.6.6" - db_request.POST = MultiDict( - { - "metadata_version": "1.2", - "name": project.name, - "version": "1.0", - "summary": "This is my summary!", - "filetype": "sdist", - "md5_digest": _TAR_GZ_PKG_MD5, - "content": pretend.stub( - filename=filename, - file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), - type="application/tar", - ), - } - ) - db_request.POST.extend( - [ - ("classifiers", "AA :: BB"), - ("classifiers", "CC :: DD"), - ("requires_dist", "foo"), - ("requires_dist", "bar (>1.0)"), - ("project_urls", "Test, https://example.com/"), - ("requires_external", "Cheese (>1.0)"), - ("provides", "testing"), - ] - ) - - storage_service = pretend.stub(store=lambda path, filepath, meta: None) - db_request.find_service = lambda svc, name=None, context=None: { - IFileStorage: storage_service, - IMetricsService: metrics, - }.get(svc) - - resp = legacy.file_upload(db_request) - - assert resp.status_code == 200 - - # Ensure that a new Classifier has been created - classifier = ( - db_request.db.query(Classifier) - 
.filter(Classifier.classifier == "CC :: DD") - .one() - ) - assert classifier.classifier == "CC :: DD" - - # Ensure that the Release has the new classifier - release = ( - db_request.db.query(Release) - .filter((Release.project == project) & (Release.version == "1.0")) - .one() - ) - assert release.classifiers == ["AA :: BB", "CC :: DD"] - def test_all_valid_classifiers_can_be_created(self, db_request): for classifier in classifiers: db_request.db.add(Classifier(classifier=classifier)) diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index aa2430bb7efe..c86cd1d44b43 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -42,6 +42,7 @@ def _assert_has_cors_headers(headers): class TestJSONProject: def test_normalizing_redirects(self, db_request): project = ProjectFactory.create() + ReleaseFactory.create(project=project, version="1.0") name = project.name.lower() if name == project.normalized_name: @@ -52,7 +53,7 @@ def test_normalizing_redirects(self, db_request): lambda name: "/project/the-redirect/" ) - resp = json.json_project(project, db_request) + resp = json.json_project(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/" @@ -63,7 +64,8 @@ def test_normalizing_redirects(self, db_request): def test_missing_release(self, db_request): project = ProjectFactory.create() - resp = json.json_project(project, db_request) + db_request.matchdict = {"name": project.normalized_name} + resp = json.json_project(db_request) assert isinstance(resp, HTTPNotFound) _assert_has_cors_headers(resp.headers) @@ -81,8 +83,9 @@ def test_with_prereleases(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -102,8 +105,9 @@ def test_only_prereleases(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -129,8 +133,9 @@ def test_all_releases_yanked(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -156,8 +161,9 @@ def test_latest_release_yanked(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -184,8 +190,9 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) 
assert rvalue is data assert json_data.calls == [ @@ -254,8 +261,9 @@ def test_renders(self, pyramid_config, db_request, db_session): je = JournalEntryFactory.create(name=project.name, submitted_by=user) db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = {"name": project.normalized_name} - result = json.json_project(project, db_request) + result = json.json_project(db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=files[0].path), @@ -405,6 +413,7 @@ def test_renders(self, pyramid_config, db_request, db_session): class TestJSONProjectSlash: def test_normalizing_redirects(self, db_request): project = ProjectFactory.create() + ReleaseFactory.create(project=project, version="1.0") name = project.name.lower() if name == project.normalized_name: @@ -415,7 +424,7 @@ def test_normalizing_redirects(self, db_request): lambda name: "/project/the-redirect/" ) - resp = json.json_project_slash(project, db_request) + resp = json.json_project_slash(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/" @@ -434,12 +443,12 @@ def test_normalizing_redirects(self, db_request): if name == release.project.normalized_name: name = release.project.name.upper() - db_request.matchdict = {"name": name} + db_request.matchdict = {"name": name, "version": "3.0"} db_request.current_route_path = pretend.call_recorder( lambda name: "/project/the-redirect/3.0/" ) - resp = json.json_release(release, db_request) + resp = json.json_release(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/3.0/" @@ -448,6 +457,13 @@ def test_normalizing_redirects(self, db_request): pretend.call(name=release.project.normalized_name) ] + def test_missing_release(self, db_request): + project = ProjectFactory.create() + db_request.matchdict = {"name": project.normalized_name, "version": "3.0"} + resp = json.json_release(db_request) + assert isinstance(resp, HTTPNotFound) + _assert_has_cors_headers(resp.headers) + def test_detail_renders(self, pyramid_config, db_request, db_session): project = ProjectFactory.create(has_docs=True) description_content_type = "text/x-rst" @@ -510,8 +526,12 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): je = JournalEntryFactory.create(name=project.name, submitted_by=user) db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = { + "name": project.normalized_name, + "version": releases[3].canonical_version, + } - result = json.json_release(releases[3], db_request) + result = json.json_release(db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=files[2].path), @@ -597,8 +617,12 @@ def test_minimal_renders(self, pyramid_config, db_request): url = "/the/fake/url/" db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = { + "name": project.normalized_name, + "version": release.canonical_version, + } - result = json.json_release(release, db_request) + result = json.json_release(db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=file.path), @@ -679,8 +703,12 @@ def test_vulnerabilities_renders(self, pyramid_config, db_request): url = "/the/fake/url/" db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = { + "name": project.normalized_name, + "version": release.canonical_version, + } - result = 
json.json_release(release, db_request)
+ result = json.json_release(db_request)
assert result["vulnerabilities"] == [
{
@@ -704,12 +732,12 @@ def test_normalizing_redirects(self, db_request):
if name == release.project.normalized_name:
name = release.project.name.upper()
- db_request.matchdict = {"name": name}
+ db_request.matchdict = {"name": name, "version": "3.0"}
db_request.current_route_path = pretend.call_recorder(
lambda name: "/project/the-redirect/3.0/"
)
- resp = json.json_release_slash(release, db_request)
+ resp = json.json_release_slash(db_request)
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/project/the-redirect/3.0/"
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py
index c3a34fa8ffab..48ddd071a54e 100644
--- a/tests/unit/test_routes.py
+++ b/tests/unit/test_routes.py
@@ -444,32 +444,24 @@ def add_policy(name, filename):
pretend.call(
"legacy.api.json.project",
"/pypi/{name}/json",
- factory="warehouse.packaging.models:ProjectFactory",
- traverse="/{name}",
read_only=True,
domain=warehouse,
),
pretend.call(
"legacy.api.json.project_slash",
"/pypi/{name}/json/",
- factory="warehouse.packaging.models:ProjectFactory",
- traverse="/{name}",
read_only=True,
domain=warehouse,
),
pretend.call(
"legacy.api.json.release",
"/pypi/{name}/{version}/json",
- factory="warehouse.packaging.models:ProjectFactory",
- traverse="/{name}/{version}",
read_only=True,
domain=warehouse,
),
pretend.call(
"legacy.api.json.release_slash",
"/pypi/{name}/{version}/json/",
- factory="warehouse.packaging.models:ProjectFactory",
- traverse="/{name}/{version}",
read_only=True,
domain=warehouse,
),
diff --git a/warehouse/classifiers/models.py b/warehouse/classifiers/models.py
index 3564a7ac5a30..0dde6961e4a0 100644
--- a/warehouse/classifiers/models.py
+++ b/warehouse/classifiers/models.py
@@ -28,3 +28,4 @@ class Classifier(db.ModelBase):
id = Column(Integer, primary_key=True, nullable=False)
classifier = Column(Text, unique=True)
+ ordering = Column(Integer, nullable=True)
diff --git a/warehouse/cli/classifiers.py b/warehouse/cli/classifiers.py
new file mode 100644
index 000000000000..feeb850a058d
--- /dev/null
+++ b/warehouse/cli/classifiers.py
@@ -0,0 +1,61 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import click
+
+from trove_classifiers import all_classifiers as sorted_classifiers
+
+from warehouse.cli import warehouse
+
+
+@warehouse.group()  # pragma: no branch
+def classifiers():
+    """
+    Manage the Warehouse classifiers.
+    """
+
+
+@classifiers.command()
+@click.pass_obj
+def sync(config):
+    """
+    Sync the Warehouse database with the classifiers.
+    """
+    # Imported here because we don't want to trigger an import from anything
+    # but warehouse.cli at the module scope.
+ from warehouse.classifiers.models import Classifier + from warehouse.db import Session + + session = Session(bind=config.registry["sqlalchemy.engine"]) + + # Look up all of the valid classifiers + all_classifiers = session.query(Classifier).all() + + # Determine if we need to add any new classifiers to the database + missing_classifiers = set(sorted_classifiers) - set( + c.classifier for c in all_classifiers + ) + + # Add any new classifiers to the database + if missing_classifiers: + for name in missing_classifiers: + missing_classifier = Classifier( + classifier=name, ordering=sorted_classifiers.index(name) + ) + session.add(missing_classifier) + + # Check to see if any of our existing classifiers need their ordering + # changed + for classifier in all_classifiers: + classifier.ordering = sorted_classifiers.index(classifier.classifier) + + session.commit() diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 3fbbc4f0e938..1959be1bac56 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -1011,26 +1011,13 @@ def file_upload(request): .one() ) except NoResultFound: - # Look up all of the valid classifiers - all_classifiers = request.db.query(Classifier).all() - # Get all the classifiers for this release - release_classifiers = [ - c for c in all_classifiers if c.classifier in form.classifiers.data - ] - - # Determine if we need to add any new classifiers to the database - missing_classifiers = set(form.classifiers.data or []) - set( - c.classifier for c in release_classifiers + release_classifiers = ( + request.db.query(Classifier) + .filter(Classifier.classifier.in_(form.classifiers.data)) + .all() ) - # Add any new classifiers to the database - if missing_classifiers: - for missing_classifier_name in missing_classifiers: - missing_classifier = Classifier(classifier=missing_classifier_name) - request.db.add(missing_classifier) - release_classifiers.append(missing_classifier) - # Parse the Project URLs structure into a key/value dict project_urls = { name.strip(): url.strip() diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py index 9762ea48f49b..759ccfe59daf 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -10,9 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from packaging.utils import canonicalize_name, canonicalize_version from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound from pyramid.view import view_config -from sqlalchemy.orm import Load +from sqlalchemy.orm import Load, joinedload from sqlalchemy.orm.exc import NoResultFound from warehouse.cache.http import cache_control @@ -167,23 +168,24 @@ def _json_data(request, project, release, *, all_releases): @view_config( route_name="legacy.api.json.project", - context=Project, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_project(project, request): - if project.normalized_name != request.matchdict.get( - "name", project.normalized_name - ): - return HTTPMovedPermanently( - request.current_route_path(name=project.normalized_name), - headers=_CORS_HEADERS, - ) +def json_project(request): + normalized_name = canonicalize_name(request.matchdict["name"]) try: release = ( request.db.query(Release) - .filter(Release.project == project) + .join(Project) + .options( + joinedload(Release.project), + joinedload(Release.description), + joinedload(Release._project_urls), + joinedload(Release._requires_dist), + joinedload(Release.vulnerabilities), + ) + .filter(Project.normalized_name == normalized_name) .order_by( Release.yanked.asc(), Release.is_prerelease.nullslast(), @@ -192,9 +194,16 @@ def json_project(project, request): .limit(1) .one() ) + project = release.project except NoResultFound: return HTTPNotFound(headers=_CORS_HEADERS) + if project.normalized_name != request.matchdict["name"]: + return HTTPMovedPermanently( + request.current_route_path(name=project.normalized_name), + headers=_CORS_HEADERS, + ) + # Apply CORS headers. request.response.headers.update(_CORS_HEADERS) @@ -209,26 +218,44 @@ def json_project(project, request): @view_config( route_name="legacy.api.json.project_slash", - context=Project, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_project_slash(project, request): - return json_project(project, request) +def json_project_slash(request): + return json_project(request) @view_config( route_name="legacy.api.json.release", - context=Release, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_release(release, request): - project = release.project +def json_release(request): + normalized_name = canonicalize_name(request.matchdict["name"]) + canonical_version = canonicalize_version(request.matchdict["version"]) + + try: + release = ( + request.db.query(Release) + .join(Project) + .options( + joinedload(Release.project), + joinedload(Release.description), + joinedload(Release._project_urls), + joinedload(Release._requires_dist), + joinedload(Release.vulnerabilities), + ) + .filter(Project.normalized_name == normalized_name) + .filter( + Release.canonical_version == canonical_version, + ) + .one() + ) + project = release.project + except NoResultFound: + return HTTPNotFound(headers=_CORS_HEADERS) - if project.normalized_name != request.matchdict.get( - "name", project.normalized_name - ): + if project.normalized_name != request.matchdict["name"]: return HTTPMovedPermanently( request.current_route_path(name=project.normalized_name), headers=_CORS_HEADERS, @@ -246,9 +273,8 @@ def json_release(release, request): @view_config( route_name="legacy.api.json.release_slash", - context=Release, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_release_slash(release, request): - return json_release(release, request) +def json_release_slash(request): + return json_release(request) diff --git a/warehouse/migrations/env.py 
b/warehouse/migrations/env.py index 2daf93cab33a..bb170a2919a7 100644 --- a/warehouse/migrations/env.py +++ b/warehouse/migrations/env.py @@ -57,6 +57,7 @@ def run_migrations_online(): connection=connection, target_metadata=db.metadata, compare_server_default=True, + transaction_per_migration=True, ) with context.begin_transaction(): context.run_migrations() diff --git a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py new file mode 100644 index 000000000000..bd8da4a1b429 --- /dev/null +++ b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py @@ -0,0 +1,75 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Migrate Existing Data for Release.is_prerelease + +Revision ID: 4490777c984f +Revises: b0dbcd2f5c77 +Create Date: 2022-06-27 17:49:09.835384 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "4490777c984f" +down_revision = "b0dbcd2f5c77" + + +def _get_num_rows(conn): + return list( + conn.execute( + sa.text("SELECT COUNT(id) FROM releases WHERE is_prerelease IS NULL") + ) + )[0][0] + + +def upgrade(): + conn = op.get_bind() + total_rows = _get_num_rows(conn) + max_loops = total_rows / 100000 * 2 + loops = 0 + while _get_num_rows(conn) > 0 and loops < max_loops: + loops += 1 + conn.execute( + sa.text( + """ + UPDATE releases + SET is_prerelease = pep440_is_prerelease(version) + WHERE id IN ( + SELECT id + FROM releases + WHERE is_prerelease IS NULL + LIMIT 100000 + ) + """ + ) + ) + conn.execute("COMMIT") + + op.alter_column( + "releases", + "is_prerelease", + existing_type=sa.BOOLEAN(), + server_default=sa.text("false"), + nullable=False, + ) + + +def downgrade(): + op.alter_column( + "releases", + "is_prerelease", + existing_type=sa.BOOLEAN(), + server_default=None, + nullable=True, + ) diff --git a/warehouse/migrations/versions/8a335305fd39_add_a_column_for_ordering_classifiers.py b/warehouse/migrations/versions/8a335305fd39_add_a_column_for_ordering_classifiers.py new file mode 100644 index 000000000000..63da9c02e399 --- /dev/null +++ b/warehouse/migrations/versions/8a335305fd39_add_a_column_for_ordering_classifiers.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +Add a column for ordering classifiers + +Revision ID: 8a335305fd39 +Revises: 4490777c984f +Create Date: 2022-07-22 00:06:40.868910 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "8a335305fd39" +down_revision = "4490777c984f" + + +def upgrade(): + op.add_column( + "trove_classifiers", sa.Column("ordering", sa.Integer(), nullable=True) + ) + + +def downgrade(): + op.drop_column("trove_classifiers", "ordering") diff --git a/warehouse/migrations/versions/b0dbcd2f5c77_add_a_column_for_denormalizing_release_.py b/warehouse/migrations/versions/b0dbcd2f5c77_add_a_column_for_denormalizing_release_.py new file mode 100644 index 000000000000..66b2c7ddc416 --- /dev/null +++ b/warehouse/migrations/versions/b0dbcd2f5c77_add_a_column_for_denormalizing_release_.py @@ -0,0 +1,53 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add a column for denormalizing Release.is_prerelease + +Revision ID: b0dbcd2f5c77 +Revises: 8bee9c119e41 +Create Date: 2022-06-27 17:19:00.117464 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "b0dbcd2f5c77" +down_revision = "1e61006a47c2" + + +def upgrade(): + op.add_column("releases", sa.Column("is_prerelease", sa.Boolean(), nullable=True)) + + op.execute( + """ CREATE OR REPLACE FUNCTION maintain_releases_is_prerelease() + RETURNS TRIGGER AS $$ + BEGIN + NEW.is_prerelease := pep440_is_prerelease(NEW.version); + RETURN NEW; + END; + $$ + LANGUAGE plpgsql + """ + ) + + op.execute( + """ CREATE TRIGGER releases_update_is_prerelease + BEFORE INSERT OR UPDATE OF version ON releases + FOR EACH ROW + EXECUTE PROCEDURE maintain_releases_is_prerelease() + """ + ) + + +def downgrade(): + op.drop_column("releases", "is_prerelease") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 74bfe164d3ae..a4f7586afe49 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -47,8 +47,6 @@ from sqlalchemy.orm import validates from sqlalchemy.orm.collections import attribute_mapped_collection from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound -from sqlalchemy.sql import expression -from trove_classifiers import sorted_classifiers from warehouse import db from warehouse.accounts.models import User @@ -419,7 +417,7 @@ def __table_args__(cls): # noqa ) version = Column(Text, nullable=False) canonical_version = Column(Text, nullable=False) - is_prerelease = orm.column_property(func.pep440_is_prerelease(version)) + is_prerelease = Column(Boolean, nullable=False, server_default=sql.false()) author = Column(Text) author_email = Column(Text) maintainer = Column(Text) @@ -461,10 +459,7 @@ def __table_args__(cls): # noqa Classifier, backref="project_releases", secondary=lambda: release_classifiers, # type: ignore - order_by=expression.case( - {c: i for i, c in enumerate(sorted_classifiers)}, - value=Classifier.classifier, - ), + order_by=Classifier.ordering, passive_deletes=True, ) classifiers = association_proxy("_classifiers", "classifier") diff --git a/warehouse/routes.py 
b/warehouse/routes.py index 07cbf3c4f043..086e4545c1bb 100644 --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -442,16 +442,12 @@ def includeme(config): config.add_route( "legacy.api.json.project", "/pypi/{name}/json", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}", read_only=True, domain=warehouse, ) config.add_route( "legacy.api.json.project_slash", "/pypi/{name}/json/", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}", read_only=True, domain=warehouse, ) @@ -459,16 +455,12 @@ def includeme(config): config.add_route( "legacy.api.json.release", "/pypi/{name}/{version}/json", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}/{version}", read_only=True, domain=warehouse, ) config.add_route( "legacy.api.json.release_slash", "/pypi/{name}/{version}/json/", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}/{version}", read_only=True, domain=warehouse, )
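
Note (not part of the patch): the `warehouse classifiers sync` command added above boils down to "insert any classifier missing from the table, then rewrite every row's `ordering` to its index in the canonical sorted list", so that `Release._classifiers` can simply ORDER BY `ordering`. A minimal standalone sketch of that idea follows; `Row` and `sync` are illustrative names, not Warehouse APIs, and an in-memory list stands in for the SQLAlchemy session (the real command also assumes every stored classifier still exists upstream and commits the session at the end).

from dataclasses import dataclass
from typing import Optional


@dataclass
class Row:
    # Stand-in for a trove_classifiers table row: (classifier, ordering).
    classifier: str
    ordering: Optional[int] = None


def sync(rows: list[Row], sorted_classifiers: list[str]) -> list[Row]:
    """Add missing classifiers and refresh the ordering column."""
    known = {row.classifier for row in rows}

    # Add any classifier that exists upstream but not in the "table" yet,
    # storing its position in the canonical sorted list.
    for name in sorted_classifiers:
        if name not in known:
            rows.append(Row(name, sorted_classifiers.index(name)))

    # Refresh ordering for pre-existing rows so ORDER BY ordering matches
    # the upstream sort order even if it changed between releases.
    for row in rows:
        if row.classifier in sorted_classifiers:
            row.ordering = sorted_classifiers.index(row.classifier)

    return rows


if __name__ == "__main__":
    upstream = ["A :: B", "C :: D"]
    table = [Row("C :: D")]
    assert [(r.classifier, r.ordering) for r in sync(table, upstream)] == [
        ("C :: D", 1),
        ("A :: B", 0),
    ]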