From f17f97516991b43cafc320c612a353c591201bca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 17:14:22 +0000 Subject: [PATCH 001/114] build(deps-dev): bump setuptools from 67.5.1 to 67.6.0 (#2273) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9ab027a4a..22c94f001 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1140,14 +1140,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.5.1" +version = "67.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.5.1-py3-none-any.whl", hash = "sha256:1c39d42bda4cb89f7fdcad52b6762e3c309ec8f8715b27c684176b7d71283242"}, - {file = "setuptools-67.5.1.tar.gz", hash = "sha256:15136a251127da2d2e77ac7a1bc231eb504654f7e3346d93613a13f2e2787535"}, + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, ] [package.extras] From 60d98dbbf781371efd74de7d525443a11daa622e Mon Sep 17 00:00:00 2001 From: Han Kruiger Date: Tue, 14 Mar 2023 00:40:48 +0100 Subject: [PATCH 002/114] fix: correctly handle resources with multiple comments in defined_namespace_creator (#2254) FIxed a bug in defined_namespace_creator so that when a resource in an ontology is commented on multiple times (with e.g. rdfs:comment), it creates only a single class variable in the resulting DefinedNamespace where the Python comment is the concatenation of the rdfs:comments. 
--- rdflib/tools/defined_namespace_creator.py | 3 +- test/data/contrived/README.md | 5 ++ test/data/contrived/multiple-comments.ttl | 17 +++++++ .../test_definednamespace_creator.py | 49 +++++++++++++++++++ 4 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 test/data/contrived/README.md create mode 100644 test/data/contrived/multiple-comments.ttl diff --git a/rdflib/tools/defined_namespace_creator.py b/rdflib/tools/defined_namespace_creator.py index 0c93ea756..1076cd6e0 100644 --- a/rdflib/tools/defined_namespace_creator.py +++ b/rdflib/tools/defined_namespace_creator.py @@ -77,7 +77,7 @@ def get_target_namespace_elements( ) -> Tuple[List[Tuple[str, str]], List[str]]: namespaces = {"dcterms": DCTERMS, "owl": OWL, "rdfs": RDFS, "skos": SKOS} q = """ - SELECT DISTINCT ?s ?def + SELECT ?s (GROUP_CONCAT(DISTINCT STR(?def)) AS ?defs) WHERE { # all things in the RDF data (anything RDF.type...) ?s a ?o . @@ -90,6 +90,7 @@ def get_target_namespace_elements( # only get results for the target namespace (supplied by user) FILTER STRSTARTS(STR(?s), "xxx") } + GROUP BY ?s """.replace( "xxx", target_namespace ) diff --git a/test/data/contrived/README.md b/test/data/contrived/README.md new file mode 100644 index 000000000..fd1e0e7d3 --- /dev/null +++ b/test/data/contrived/README.md @@ -0,0 +1,5 @@ +# Contrived Test Data + +This directory contains test data contrived for specific purposes. Files in this +directory should clearly indicate their purpose with a comment. + diff --git a/test/data/contrived/multiple-comments.ttl b/test/data/contrived/multiple-comments.ttl new file mode 100644 index 000000000..a2c073712 --- /dev/null +++ b/test/data/contrived/multiple-comments.ttl @@ -0,0 +1,17 @@ +@prefix rdfs: . +@prefix owl: . +@prefix ex: . + +# This file contains a RDF class with multiple rdfs:comment properties and is +# used to verify the RDFLib defined namespace creator. It is used in +# . + + + a owl:Ontology . 
+ +ex:SomeClass a rdfs:Class, owl:Class; + rdfs:label "Some class"@en; + rdfs:comment "If one uses multiple comment properties, "@en; + rdfs:comment "then it should still only create a single class variable."@en; + rdfs:isDefinedBy ; +. diff --git a/test/test_namespace/test_definednamespace_creator.py b/test/test_namespace/test_definednamespace_creator.py index 65734b217..8866a05d9 100644 --- a/test/test_namespace/test_definednamespace_creator.py +++ b/test/test_namespace/test_definednamespace_creator.py @@ -114,3 +114,52 @@ def test_definednamespace_creator_bad_ns(): universal_newlines=True, ) assert completed.returncode == 1, "subprocess exited incorrectly (failure expected)" + + +def test_definednamespace_creator_multiple_comments(): + """ + Tests that only a single URIRef is declared, even when multiple + rdfs:comments are linked to the resource. + """ + + definednamespace_script = ( + Path(__file__).parent.parent.parent + / "rdflib" + / "tools" + / "defined_namespace_creator.py" + ) + multiple_comments_data_file = ( + Path(__file__).parent.parent / "data" / "contrived" / "multiple-comments.ttl" + ) + print("\n") + print(f"Using {definednamespace_script}...") + print(f"Testing {multiple_comments_data_file}...") + completed = subprocess.run( + [ + sys.executable, + str(definednamespace_script), + str(multiple_comments_data_file), + "http://example.org/multiline-string-example#", + "MULTILINESTRINGEXAMPLE", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + ) + assert completed.returncode == 0, "subprocess exited incorrectly" + assert Path.is_file( + Path("_MULTILINESTRINGEXAMPLE.py") + ), "_MULTILINESTRINGEXAMPLE.py file not created" + + some_class_count = 0 + with open(Path("_MULTILINESTRINGEXAMPLE.py")) as f: + for line in f.readlines(): + if "SomeClass: URIRef" in line: + some_class_count += 1 + + assert ( + some_class_count == 1 + ), f"found {some_class_count} SomeClass definitions instead of 1." 
+ + # cleanup + Path.unlink(Path("_MULTILINESTRINGEXAMPLE.py")) From 1c256765ac7d5e7327695a44269be09e51bd88b1 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Thu, 16 Mar 2023 20:56:48 +0100 Subject: [PATCH 003/114] docs: document avaiable security measures (#2270) docs: document available security measures Several security measures can be used to mitigate risk when processing potentially malicious input. This change adds documentation about available security measures and examples and tests that illustrate their usage. --- docs/apidocs/examples.rst | 16 +++ docs/index.rst | 13 +++ docs/security_considerations.rst | 113 +++++++++++++++++++ examples/secure_with_audit.py | 120 ++++++++++++++++++++ examples/secure_with_urlopen.py | 82 ++++++++++++++ rdflib/graph.py | 97 +++++++++++++--- rdflib/plugins/sparql/evaluate.py | 16 +++ rdflib/plugins/sparql/processor.py | 28 +++++ rdflib/plugins/sparql/update.py | 13 +++ test/conftest.py | 22 +++- test/test_misc/test_security.py | 172 +++++++++++++++++++++++++++++ test/utils/audit.py | 28 +++++ test/utils/urlopen.py | 14 +++ 13 files changed, 719 insertions(+), 15 deletions(-) create mode 100644 docs/security_considerations.rst create mode 100644 examples/secure_with_audit.py create mode 100644 examples/secure_with_urlopen.py create mode 100644 test/test_misc/test_security.py create mode 100644 test/utils/audit.py create mode 100644 test/utils/urlopen.py diff --git a/docs/apidocs/examples.rst b/docs/apidocs/examples.rst index 4e3908b56..43b92c137 100644 --- a/docs/apidocs/examples.rst +++ b/docs/apidocs/examples.rst @@ -115,3 +115,19 @@ These examples all live in ``./examples`` in the source-distribution of RDFLib. :undoc-members: :show-inheritance: +:mod:`~examples.secure_with_audit` Module +----------------------------------------- + +.. 
automodule:: examples.secure_with_audit + :members: + :undoc-members: + :show-inheritance: + + +:mod:`~examples.secure_with_urlopen` Module +------------------------------------------- + +.. automodule:: examples.secure_with_urlopen + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/index.rst b/docs/index.rst index 6a265dd23..e36962ea0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -26,6 +26,18 @@ RDFLib is a pure Python package for working with `RDF `_ * both Queries and Updates are supported +.. caution:: + + RDFLib is designed to access arbitrary network and file resources, in some + cases these are directly requested resources, in other cases they are + indirectly referenced resources. + + If you are using RDFLib to process untrusted documents or queries you should + take measures to restrict file and network access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. Getting started --------------- @@ -56,6 +68,7 @@ If you are familiar with RDF and are looking for details on how RDFLib handles i merging upgrade5to6 upgrade4to5 + security_considerations Reference diff --git a/docs/security_considerations.rst b/docs/security_considerations.rst new file mode 100644 index 000000000..7e25695b7 --- /dev/null +++ b/docs/security_considerations.rst @@ -0,0 +1,113 @@ +.. _security_considerations: Security Considerations + +======================= +Security Considerations +======================= + +RDFLib is designed to access arbitrary network and file resources, in some cases +these are directly requested resources, in other cases they are indirectly +referenced resources. + +An example of where indirect resources are access is JSON-LD processing, where +network or file resources referenced by ``@context`` values will be loaded and +processed. + +RDFLib also supports SPARQL, which has federated query capabilities that allow +queries to query arbitrary remote endpoints. 
+ +If you are using RDFLib to process untrusted documents or queries you should +take measures to restrict file and network access. + +Some measures that can be taken to restrict file and network access are: + +* `Operating System Security Measures`_. +* `Python Runtime Audit Hooks`_. +* `Custom URL Openers`_. + +Of these, operating system security measures are recommended. The other +measures work, but they are not as effective as operating system security +measures, and even if they are used they should be used in conjunction with +operating system security measures. + +Operating System Security Measures +================================== + +Most operating systems provide functionality that can be used to restrict +network and file access of a process. + +Some examples of these include: + +* `Open Container Initiative (OCI) Containers + `_ (aka Docker containers). + + Most OCI runtimes provide mechanisms to restrict network and file access of + containers. For example, using Docker, you can limit your container to only + being access files explicitly mapped into the container and only access the + network through a firewall. For more information refer to the + documentation of the tool you use to manage your OCI containers: + + * `Kubernetes `_ + * `Docker `_ + * `Podman `_ + +* `firejail `_ can be used to + sandbox a process on Linux and restrict its network and file access. + +* File and network access restrictions. + + Most operating systems provide a way to restrict operating system users to + only being able to access files and network resources that are explicitly + allowed. Applications that process untrusted input could be run as a user with + these restrictions in place. + +Many other measures are available, however, listing them outside the scope +of this document. + +Of the listed measures OCI containers are recommended. 
In most cases, OCI +containers are constrained by default and can't access the loopback interface +and can only access files that are explicitly mapped into the container. + +Python Runtime Audit Hooks +========================== + +From Python 3.8 onwards, Python provides a mechanism to install runtime audit +hooks that can be used to limit access to files and network resources. + +The runtime audit hook system is described in more detail in `PEP 578 – Python +Runtime Audit Hooks `_. + +Runtime audit hooks can be installed using the `sys.addaudithook +`_ function, and +will then get called when audit events occur. The audit events raised by the +Python runtime and standard library are described in Python's `audit events +table `_. + +RDFLib uses `urllib.request.urlopen` for HTTP, HTTPS and other network access, +and this function raises a ``urllib.Request`` audit event. For file access, +RDFLib uses `open`, which raises an ``open`` audit event. + +Users of RDFLib can install audit hooks that react to these audit events and +raises an exception when an attempt is made to access files or network resources +that are not explicitly allowed. + +RDFLib's test suite includes tests which verify that audit hooks can block +access to network and file resources. + +RDFLib also includes an example that shows how runtime audit hooks can be +used to restrict network and file access in :mod:`~examples.secure_with_audit`. + +Custom URL Openers +================== + +RDFLib uses the `urllib.request.urlopen` for HTTP, HTTPS and other network +access. This function will use a `urllib.request.OpenerDirector` installed with +`urllib.request.install_opener` to open the URLs. + +Users of RDFLib can install a custom URL opener that raise an exception when an +attempt is made to access network resources that are not explicitly allowed. + +RDFLib's test suite includes tests which verify that custom URL openers can be +used to block access to network resources. 
+ +RDFLib also includes an example that shows how a custom opener can be used to +restrict network access in :mod:`~examples.secure_with_urlopen`. diff --git a/examples/secure_with_audit.py b/examples/secure_with_audit.py new file mode 100644 index 000000000..434be5a49 --- /dev/null +++ b/examples/secure_with_audit.py @@ -0,0 +1,120 @@ +""" +This example demonstrates how to use `Python audit hooks +`_ to block access +to files and URLs. + +It installs a audit hook with `sys.addaudithook `_ that blocks access to files and +URLs that end with ``blocked.jsonld``. + +The code in the example then verifies that the audit hook is blocking access to +URLs and files as expected. +""" + +import logging +import os +import sys +from typing import Any, Optional, Tuple + +from rdflib import Graph + + +def audit_hook(name: str, args: Tuple[Any, ...]) -> None: + """ + An audit hook that blocks access when an attempt is made to open a + file or URL that ends with ``blocked.jsonld``. + + Details of the audit events can be seen in the `audit events + table `_. + + :param name: The name of the audit event. + :param args: The arguments of the audit event. + :return: `None` if the audit hook does not block access. + :raises PermissionError: If the file or URL being accessed ends with ``blocked.jsonld``. + """ + if name == "urllib.Request" and args[0].endswith("blocked.jsonld"): + raise PermissionError("Permission denied for URL") + if name == "open" and args[0].endswith("blocked.jsonld"): + raise PermissionError("Permission denied for file") + return None + + +def main() -> None: + """ + The main code of the example. + + The important steps are: + + * Install a custom audit hook that blocks some URLs and files. + * Attempt to parse a JSON-LD document that will result in a blocked URL being accessed. + * Verify that the audit hook blocked access to the URL. + * Attempt to parse a JSON-LD document that will result in a blocked file being accessed. 
+ * Verify that the audit hook blocked access to the file. + """ + + logging.basicConfig( + level=os.environ.get("PYTHON_LOGGING_LEVEL", logging.INFO), + stream=sys.stderr, + datefmt="%Y-%m-%dT%H:%M:%S", + format=( + "%(asctime)s.%(msecs)03d %(process)d %(thread)d %(levelno)03d:%(levelname)-8s " + "%(name)-12s %(module)s:%(lineno)s:%(funcName)s %(message)s" + ), + ) + + if sys.version_info < (3, 8): + logging.warn("This example requires Python 3.8 or higher") + return None + + # Install the audit hook + # + # note on type error: This is needed because we are running mypy with python + # 3.7 mode, so mypy thinks the previous condition will always be true. + sys.addaudithook(audit_hook) # type: ignore[unreachable] + + graph = Graph() + + # Attempt to parse a JSON-LD document that will result in the blocked URL + # being accessed. + error: Optional[PermissionError] = None + try: + graph.parse( + data=r"""{ + "@context": "http://example.org/blocked.jsonld", + "@id": "example:subject", + "example:predicate": { "@id": "example:object" } + }""", + format="json-ld", + ) + except PermissionError as caught: + logging.info("Permission denied: %s", caught) + error = caught + + # `Graph.parse` would have resulted in a `PermissionError` being raised from + # the audit hook. + assert isinstance(error, PermissionError) + assert error.args[0] == "Permission denied for URL" + + # Attempt to parse a JSON-LD document that will result in the blocked file + # being accessed. + error = None + try: + graph.parse( + data=r"""{ + "@context": "file:///srv/blocked.jsonld", + "@id": "example:subject", + "example:predicate": { "@id": "example:object" } + }""", + format="json-ld", + ) + except PermissionError as caught: + logging.info("Permission denied: %s", caught) + error = caught + + # `Graph.parse` would have resulted in a `PermissionError` being raised from + # the audit hook. 
+ assert isinstance(error, PermissionError) + assert error.args[0] == "Permission denied for file" + + +if __name__ == "__main__": + main() diff --git a/examples/secure_with_urlopen.py b/examples/secure_with_urlopen.py new file mode 100644 index 000000000..fd6576b1e --- /dev/null +++ b/examples/secure_with_urlopen.py @@ -0,0 +1,82 @@ +""" +This example demonstrates how to use a custom global URL opener installed with `urllib.request.install_opener` to block access to URLs. +""" +import http.client +import logging +import os +import sys +from typing import Optional +from urllib.request import HTTPHandler, OpenerDirector, Request, install_opener + +from rdflib import Graph + + +class SecuredHTTPHandler(HTTPHandler): + """ + A HTTP handler that blocks access to URLs that end with "blocked.jsonld". + """ + + def http_open(self, req: Request) -> http.client.HTTPResponse: + """ + Block access to URLs that end with "blocked.jsonld". + + :param req: The request to open. + :return: The response. + :raises PermissionError: If the URL ends with "blocked.jsonld". + """ + if req.get_full_url().endswith("blocked.jsonld"): + raise PermissionError("Permission denied for URL") + return super().http_open(req) + + +def main() -> None: + """ + The main code of the example. + + The important steps are: + + * Install a custom global URL opener that blocks some URLs. + * Attempt to parse a JSON-LD document that will result in a blocked URL being accessed. + * Verify that the URL opener blocked access to the URL. 
+ """ + + logging.basicConfig( + level=os.environ.get("PYTHON_LOGGING_LEVEL", logging.INFO), + stream=sys.stderr, + datefmt="%Y-%m-%dT%H:%M:%S", + format=( + "%(asctime)s.%(msecs)03d %(process)d %(thread)d %(levelno)03d:%(levelname)-8s " + "%(name)-12s %(module)s:%(lineno)s:%(funcName)s %(message)s" + ), + ) + + opener = OpenerDirector() + opener.add_handler(SecuredHTTPHandler()) + install_opener(opener) + + graph = Graph() + + # Attempt to parse a JSON-LD document that will result in the blocked URL + # being accessed. + error: Optional[PermissionError] = None + try: + graph.parse( + data=r"""{ + "@context": "http://example.org/blocked.jsonld", + "@id": "example:subject", + "example:predicate": { "@id": "example:object" } + }""", + format="json-ld", + ) + except PermissionError as caught: + logging.info("Permission denied: %s", caught) + error = caught + + # `Graph.parse` would have resulted in a `PermissionError` being raised from + # the url opener. + assert isinstance(error, PermissionError) + assert error.args[0] == "Permission denied for URL" + + +if __name__ == "__main__": + main() diff --git a/rdflib/graph.py b/rdflib/graph.py index 717788fda..7d32ab38a 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -1387,22 +1387,34 @@ def parse( """ Parse an RDF source adding the resulting triples to the Graph. - The source is specified using one of source, location, file or - data. + The source is specified using one of source, location, file or data. + + .. caution:: + + This method can access directly or indirectly requested network or + file resources, for example, when parsing JSON-LD documents with + ``@context`` directives that point to a network location. + + When processing untrusted or potentially malicious documents, + measures should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. 
:Parameters: - ``source``: An InputSource, file-like object, or string. In the case of a string the string is the location of the source. - - ``location``: A string indicating the relative or absolute URL of the - source. Graph's absolutize method is used if a relative location + - ``location``: A string indicating the relative or absolute URL of + the source. Graph's absolutize method is used if a relative location is specified. - ``file``: A file-like object. - ``data``: A string containing the data to be parsed. - - ``format``: Used if format can not be determined from source, e.g. file - extension or Media Type. Defaults to text/turtle. Format support can - be extended with plugins, but "xml", "n3" (use for turtle), "nt" & - "trix" are built in. + - ``format``: Used if format can not be determined from source, e.g. + file extension or Media Type. Defaults to text/turtle. Format + support can be extended with plugins, but "xml", "n3" (use for + turtle), "nt" & "trix" are built in. - ``publicID``: the logical URI to use as the document base. If None specified the document location is used (at least in the case where there is a document location). @@ -1507,12 +1519,25 @@ def query( """ Query this graph. - A type of 'prepared queries' can be realised by providing - initial variable bindings with initBindings + A type of 'prepared queries' can be realised by providing initial + variable bindings with initBindings + + Initial namespaces are used to resolve prefixes used in the query, if + none are given, the namespaces from the graph's namespace manager are + used. + + .. caution:: + + This method can access indirectly requested network endpoints, for + example, query processing will attempt to access network endpoints + specified in ``SERVICE`` directives. - Initial namespaces are used to resolve prefixes used in the query, - if none are given, the namespaces from the graph's namespace manager - are used. 
+ When processing untrusted or potentially malicious queries, measures + should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. :returntype: :class:`~rdflib.query.Result` @@ -1550,7 +1575,22 @@ def update( use_store_provided: bool = True, **kwargs: Any, ) -> None: - """Update this graph with the given update query.""" + """ + Update this graph with the given update query. + + .. caution:: + + This method can access indirectly requested network endpoints, for + example, query processing will attempt to access network endpoints + specified in ``SERVICE`` directives. + + When processing untrusted or potentially malicious queries, measures + should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. + """ initBindings = initBindings or {} # noqa: N806 initNs = initNs or dict(self.namespaces()) # noqa: N806 @@ -2171,6 +2211,19 @@ def parse( The graph into which the source was parsed. In the case of n3 it returns the root context. + + .. caution:: + + This method can access directly or indirectly requested network or + file resources, for example, when parsing JSON-LD documents with + ``@context`` directives that point to a network location. + + When processing untrusted or potentially malicious documents, + measures should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. """ source = create_input_source( @@ -2401,6 +2454,22 @@ def parse( data: Optional[Union[str, bytes]] = None, **args: Any, ) -> "Graph": + """ + + .. caution:: + + This method can access directly or indirectly requested network or + file resources, for example, when parsing JSON-LD documents with + ``@context`` directives that point to a network location. 
+ + When processing untrusted or potentially malicious documents, + measures should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. + """ + c = ConjunctiveGraph.parse( self, source, publicID, format, location, file, data, **args ) diff --git a/rdflib/plugins/sparql/evaluate.py b/rdflib/plugins/sparql/evaluate.py index 252c73ba4..4f8d687b4 100644 --- a/rdflib/plugins/sparql/evaluate.py +++ b/rdflib/plugins/sparql/evaluate.py @@ -645,6 +645,22 @@ def evalQuery( initBindings: Mapping[str, Identifier], base: Optional[str] = None, ) -> Mapping[Any, Any]: + """ + + .. caution:: + + This method can access indirectly requested network endpoints, for + example, query processing will attempt to access network endpoints + specified in ``SERVICE`` directives. + + When processing untrusted or potentially malicious queries, measures + should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. + """ + initBindings = dict((Variable(k), v) for k, v in initBindings.items()) ctx = QueryContext(graph, initBindings=initBindings) diff --git a/rdflib/plugins/sparql/processor.py b/rdflib/plugins/sparql/processor.py index e4d83494e..c2fb7e54b 100644 --- a/rdflib/plugins/sparql/processor.py +++ b/rdflib/plugins/sparql/processor.py @@ -76,6 +76,21 @@ def update( initBindings: Mapping[str, Identifier] = {}, initNs: Mapping[str, Any] = {}, ) -> None: + """ + .. caution:: + + This method can access indirectly requested network endpoints, for + example, query processing will attempt to access network endpoints + specified in ``SERVICE`` directives. + + When processing untrusted or potentially malicious queries, measures + should be taken to restrict network and file access. 
+ + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. + """ + if isinstance(strOrQuery, str): strOrQuery = translateUpdate(parseUpdate(strOrQuery), initNs=initNs) @@ -102,6 +117,19 @@ def query( # type: ignore[override] Evaluate a query with the given initial bindings, and initial namespaces. The given base is used to resolve relative URIs in the query and will be overridden by any BASE given in the query. + + .. caution:: + + This method can access indirectly requested network endpoints, for + example, query processing will attempt to access network endpoints + specified in ``SERVICE`` directives. + + When processing untrusted or potentially malicious queries, measures + should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. """ if not isinstance(strOrQuery, Query): diff --git a/rdflib/plugins/sparql/update.py b/rdflib/plugins/sparql/update.py index 9be375bd2..f27ee9b36 100644 --- a/rdflib/plugins/sparql/update.py +++ b/rdflib/plugins/sparql/update.py @@ -299,6 +299,19 @@ def evalUpdate( This will return None on success and raise Exceptions on error + .. caution:: + + This method can access indirectly requested network endpoints, for + example, query processing will attempt to access network endpoints + specified in ``SERVICE`` directives. + + When processing untrusted or potentially malicious queries, measures + should be taken to restrict network and file access. + + For information on available security measures, see the RDFLib + :doc:`Security Considerations ` + documentation. 
+ """ for u in update.algebra: diff --git a/test/conftest.py b/test/conftest.py index daee3f288..98fe47385 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,10 +1,14 @@ +import sys +from contextlib import ExitStack + import pytest pytest.register_assert_rewrite("test.utils") +from test.utils.audit import AuditHookDispatcher # noqa: E402 from test.utils.http import ctx_http_server # noqa: E402 from test.utils.httpfileserver import HTTPFileServer # noqa: E402 -from typing import Generator # noqa: E402 +from typing import Generator, Optional # noqa: E402 from rdflib import Graph @@ -47,3 +51,19 @@ def function_httpmock( ) -> Generator[ServedBaseHTTPServerMock, None, None]: _session_function_httpmock.reset() yield _session_function_httpmock + + +@pytest.fixture(scope="session", autouse=True) +def audit_hook_dispatcher() -> Generator[Optional[AuditHookDispatcher], None, None]: + if sys.version_info >= (3, 8): + dispatcher = AuditHookDispatcher() + sys.addaudithook(dispatcher.audit) + yield dispatcher + else: + yield None + + +@pytest.fixture(scope="function") +def exit_stack() -> Generator[ExitStack, None, None]: + with ExitStack() as stack: + yield stack diff --git a/test/test_misc/test_security.py b/test/test_misc/test_security.py new file mode 100644 index 000000000..b4c8fc229 --- /dev/null +++ b/test/test_misc/test_security.py @@ -0,0 +1,172 @@ +import enum +import http.client +import itertools +import logging +from contextlib import ExitStack +from pathlib import Path +from test.utils.audit import AuditHookDispatcher +from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource +from test.utils.urlopen import context_urlopener +from textwrap import dedent +from typing import Any, Iterable, Optional, Tuple +from urllib.request import HTTPHandler, OpenerDirector, Request + +import pytest +from _pytest.mark.structures import ParameterSet + +from rdflib import Graph +from rdflib.namespace import Namespace + +from ..utils import GraphHelper +from 
..utils.path import ctx_chdir + +EGNS = Namespace("http://example.org/") + +JSONLD_CONTEXT = """ +{ + "@context": { + "ex": "http://example.org/" + } +} +""" + +EXPECTED_GRAPH = Graph().add((EGNS.subject, EGNS.predicate, EGNS.object)) + + +def test_default(tmp_path: Path) -> None: + context_file = tmp_path / "context.jsonld" + context_file.write_text(dedent(JSONLD_CONTEXT)) + + data = f""" + {{ + "@context": "{context_file.as_uri()}", + "@id": "ex:subject", + "ex:predicate": {{ "@id": "ex:object" }} + }} + """ + + graph = Graph() + graph.parse(format="json-ld", data=data) + logging.debug("graph = %s", GraphHelper.triple_set(graph)) + GraphHelper.assert_sets_equals(EXPECTED_GRAPH, graph) + + +class Defence(enum.Enum): + NONE = enum.auto() + AUDIT_HOOK = enum.auto() + URL_OPENER = enum.auto() + + +class URIKind(enum.Enum): + FILE = enum.auto() + HTTP = enum.auto() + RELATIVE = enum.auto() + + +def generate_make_block_file_cases() -> Iterable[ParameterSet]: + for defence, uri_kind in itertools.product(Defence, URIKind): + if defence == Defence.URL_OPENER and uri_kind != URIKind.HTTP: + # URL opener only works for not file URIs + continue + yield pytest.param(defence, uri_kind) + + +@pytest.mark.parametrize(["defence", "uri_kind"], generate_make_block_file_cases()) +def test_block_file( + tmp_path: Path, + audit_hook_dispatcher: Optional[AuditHookDispatcher], + http_file_server: HTTPFileServer, + exit_stack: ExitStack, + defence: Defence, + uri_kind: URIKind, +) -> None: + if audit_hook_dispatcher is None: + pytest.skip( + "audit hook dispatcher not available, likely because of Python version" + ) + + context_file = tmp_path / "context.jsonld" + context_file.write_text(dedent(JSONLD_CONTEXT)) + context_file_served = http_file_server.add_file_with_caching( + ProtoFileResource((), context_file) + ) + + context_uri: str + if uri_kind == URIKind.FILE: + context_uri = context_file.as_uri() + elif uri_kind == URIKind.HTTP: + context_uri = context_file_served.request_url + 
elif uri_kind == URIKind.RELATIVE: + context_uri = context_file.name + exit_stack.enter_context(ctx_chdir(tmp_path)) + else: + raise ValueError(f"unknown URI kind: {uri_kind}") + + data = f""" + {{ + "@context": "{context_uri}", + "@id": "ex:subject", + "ex:predicate": {{ "@id": "ex:object" }} + }} + """ + + data_file = tmp_path / "data.jsonld" + data_file.write_text(dedent(data)) + + if defence == Defence.AUDIT_HOOK and uri_kind == URIKind.FILE: + + def audit_hook(name: str, args: Tuple[Any, ...]) -> None: + logging.info("block_file_access: name = %s, args = %s", name, args) + if name == "open" and args[0] == f"{context_file.absolute()}": + raise PermissionError("access blocked") + + exit_stack.enter_context(audit_hook_dispatcher.ctx_hook("open", audit_hook)) + + elif defence == Defence.AUDIT_HOOK and uri_kind == URIKind.RELATIVE: + + def audit_hook(name: str, args: Tuple[Any, ...]) -> None: + logging.info("block_file_access: name = %s, args = %s", name, args) + if name == "open" and args[0] == f"{Path.cwd() / context_file.name}": + raise PermissionError("access blocked") + + exit_stack.enter_context(audit_hook_dispatcher.ctx_hook("open", audit_hook)) + + elif defence == Defence.AUDIT_HOOK and uri_kind == URIKind.HTTP: + + def audit_hook(name: str, args: Tuple[Any, ...]) -> None: + logging.info("block_file_access: name = %s, args = %s", name, args) + if name == "urllib.Request" and args[0] == context_file_served.request_url: + raise PermissionError("access blocked") + + exit_stack.enter_context( + audit_hook_dispatcher.ctx_hook("urllib.Request", audit_hook) + ) + + elif defence == Defence.URL_OPENER and uri_kind == URIKind.HTTP: + opener = OpenerDirector() + + class SecuredHTTPHandler(HTTPHandler): + def http_open(self, req: Request) -> http.client.HTTPResponse: + if req.get_full_url() == context_file_served.request_url: + raise PermissionError("access blocked") + return super().http_open(req) + + opener.add_handler(SecuredHTTPHandler()) + + 
exit_stack.enter_context(context_urlopener(opener)) + + elif defence == Defence.NONE: + pass + else: + raise ValueError( + f"unsupported defence {defence} and uri_kind {uri_kind} combination" + ) + + graph = Graph() + if defence != Defence.NONE: + with pytest.raises(PermissionError): + graph.parse(format="json-ld", data=data) + assert len(graph) == 0 + else: + graph.parse(format="json-ld", data=data) + GraphHelper.assert_sets_equals(EXPECTED_GRAPH, graph) diff --git a/test/utils/audit.py b/test/utils/audit.py new file mode 100644 index 000000000..00045275a --- /dev/null +++ b/test/utils/audit.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from collections import defaultdict +from contextlib import contextmanager +from dataclasses import dataclass, field +from typing import Any, Callable, DefaultDict, Generator, List, Tuple + +AuditHookType = Callable[[str, Tuple[Any, ...]], Any] + + +@dataclass +class AuditHookDispatcher: + handlers: DefaultDict[str, List[AuditHookType]] = field( + default_factory=lambda: defaultdict(list) + ) + + def audit(self, name: str, args: Tuple[Any, ...]) -> Any: + handlers = self.handlers[name] + for handler in handlers: + handler(name, args) + + @contextmanager + def ctx_hook(self, name: str, hook: AuditHookType) -> Generator[None, None, None]: + self.handlers[name].append(hook) + try: + yield None + finally: + self.handlers[name].remove(hook) diff --git a/test/utils/urlopen.py b/test/utils/urlopen.py new file mode 100644 index 000000000..fb6597077 --- /dev/null +++ b/test/utils/urlopen.py @@ -0,0 +1,14 @@ +import urllib.request +from contextlib import contextmanager +from typing import Generator, Optional +from urllib.request import OpenerDirector, install_opener + + +@contextmanager +def context_urlopener(opener: OpenerDirector) -> Generator[OpenerDirector, None, None]: + old_opener: Optional[OpenerDirector] = urllib.request._opener # type: ignore[attr-defined] + try: + install_opener(opener) + yield opener + finally: + 
install_opener(old_opener) # type: ignore[arg-type] From 309848bda0098e8d5dc3dcec179d9ca0e2196455 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Thu, 16 Mar 2023 23:26:37 +0100 Subject: [PATCH 004/114] chore: prepare for release 6.3.0 (#2276) Update the changelog and other parts to prepare for the 6.3.0 release. This will be tagged to 6.3.0. --- CHANGELOG.md | 542 ++++++++++++++++++++++++--------------------- LICENSE | 2 +- README.md | 4 +- docs/conf.py | 2 +- pyproject.toml | 2 +- rdflib/__init__.py | 2 +- 6 files changed, 292 insertions(+), 262 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ec476ea39..798dabfd3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,65 +1,27 @@ -# 2022-10-16 RELEASE MAJOR.MINOR.PATCH +# 2023-03-16 RELEASE 6.3.0 -## User facing changes - -This section lists changes that have a potential impact on users of RDFLib, -changes with no user impact are not included in this section. - - +## Important Information - +- RDFLib will drop support for Python 3.7 when it becomes EOL on 2023-06-27, + this will not be considered a breaking change, and RDFLib's major version + number will not be changed solely on the basis of Python 3.7 support being + dropped. +## User facing changes - - - - - +This section lists changes that have a potential impact on users of RDFLib, +changes with no user impact are not included in this section. - Add chunk serializer that facilitates the encoding of a graph into multiple N-Triples encoded chunks. [PR #1968](https://github.com/RDFLib/rdflib/pull/1968). - - - - - - - - - - - - - Fixes passing `NamespaceManager` in `ConjunctiveGraph`'s method `get_context()`. - The `get_context()` method will now pass the `NamespaceManager` of `ConjunctiveGraph` to the `namespace_manager` attribute of the newly created context graph, instead of the `ConjunctiveGraph` object itself. This cleans up an old FIXME commment. 
+ The `get_context()` method will now pass the `NamespaceManager` of `ConjunctiveGraph` to the `namespace_manager` attribute of the newly created context graph, instead of the `ConjunctiveGraph` object itself. This cleans up an old `FIXME` comment. [PR #2073](https://github.com/RDFLib/rdflib/pull/2073). - - - - - - - - - - - - - InfixOWL fixes and cleanup. Closed [issue #2030](https://github.com/RDFLib/rdflib/issues/2030). [PR #2024](https://github.com/RDFLib/rdflib/pull/2024), @@ -79,67 +41,18 @@ and will be removed for release. major version. - Eliminated the use of mutable data structures in some argument defaults. - - - - - - - - - - - - - Fixed some cross-referencing issues in RDFLib documentation. Closed [issue #1878](https://github.com/RDFLib/rdflib/issues/1878). [PR #2036](https://github.com/RDFLib/rdflib/pull/2036). - - - - - - - - - - - - - Fixed import of `xml.sax.handler` in `rdflib.plugins.parsers.trix` so that it no longer tries to import it from `xml.sax.saxutils`. [PR #2041](https://github.com/RDFLib/rdflib/pull/2041). - - - - - - - - - - - - - Removed a pre python 3.5 regex related workaround in the REPLACE SPARQL function. [PR #2042](https://github.com/RDFLib/rdflib/pull/2042). - - - - - - - - - - - - - - Fixed some issues with SPARQL XML result parsing that caused problems with [`lxml`](https://lxml.de/). Closed [issue #2035](https://github.com/RDFLib/rdflib/issues/2035), [issue #1847](https://github.com/RDFLib/rdflib/issues/1847). @@ -152,173 +65,70 @@ and will be removed for release. - Elements inside `` that are not `` are now ignored. - Also added type hints to `rdflib.plugins.sparql.results.xmlresults`. - - - - - - - - - - - - - -- Added type hints. - - `rdflib.store` and builtin stores have mostly complete type hints. - [PR #2057](https://github.com/RDFLib/rdflib/pull/2057). - - `rdflib.graph` have mostly complete type hints. +- Added type hints to the following modules: + - `rdflib.store`. 
+ [PR #2057](https://github.com/RDFLib/rdflib/pull/2057). + - `rdflib.graph`. [PR #2080](https://github.com/RDFLib/rdflib/pull/2080). - - `rdflib.plugins.sparql.algebra` and `rdflib.plugins.sparql.operators` have - mostly complete type hints. - [PR #2094](https://github.com/RDFLib/rdflib/pull/2094). - - `rdflib.query` and `rdflib.plugins.sparql.results.*` have mostly complete - type hints. - [PR #2097](https://github.com/RDFLib/rdflib/pull/2097). - - - - - - - - - - - - - - + - `rdflib.plugins.sparql.*`. + [PR #2094](https://github.com/RDFLib/rdflib/pull/2094), + [PR #2133](https://github.com/RDFLib/rdflib/pull/2133), + [PR #2265](https://github.com/RDFLib/rdflib/pull/2265), + [PR #2097](https://github.com/RDFLib/rdflib/pull/2097), + [PR #2268](https://github.com/RDFLib/rdflib/pull/2268). + - `rdflib.query`. + [PR #2265](https://github.com/RDFLib/rdflib/pull/2265). + - `rdflib.parser` and `rdflib.plugins.parsers.*`. + [PR #2232](https://github.com/RDFLib/rdflib/pull/2232). + - `rdflib.exceptions`. + [PR #2232](https://github.com/RDFLib/rdflib/pull/2232) + - `rdflib.shared.jsonld.*`. + [PR #2232](https://github.com/RDFLib/rdflib/pull/2232). + - `rdflib.collection`. + [PR #2263](https://github.com/RDFLib/rdflib/pull/2263). + - `rdflib.util`. + [PR #2262](https://github.com/RDFLib/rdflib/pull/2262). + - `rdflib.path`. + [PR #2261](https://github.com/RDFLib/rdflib/pull/2261). + - Removed pre python 3.7 compatibility code. [PR #2066](https://github.com/RDFLib/rdflib/pull/2066). - Removed fallback in case the `shutil` module does not have the `move` function. - - - - - - - - - - - - - - Improve file-URI and path handling in `Graph.serialize` and `Result.serialize` to address problems with windows path handling in `Result.serialize` and to make the behavior between `Graph.serialize` and `Result.serialie` more consistent. Closed [issue #2067](https://github.com/RDFLib/rdflib/issues/2067). - [PR #2068](https://github.com/RDFLib/rdflib/pull/2068). 
+ [PR #2065](https://github.com/RDFLib/rdflib/pull/2065). - String values for the `destination` argument will now only be treated as file URIs if `urllib.parse.urlparse` returns their schema as `file`. - Simplified file writing to avoid a temporary file. - - - - - - - - - - - - - - Narrow the type of context-identifiers/graph-names from `rdflib.term.Node` to `rdflib.term.IdentifiedNode` as no supported abstract syntax allows for other types of context-identifiers. [PR #2069](https://github.com/RDFLib/rdflib/pull/2069). - - - - - - - - - - - - - - Always parse HexTuple files as utf-8. [PR #2070](https://github.com/RDFLib/rdflib/pull/2070). - - - - - - - - - - - - - - Fixed handling of `Literal` `datatype` to correctly differentiate between blank string values and undefined values, also changed the datatype of `rdflib.term.Literal.datatype` from `Optional[str]` to `Optional[URIRef]` now that all non-`URIRef` `str` values will be converted to `URIRef`. [PR #2076](https://github.com/RDFLib/rdflib/pull/2076). - - - - - - - - - - - - - - Fixed the generation of VALUES block for federated queries. The values block was including non-variable values like BNodes which resulted - in invalid queries. - [PR #2079](https://github.com/RDFLib/rdflib/pull/2079). - - - - - - - - - - - - - + in invalid queries. Closed [issue #2079](https://github.com/RDFLib/rdflib/issues/2079). + [PR #2084](https://github.com/RDFLib/rdflib/pull/2084). - Only register the `rdflib.plugins.stores.berkeleydb.BerkeleyDB` as a store plugin if the `berkeleydb` module is present. Closed [issue #1816](https://github.com/RDFLib/rdflib/issues/1816). [PR #2096](https://github.com/RDFLib/rdflib/pull/2096). - - - - - - - - - - - - - - Fixed serialization of BNodes in TriG. The TriG serializer was only considering BNode references inside a single graph and not counting the BNodes subjects as references when considering if a @@ -327,38 +137,258 @@ and will be removed for release. BNodes in other graphs. 
[PR #2085](https://github.com/RDFLib/rdflib/pull/2085). - - - - - - - - - - - - -- PLACEHOLDER. - Description of changes. - Closed [issue #....](https://github.com/RDFLib/rdflib/issues/). - [PR #....](https://ichard26.github.io/next-pr-number/?owner=RDFLib&name=rdflib). - - - - - - +- Deprecated `rdflib.path.evalPath` in favor of `rdflib.path.eval_path` which is + PEP-8 compliant. [PR #2046](https://github.com/RDFLib/rdflib/pull/2046) -## PRs merged since last release +- Added `charset=UTF-8` to the `Content-Type` header sent when doing an update + with `SPARQLConnector`. Closed [issue + #2095](https://github.com/RDFLib/rdflib/issues/2095). [PR + #2112](https://github.com/RDFLib/rdflib/pull/2112). + +- Removed the `rdflib.plugins.sparql.parserutils.plist` class as it served no + discernible purpose. [PR #2143](https://github.com/RDFLib/rdflib/pull/2143) + +- Changed the TriG serializer to not generate prefixes for empty graph IDs. + Closed [issue #2154](https://github.com/RDFLib/rdflib/issues/2154). + [PR #2160](https://github.com/RDFLib/rdflib/pull/2160). + +- Fixed handling of relative context files in the JSON-LD parser. + Closed [issue #2164](https://github.com/RDFLib/rdflib/issues/2164). + [PR #2165](https://github.com/RDFLib/rdflib/pull/2165). - # 2022-07-16 RELEASE 6.2.0 diff --git a/LICENSE b/LICENSE index 06c82bb32..26039b46f 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2002-2022, RDFLib Team +Copyright (c) 2002-2023, RDFLib Team All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index 605998318..49c63ac8a 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ Help with maintenance of all of the RDFLib family of packages is always welcome ## Versions & Releases -* `6.3.0a0` current `main` branch +* `6.4.0a0` current `main` branch * `6.x.y` current release and support Python 3.7+ only. 
Many improvements over 5.0.0 * see [Releases](https://github.com/RDFLib/rdflib/releases) * `5.x.y` supports Python 2.7 and 3.4+ and is [mostly backwards compatible with 4.2.2](https://rdflib.readthedocs.io/en/stable/upgrade4to5.html). @@ -61,7 +61,7 @@ The stable release of RDFLib may be installed with Python's package management t Alternatively manually download the package from the Python Package Index (PyPI) at https://pypi.python.org/pypi/rdflib -The current version of RDFLib is 6.2.0, see the ``CHANGELOG.md`` file for what's new in this release. +The current version of RDFLib is 6.3.0, see the ``CHANGELOG.md`` file for what's new in this release. ### Installation of the current main branch (for developers) diff --git a/docs/conf.py b/docs/conf.py index 3068cc123..40ef87a1e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -79,7 +79,7 @@ # General information about the project. project = "rdflib" -copyright = "2009 - 2022, RDFLib Team" +copyright = "2009 - 2023, RDFLib Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/pyproject.toml b/pyproject.toml index 24be1293f..11f76e5eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.3.0a0" +version = "6.3.0" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] diff --git a/rdflib/__init__.py b/rdflib/__init__.py index 45648456f..ebf1344c8 100644 --- a/rdflib/__init__.py +++ b/rdflib/__init__.py @@ -56,7 +56,7 @@ __docformat__ = "restructuredtext en" __version__: str = _DISTRIBUTION_METADATA["Version"] -__date__ = "2022-12-20" +__date__ = "2023-03-16" __all__ = [ "URIRef", From 7beae75244dadd7e4d47af41c8bc684e2897a010 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 17 Mar 2023 00:12:10 +0100 Subject: [PATCH 005/114] chore: 
post 6.3.0 release tasks (#2279) Bump version of RDFLib and update docker `requirements.{in,txt}`. --- docker/latest/requirements.in | 2 +- docker/latest/requirements.txt | 11 ++++------- pyproject.toml | 2 +- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/docker/latest/requirements.in b/docker/latest/requirements.in index 5bf1d95c2..a0419fb13 100644 --- a/docker/latest/requirements.in +++ b/docker/latest/requirements.in @@ -1,4 +1,4 @@ # This file is used for building a docker image of hte latest rdflib release. It # will be updated by dependabot when new releases are made. -rdflib==6.2.0 +rdflib==6.3.0 html5lib diff --git a/docker/latest/requirements.txt b/docker/latest/requirements.txt index b038fa6a4..7f7563383 100644 --- a/docker/latest/requirements.txt +++ b/docker/latest/requirements.txt @@ -2,22 +2,19 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --resolver=backtracking ./docker/latest/requirements.in +# pip-compile --resolver=backtracking docker/latest/requirements.in # html5lib==1.1 - # via -r ./docker/latest/requirements.in + # via -r docker/latest/requirements.in isodate==0.6.1 # via rdflib pyparsing==3.0.9 # via rdflib -rdflib==6.2.0 - # via -r ./docker/latest/requirements.in +rdflib==6.3.0 + # via -r docker/latest/requirements.in six==1.16.0 # via # html5lib # isodate webencodings==0.5.1 # via html5lib - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/pyproject.toml b/pyproject.toml index 11f76e5eb..e687d17c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.3.0" +version = "6.4.0a0" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] From 334787be6994cb12a27093c0ebdbf11dfd68f26d Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 17 Mar 2023 
00:46:45 +0100 Subject: [PATCH 006/114] build: explicitly specify `packages` in `pyproject.toml` (#2280) The default behaviour makes it more of a hassle to republish RDFLib to a separate package, something which I plan to do for testing purposes and possibly other reasons. More changes may follow in a similar vein. --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index e687d17c6..ce28f62be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,9 @@ classifiers=[ "Natural Language :: English" ] readme = "README.md" +packages = [ + { include = "rdflib" }, +] [tool.poetry.scripts] rdfpipe = 'rdflib.tools.rdfpipe:main' From bea782f925991d61923b6ec5f57098e9ac19acf0 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 17 Mar 2023 22:24:31 +0100 Subject: [PATCH 007/114] docs: don't use kroki (#2284) The Kroki server is currently experiencing some issues which breaks our build, this change eliminates the use of Kroki in favour of directly using the generated SVG images which is checked into git alongside the PlantUML sources. I also added a task to the Taskfile to re-generate the SVG images from the PlantUML sources by calling docker. 
--- Taskfile.yml | 13 + docs/_static/term_class_hierarchy.plantuml | 58 ++++ docs/_static/term_class_hierarchy.svg | 1 + docs/conf.py | 1 - docs/rdf_terms.rst | 67 +--- poetry.lock | 369 ++++++++++----------- pyproject.toml | 1 - 7 files changed, 245 insertions(+), 265 deletions(-) create mode 100644 docs/_static/term_class_hierarchy.plantuml create mode 100644 docs/_static/term_class_hierarchy.svg diff --git a/Taskfile.yml b/Taskfile.yml index 9ee289348..ec50b23b3 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -329,6 +329,19 @@ tasks: {{.DOCKER}} image push {{.OCI_REFERENCE}}:latest {{.DOCKER}} image push {{.OCI_REFERENCE}}:${_latest_rdflib_version} fi + + docs:build-diagrams: + desc: Build documentation diagrams + cmds: + - cmd: | + shopt -s globstar; + for plantuml_file in ./**/*.plantuml + do + cat "${plantuml_file}" \ + | docker run --rm -i plantuml/plantuml -tsvg -pipe \ + > "${plantuml_file%.*}.svg" + done + _rimraf: # This task is a utility task for recursively removing directories, it is # similar to rm -rf but not identical and it should work wherever there is diff --git a/docs/_static/term_class_hierarchy.plantuml b/docs/_static/term_class_hierarchy.plantuml new file mode 100644 index 000000000..a27293764 --- /dev/null +++ b/docs/_static/term_class_hierarchy.plantuml @@ -0,0 +1,58 @@ +@startuml +skinparam shadowing false +skinparam monochrome true +skinparam packageStyle rectangle +skinparam backgroundColor FFFFFE + +class Node + +class Identifier { + eq(other) -> bool + neq(other) -> bool + startswith(prefix: str, start, end) -> bool +} +Identifier -up-|> Node + +class IdentifiedNode { + toPython() -> str +} +IdentifiedNode -up-|> Identifier + +class URIRef { + n3(namespace_manager) -> str + defrag() -> URIRef + de_skolemize() -> BNode +} +URIRef -up-|> IdentifiedNode + + +class Genid +Genid -up-|> URIRef + +class RDFLibGenid +RDFLibGenid -up-|> Genid + +class BNode { + n3(namespace_manager) -> str + skolemize(authority, basepath) -> RDFLibGenid +} 
+BNode -up-|> IdentifiedNode + +class Literal { + datatype: Optional[str] + lang: Optional[str] + value: Any + + normalize() -> Literal + n3(namespace_manager) -> str + toPython() -> str +} +Literal -up-|> Identifier + +class Variable { + n3(namespace_manager) -> str + toPython() -> str +} +Variable -up-|> Identifier + +@enduml diff --git a/docs/_static/term_class_hierarchy.svg b/docs/_static/term_class_hierarchy.svg new file mode 100644 index 000000000..4fbb9e90c --- /dev/null +++ b/docs/_static/term_class_hierarchy.svg @@ -0,0 +1 @@ +NodeIdentifiereq(other) -> boolneq(other) -> boolstartswith(prefix: str, start, end) -> boolIdentifiedNodetoPython() -> strURIRefn3(namespace_manager) -> strdefrag() -> URIRefde_skolemize() -> BNodeGenidRDFLibGenidBNoden3(namespace_manager) -> strskolemize(authority, basepath) -> RDFLibGenidLiteraldatatype: Optional[str]lang: Optional[str]value: Anynormalize() -> Literaln3(namespace_manager) -> strtoPython() -> strVariablen3(namespace_manager) -> strtoPython() -> str \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 40ef87a1e..222d9b239 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -44,7 +44,6 @@ "sphinx.ext.ifconfig", "sphinx.ext.viewcode", "myst_parser", - "sphinxcontrib.kroki", "sphinx.ext.autosectionlabel", ] diff --git a/docs/rdf_terms.rst b/docs/rdf_terms.rst index 0ca431a4b..66abd1838 100644 --- a/docs/rdf_terms.rst +++ b/docs/rdf_terms.rst @@ -17,68 +17,11 @@ Class hierarchy All terms in RDFLib are sub-classes of the :class:`rdflib.term.Identifier` class. A class diagram of the various terms is: .. _term_class_hierarchy: -.. 
kroki:: - :caption: Term Class Hierarchy - :type: plantuml - - @startuml - skinparam shadowing false - skinparam monochrome true - skinparam packageStyle rectangle - skinparam backgroundColor FFFFFE - - class Node - - class Identifier { - eq(other) -> bool - neq(other) -> bool - startswith(prefix: str, start, end) -> bool - } - Identifier -up-|> Node - - class IdentifiedNode { - toPython() -> str - } - IdentifiedNode -up-|> Identifier - - class URIRef { - n3(namespace_manager) -> str - defrag() -> URIRef - de_skolemize() -> BNode - } - URIRef -up-|> IdentifiedNode - - - class Genid - Genid -up-|> URIRef - - class RDFLibGenid - RDFLibGenid -up-|> Genid - - class BNode { - n3(namespace_manager) -> str - skolemize(authority, basepath) -> RDFLibGenid - } - BNode -up-|> IdentifiedNode - - class Literal { - datatype: Optional[str] - lang: Optional[str] - value: Any - - normalize() -> Literal - n3(namespace_manager) -> str - toPython() -> str - } - Literal -up-|> Identifier - - class Variable { - n3(namespace_manager) -> str - toPython() -> str - } - Variable -up-|> Identifier - - @enduml +.. figure:: /_static/term_class_hierarchy.svg + :alt: Term Class Hierarchy + + Term Class Hierarchy + Nodes are a subset of the Terms that underlying stores actually persist. diff --git a/poetry.lock b/poetry.lock index 22c94f001..85f3a851e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -33,18 +33,18 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy [[package]] name = "babel" -version = "2.11.0" +version = "2.12.1" description = "Internationalization utilities" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, - {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, ] [package.dependencies] -pytz = ">=2015.7" +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "berkeleydb" @@ -122,100 +122,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = 
"charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = 
"charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = 
"charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = 
"charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = 
"charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file 
= "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -248,63 +235,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.1" +version = "7.2.2" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49567ec91fc5e0b15356da07a2feabb421d62f52a9fff4b1ec40e9e19772f5f8"}, - {file = "coverage-7.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2ef6cae70168815ed91388948b5f4fcc69681480a0061114db737f957719f03"}, - {file = "coverage-7.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3004765bca3acd9e015794e5c2f0c9a05587f5e698127ff95e9cfba0d3f29339"}, - {file = 
"coverage-7.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cca7c0b7f5881dfe0291ef09ba7bb1582cb92ab0aeffd8afb00c700bf692415a"}, - {file = "coverage-7.2.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2167d116309f564af56f9aa5e75ef710ef871c5f9b313a83050035097b56820"}, - {file = "coverage-7.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cb5f152fb14857cbe7f3e8c9a5d98979c4c66319a33cad6e617f0067c9accdc4"}, - {file = "coverage-7.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:87dc37f16fb5e3a28429e094145bf7c1753e32bb50f662722e378c5851f7fdc6"}, - {file = "coverage-7.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e191a63a05851f8bce77bc875e75457f9b01d42843f8bd7feed2fc26bbe60833"}, - {file = "coverage-7.2.1-cp310-cp310-win32.whl", hash = "sha256:e3ea04b23b114572b98a88c85379e9e9ae031272ba1fb9b532aa934c621626d4"}, - {file = "coverage-7.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:0cf557827be7eca1c38a2480484d706693e7bb1929e129785fe59ec155a59de6"}, - {file = "coverage-7.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:570c21a29493b350f591a4b04c158ce1601e8d18bdcd21db136fbb135d75efa6"}, - {file = "coverage-7.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e872b082b32065ac2834149dc0adc2a2e6d8203080501e1e3c3c77851b466f9"}, - {file = "coverage-7.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac6343bae03b176e9b58104a9810df3cdccd5cfed19f99adfa807ffbf43cf9b"}, - {file = "coverage-7.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abacd0a738e71b20e224861bc87e819ef46fedba2fb01bc1af83dfd122e9c319"}, - {file = "coverage-7.2.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9256d4c60c4bbfec92721b51579c50f9e5062c21c12bec56b55292464873508"}, - {file = 
"coverage-7.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80559eaf6c15ce3da10edb7977a1548b393db36cbc6cf417633eca05d84dd1ed"}, - {file = "coverage-7.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0bd7e628f6c3ec4e7d2d24ec0e50aae4e5ae95ea644e849d92ae4805650b4c4e"}, - {file = "coverage-7.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09643fb0df8e29f7417adc3f40aaf379d071ee8f0350ab290517c7004f05360b"}, - {file = "coverage-7.2.1-cp311-cp311-win32.whl", hash = "sha256:1b7fb13850ecb29b62a447ac3516c777b0e7a09ecb0f4bb6718a8654c87dfc80"}, - {file = "coverage-7.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:617a94ada56bbfe547aa8d1b1a2b8299e2ec1ba14aac1d4b26a9f7d6158e1273"}, - {file = "coverage-7.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8649371570551d2fd7dee22cfbf0b61f1747cdfb2b7587bb551e4beaaa44cb97"}, - {file = "coverage-7.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d2b9b5e70a21474c105a133ba227c61bc95f2ac3b66861143ce39a5ea4b3f84"}, - {file = "coverage-7.2.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae82c988954722fa07ec5045c57b6d55bc1a0890defb57cf4a712ced65b26ddd"}, - {file = "coverage-7.2.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:861cc85dfbf55a7a768443d90a07e0ac5207704a9f97a8eb753292a7fcbdfcfc"}, - {file = "coverage-7.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0339dc3237c0d31c3b574f19c57985fcbe494280153bbcad33f2cdf469f4ac3e"}, - {file = "coverage-7.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5928b85416a388dd557ddc006425b0c37e8468bd1c3dc118c1a3de42f59e2a54"}, - {file = "coverage-7.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d3843ca645f62c426c3d272902b9de90558e9886f15ddf5efe757b12dd376f5"}, - {file = "coverage-7.2.1-cp37-cp37m-win32.whl", hash = "sha256:6a034480e9ebd4e83d1aa0453fd78986414b5d237aea89a8fdc35d330aa13bae"}, 
- {file = "coverage-7.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6fce673f79a0e017a4dc35e18dc7bb90bf6d307c67a11ad5e61ca8d42b87cbff"}, - {file = "coverage-7.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f099da6958ddfa2ed84bddea7515cb248583292e16bb9231d151cd528eab657"}, - {file = "coverage-7.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:97a3189e019d27e914ecf5c5247ea9f13261d22c3bb0cfcfd2a9b179bb36f8b1"}, - {file = "coverage-7.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a81dbcf6c6c877986083d00b834ac1e84b375220207a059ad45d12f6e518a4e3"}, - {file = "coverage-7.2.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d2c3dde4c0b9be4b02067185136b7ee4681978228ad5ec1278fa74f5ca3e99"}, - {file = "coverage-7.2.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a209d512d157379cc9ab697cbdbb4cfd18daa3e7eebaa84c3d20b6af0037384"}, - {file = "coverage-7.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f3d07edb912a978915576a776756069dede66d012baa503022d3a0adba1b6afa"}, - {file = "coverage-7.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8dca3c1706670297851bca1acff9618455122246bdae623be31eca744ade05ec"}, - {file = "coverage-7.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b1991a6d64231a3e5bbe3099fb0dd7c9aeaa4275ad0e0aeff4cb9ef885c62ba2"}, - {file = "coverage-7.2.1-cp38-cp38-win32.whl", hash = "sha256:22c308bc508372576ffa3d2dbc4824bb70d28eeb4fcd79d4d1aed663a06630d0"}, - {file = "coverage-7.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:b0c0d46de5dd97f6c2d1b560bf0fcf0215658097b604f1840365296302a9d1fb"}, - {file = "coverage-7.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4dd34a935de268a133e4741827ae951283a28c0125ddcdbcbba41c4b98f2dfef"}, - {file = "coverage-7.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f8318ed0f3c376cfad8d3520f496946977abde080439d6689d7799791457454"}, - {file = 
"coverage-7.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:834c2172edff5a08d78e2f53cf5e7164aacabeb66b369f76e7bb367ca4e2d993"}, - {file = "coverage-7.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4d70c853f0546855f027890b77854508bdb4d6a81242a9d804482e667fff6e6"}, - {file = "coverage-7.2.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a6450da4c7afc4534305b2b7d8650131e130610cea448ff240b6ab73d7eab63"}, - {file = "coverage-7.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:99f4dd81b2bb8fc67c3da68b1f5ee1650aca06faa585cbc6818dbf67893c6d58"}, - {file = "coverage-7.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bdd3f2f285ddcf2e75174248b2406189261a79e7fedee2ceeadc76219b6faa0e"}, - {file = "coverage-7.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f29351393eb05e6326f044a7b45ed8e38cb4dcc38570d12791f271399dc41431"}, - {file = "coverage-7.2.1-cp39-cp39-win32.whl", hash = "sha256:e2b50ebc2b6121edf352336d503357321b9d8738bb7a72d06fc56153fd3f4cd8"}, - {file = "coverage-7.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:bd5a12239c0006252244f94863f1c518ac256160cd316ea5c47fb1a11b25889a"}, - {file = "coverage-7.2.1-pp37.pp38.pp39-none-any.whl", hash = "sha256:436313d129db7cf5b4ac355dd2bd3f7c7e5294af077b090b85de75f8458b8616"}, - {file = "coverage-7.2.1.tar.gz", hash = "sha256:c77f2a9093ccf329dd523a9b2b3c854c20d2a3d968b6def3b820272ca6732242"}, + {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"}, + {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"}, + {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"}, + {file = 
"coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"}, + {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"}, + {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"}, + {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"}, + {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"}, + {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"}, + {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"}, + {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"}, + {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"}, + {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"}, + {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"}, + {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"}, + {file = 
"coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"}, + {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"}, + {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"}, + {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"}, + {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"}, + {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"}, + {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"}, + {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"}, + {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"}, + {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"}, + {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"}, + {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"}, + {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"}, 
+ {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"}, + {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"}, + {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"}, + {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"}, + {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"}, + {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"}, + {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"}, + {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"}, + {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"}, + {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"}, + {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"}, + {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"}, + {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"}, + {file = 
"coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"}, + {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"}, + {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"}, + {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"}, + {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"}, + {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"}, + {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"}, + {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"}, + {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"}, + {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"}, ] [package.dependencies] @@ -339,14 +326,14 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.0" +version = "1.1.1" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, - {file = "exceptiongroup-1.1.0.tar.gz", hash = 
"sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, ] [package.extras] @@ -633,14 +620,14 @@ test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1. [[package]] name = "markdown-it-py" -version = "2.1.0" +version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"}, - {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"}, + {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, + {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, ] [package.dependencies] @@ -648,10 +635,10 @@ mdurl = ">=0.1,<1.0" typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"] -code-style = ["pre-commit (==2.6)"] -compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"] -linkify = ["linkify-it-py (>=1.0,<2.0)"] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] plugins = ["mdit-py-plugins"] profiling = ["gprof2dot"] rtd 
= ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] @@ -731,14 +718,14 @@ files = [ [[package]] name = "mdit-py-plugins" -version = "0.3.3" +version = "0.3.5" description = "Collection of plugins for markdown-it-py" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mdit-py-plugins-0.3.3.tar.gz", hash = "sha256:5cfd7e7ac582a594e23ba6546a2f406e94e42eb33ae596d0734781261c251260"}, - {file = "mdit_py_plugins-0.3.3-py3-none-any.whl", hash = "sha256:36d08a29def19ec43acdcd8ba471d3ebab132e7879d442760d963f19913e04b9"}, + {file = "mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a"}, + {file = "mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e"}, ] [package.dependencies] @@ -881,14 +868,14 @@ files = [ [[package]] name = "pathspec" -version = "0.11.0" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, - {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] [[package]] @@ -920,14 +907,14 @@ flake8 = ">=3.9.1" [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, - {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, ] [package.dependencies] @@ -1313,26 +1300,6 @@ files = [ [package.extras] test = ["flake8", "mypy", "pytest"] -[[package]] -name = "sphinxcontrib-kroki" -version = "1.3.0" -description = "Kroki integration into sphinx" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "sphinxcontrib-kroki-1.3.0.tar.gz", hash = "sha256:90ce45e1f5822443772d4df8ddf031746101dc1fd5a0a831a1db7e0886c49b6a"}, -] - -[package.dependencies] -pyyaml = "*" -requests = ">=2.4.2" -sphinx = "*" - -[package.extras] -code = ["black", "flake8", "mypy"] -test = ["coverage", "pytest", "pytest-cov"] - [[package]] name = "sphinxcontrib-qthelp" version = "1.0.3" @@ -1425,38 +1392,38 @@ files = [ [[package]] name = "types-setuptools" -version = "67.6.0.0" +version = "67.6.0.4" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.6.0.0.tar.gz", hash = "sha256:70b5e6a379e9fccf6579871a93ca3301a46252e3ae66957ec64281a2b6a812d9"}, - {file = "types_setuptools-67.6.0.0-py3-none-any.whl", hash = "sha256:d669a80ee8e37eb1697dc31a23d41ea2c48a635464e2c7e6370dda811459b466"}, + {file = "types-setuptools-67.6.0.4.tar.gz", hash = "sha256:157fc81797619a977e889c4bdfea205b21038ced63bb252bbb0e427540beb8d5"}, + {file = "types_setuptools-67.6.0.4-py3-none-any.whl", hash = 
"sha256:5183c0e1ef0447fde46d4e852b24670fb0ed915bd8ffb4ca488a423900e0a368"}, ] [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -1478,19 +1445,19 @@ files = [ [[package]] name = "zipp" -version = "3.13.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.13.0-py3-none-any.whl", hash = 
"sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"}, - {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] berkeleydb = ["berkeleydb"] @@ -1501,4 +1468,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "d49edf98b285b3e44ee553a95842211bdfc910b99ab289b12a2106009e09deae" +content-hash = "8fe5f08c0bab3cf6d62d64432661ccb6b792f643a788bb023244a058fbebb45f" diff --git a/pyproject.toml b/pyproject.toml index ce28f62be..8329fb6ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,6 @@ setuptools = ">=65.6.3,<68.0.0" sphinx = ">4.0.0" myst-parser = "^0.18.0" sphinxcontrib-apidoc = "^0.3.0" -sphinxcontrib-kroki = "^1.3.0" sphinx-autodoc-typehints = "^1.17.1" [tool.poetry.group.flake8.dependencies] From e3884b72f92888f6ebd251c5d748ba35c796d7ca Mon Sep 17 00:00:00 2001 From: Nicholas Bollweg Date: Fri, 17 Mar 2023 16:52:11 -0500 Subject: [PATCH 008/114] build: include test in sdist (#2282) A perhaps minor regression from earlier versions is that the sdist does 
not include the test folder, which makes it harder for downstreams to use a single source of truth to build and test a reliable package. This restores the test folder for sdists. --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 8329fb6ce..aa53bdac7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,9 @@ readme = "README.md" packages = [ { include = "rdflib" }, ] +include = [ + { path = "test", format = "sdist" }, +] [tool.poetry.scripts] rdfpipe = 'rdflib.tools.rdfpipe:main' From f3d31b002dda6d20247619b9606eff07039099df Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sat, 18 Mar 2023 14:08:47 +0100 Subject: [PATCH 009/114] chore: prepare for release 6.3.1 (#2285) Update everything to reflect release version 6.3.1. --- CHANGELOG.md | 41 ++++++++++++++++++++++++++++ README.md | 2 +- docs/developers.rst | 66 ++++++++++++++++++++++++++------------------- pyproject.toml | 2 +- rdflib/__init__.py | 2 +- 5 files changed, 82 insertions(+), 31 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 798dabfd3..d72e4104b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,44 @@ +# 2023-03-18 RELEASE 6.3.1 + + +This is a patch release that includes a singular user facing fix, which is the +inclusion of the `test` directory in the `sdist` release artifact. + +The following sections describe the changes included in this version. + +## build: explicitly specify `packages` in `pyproject.toml` (#2280) + +Commit [334787b](https://github.com/RDFLib/rdflib/commit/334787b), closes [#2280](https://github.com/RDFLib/rdflib/issues/2280). + + +The default behaviour makes it more of a hassle to republish RDFLib to +a separate package, something which I plan to do for testing purposes +and possibly other reasons. + +More changes may follow in a similar vein. 
+ + +## build: include test in sdist (#2282) + +Commit [e3884b7](https://github.com/RDFLib/rdflib/commit/e3884b7), closes [#2282](https://github.com/RDFLib/rdflib/issues/2282). + + +A perhaps minor regression from earlier versions is that the sdist does not include the test folder, which makes it harder for downstreams to use a single source of truth to build and test a reliable package. This restores the test folder for sdists. + +## docs: don't use kroki (#2284) + +Commit [bea782f](https://github.com/RDFLib/rdflib/commit/bea782f), closes [#2284](https://github.com/RDFLib/rdflib/issues/2284). + + +The Kroki server is currently experiencing some issues which breaks our +build, this change eliminates the use of Kroki in favour of directly +using the generated SVG images which is checked into git alongside the +PlantUML sources. + +I also added a task to the Taskfile to re-generate the SVG images from +the PlantUML sources by calling docker. + + # 2023-03-16 RELEASE 6.3.0 This is a minor release that includes bug fixes and features. diff --git a/README.md b/README.md index 49c63ac8a..172413e8b 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ The stable release of RDFLib may be installed with Python's package management t Alternatively manually download the package from the Python Package Index (PyPI) at https://pypi.python.org/pypi/rdflib -The current version of RDFLib is 6.3.0, see the ``CHANGELOG.md`` file for what's new in this release. +The current version of RDFLib is 6.3.1, see the ``CHANGELOG.md`` file for what's new in this release. ### Installation of the current main branch (for developers) diff --git a/docs/developers.rst b/docs/developers.rst index 0eb73c1a2..9d74df2f2 100644 --- a/docs/developers.rst +++ b/docs/developers.rst @@ -358,43 +358,53 @@ RDFLib 5.0.0 maintained compatibility with Python versions 2.7, 3.4, 3.5, 3.6, 3 Releasing --------- -Set to-be-released version number in :file:`rdflib/__init__.py` and -:file:`README.md`. 
Check date in :file:`LICENSE`. +Create a release-preparation pull request with the following changes: -Add :file:`CHANGELOG.md` entry. +* Updated copyright year in the ``LICENSE`` file. +* Updated copyright year in the ``docs/conf.py`` file. +* Updated main branch version and current version in the ``README.md`` file. The + main branch version should be the next major version with an ``a0`` suffix to + indicate it is alpha 0. When releasing 6.3.1, the main branch version in the + README should be 6.4.0a0. +* Updated version in the ``pyproject.toml`` file. +* Updated ``__date__`` in the ``rdflib/__init__.py`` file. +* Accurate ``CHANGELOG.md`` entry for the release. -Commit this change. It's preferable make the release tag via -https://github.com/RDFLib/rdflib/releases/new :: -Our Tag versions aren't started with 'v', so just use a plain 5.0.0 like -version. Release title is like "RDFLib 5.0.0", the description a copy of your -:file:`CHANGELOG.md` entry. -This gives us a nice release page like this:: -https://github.com/RDFLib/rdflib/releases/tag/4.2.2 +Once the PR is merged, switch to the main branch, build the release and upload it to PyPI: -If for whatever reason you don't want to take this approach, the old one is:: - - Tagging the release commit with:: - - git tag -am 'tagged version' X.X.X +.. code-block:: bash + + # Clean up any previous builds + \rm -vf dist/* - When pushing, remember to do:: + # Build artifacts + poetry build - git push --tags + # Check that the built wheel works correctly: + pipx run --spec "$(readlink -f dist/rdflib*.whl)" rdfpipe --version + # Publish to PyPI + poetry publish + -No matter how you create the release tag, remember to upload tarball to pypi with:: +Once this is done, create a release tag from `GitHub releases +`_. For a release of version +6.3.1 the tag should be ``6.3.1`` (without a "v" prefix), and the release title +should be "RDFLib 6.3.1". The release notes for the latest version be added to +the release description. 
The artifacts built with ``poetry build`` should be +uploaded to the release as release artifacts. - rm -r dist/X.X.X[.-]* # delete all previous builds for this release, just in case +The resulting release will be available at https://github.com/RDFLib/rdflib/releases/tag/6.3.1 - rm -r build - python setup.py sdist - python setup.py bdist_wheel - ls dist +Once this is done announce the release at the following locations: - # upload with twine - # WARNING: once uploaded can never be modified, only deleted! - twine upload dist/rdflib-X.X.X[.-]* +* Twitter: Just make a tweet from your own account linking to the latest release. +* RDFLib mailing list. +* RDFLib Gitter / matrix.org chat room. -Set new dev version number in the above locations, i.e. next release ``-dev``: ``5.0.1-dev`` and commit again. +Once this is all done, create another post-release pull request with the following changes: -Tweet, email mailing list and inform members in the chat. +* Set the just released version in ``docker/latest/requirements.in`` and run + ``task docker:prepare`` to update the ``docker/latest/requirements.txt`` file. +* Set the version in the ``pyproject.toml`` file to the next minor release with + a ``a0`` suffix to indicate alpha 0. 
diff --git a/pyproject.toml b/pyproject.toml index aa53bdac7..0999fd805 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.4.0a0" +version = "6.3.1" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] diff --git a/rdflib/__init__.py b/rdflib/__init__.py index ebf1344c8..bc49dc489 100644 --- a/rdflib/__init__.py +++ b/rdflib/__init__.py @@ -56,7 +56,7 @@ __docformat__ = "restructuredtext en" __version__: str = _DISTRIBUTION_METADATA["Version"] -__date__ = "2023-03-16" +__date__ = "2023-03-18" __all__ = [ "URIRef", From 5635ed295dcafb09de84f0ab04dcbb7f5b7e7c9b Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 19 Mar 2023 15:48:54 +0100 Subject: [PATCH 010/114] chore: post 6.3.1 release tasks (#2290) --- docker/latest/requirements.in | 2 +- docker/latest/requirements.txt | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/latest/requirements.in b/docker/latest/requirements.in index a0419fb13..841a97e4d 100644 --- a/docker/latest/requirements.in +++ b/docker/latest/requirements.in @@ -1,4 +1,4 @@ # This file is used for building a docker image of hte latest rdflib release. It # will be updated by dependabot when new releases are made. 
-rdflib==6.3.0 +rdflib==6.3.1 html5lib diff --git a/docker/latest/requirements.txt b/docker/latest/requirements.txt index 7f7563383..d0ad24aef 100644 --- a/docker/latest/requirements.txt +++ b/docker/latest/requirements.txt @@ -10,7 +10,7 @@ isodate==0.6.1 # via rdflib pyparsing==3.0.9 # via rdflib -rdflib==6.3.0 +rdflib==6.3.1 # via -r docker/latest/requirements.in six==1.16.0 # via diff --git a/pyproject.toml b/pyproject.toml index 0999fd805..aa53bdac7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.3.1" +version = "6.4.0a0" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] From 394fb50d73ddca7b4891e43ba0ad2172de0b107d Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 19 Mar 2023 15:50:11 +0100 Subject: [PATCH 011/114] fix: include docs and examples in the sdist tarball (#2289) The sdists generated by setuptools included the `docs` and `examples` directories, and they are needed for building docs and running tests using the sdist. This change includes these directories in the sdist tarball. A `test:sdist` task is also added to `Taskfile.yml` which uses the sdists to run pytest and build docs. --- Taskfile.yml | 17 +++++++++++++++++ pyproject.toml | 2 ++ 2 files changed, 19 insertions(+) diff --git a/Taskfile.yml b/Taskfile.yml index ec50b23b3..feb7624c2 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -218,6 +218,8 @@ tasks: - task: venv:clean - task: _rimraf vars: { RIMRAF_TARGET: ".var/devcontainer" } + - task: _rimraf + vars: { RIMRAF_TARGET: "var/test-sdist" } test:data:fetch: desc: Fetch test data. 
@@ -342,6 +344,21 @@ tasks: > "${plantuml_file%.*}.svg" done + test:sdist: + desc: Run tests on the sdist artifact + cmds: + - task: _rimraf + vars: { RIMRAF_TARGET: "dist" } + - task: _rimraf + vars: { RIMRAF_TARGET: "var/test-sdist" } + - poetry build + - python -c 'import tarfile, glob; tarfile.open(glob.glob("dist/*.tar.gz")[0]).extractall("var/test-sdist")' + - | + cd var/test-sdist/rdflib-* + poetry install + poetry run mypy --show-error-context --show-error-codes -p rdflib + poetry run sphinx-build -T -W -b html -d docs/_build/doctree docs docs/_build/html + poetry run pytest _rimraf: # This task is a utility task for recursively removing directories, it is # similar to rm -rf but not identical and it should work wherever there is diff --git a/pyproject.toml b/pyproject.toml index aa53bdac7..47bfa0943 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,8 @@ packages = [ ] include = [ { path = "test", format = "sdist" }, + { path = "docs", format = "sdist" }, + { path = "examples", format = "sdist" }, ] [tool.poetry.scripts] From 192e6d11147963a3066faec43323991c9b73f7b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 19 Mar 2023 16:16:04 +0100 Subject: [PATCH 012/114] build(deps-dev): bump mypy from 1.0.1 to 1.1.1 (#2274) build(deps-dev): bump mypy from 1.0.1 to 1.1.1 Bumps [mypy](https://github.com/python/mypy) from 1.0.1 to 1.1.1. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v1.0.1...v1.1.1) updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor Also added type ignores for newly detected type errors. 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Iwan Aucamp --- poetry.lock | 58 ++++++++++++++--------------- pyproject.toml | 2 +- rdflib/parser.py | 3 +- rdflib/plugins/sparql/aggregates.py | 4 +- rdflib/query.py | 5 ++- test/test_graph/test_graph_http.py | 3 +- 6 files changed, 39 insertions(+), 36 deletions(-) diff --git a/poetry.lock b/poetry.lock index 85f3a851e..a3d6c1db8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -750,42 +750,42 @@ files = [ [[package]] name = "mypy" -version = "1.0.1" +version = "1.1.1" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, - {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, - {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, - {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, - {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, - {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, - {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, - {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, - {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, - {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, - {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, - {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, - {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, - {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, - {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, - {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, - {file = 
"mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, - {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, - {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, - {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, + {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"}, + {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"}, + {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"}, + {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"}, + {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"}, + {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"}, + {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"}, + {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"}, + {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"}, + {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"}, + {file = 
"mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"}, + {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"}, + {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"}, + {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"}, + {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"}, + {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"}, + {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"}, + {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"}, + {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"}, + {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"}, + {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"}, + {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"}, + {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"}, + {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"}, + {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"}, + {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} typing-extensions = ">=3.10" @@ -1468,4 +1468,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "8fe5f08c0bab3cf6d62d64432661ccb6b792f643a788bb023244a058fbebb45f" +content-hash = "3a6eb023f5a417d8825791a1acfb03462bc3754c611ca43fe81c7eb62c4f4752" diff --git a/pyproject.toml b/pyproject.toml index 47bfa0943..2b3577a72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,7 +51,7 @@ lxml = {version = "^4.3.0", optional = true} [tool.poetry.group.dev.dependencies] black = "23.1.0" isort = "^5.10.0" -mypy = "1.0.1" +mypy = "^1.1.0" lxml-stubs = "^0.4.0" [tool.poetry.group.tests.dependencies] diff --git a/rdflib/parser.py b/rdflib/parser.py index 89318afff..6f23dd342 100644 --- a/rdflib/parser.py +++ b/rdflib/parser.py @@ -276,7 +276,8 @@ def _urlopen(req: Request) -> Any: # This custom error handling should be removed once all # supported versions of python support 308. 
if ex.code == 308: - req.full_url = ex.headers.get("Location") + # type error: Incompatible types in assignment (expression has type "Optional[Any]", variable has type "str") + req.full_url = ex.headers.get("Location") # type: ignore[assignment] return _urlopen(req) else: raise diff --git a/rdflib/plugins/sparql/aggregates.py b/rdflib/plugins/sparql/aggregates.py index fd40ab055..fb2dffedd 100644 --- a/rdflib/plugins/sparql/aggregates.py +++ b/rdflib/plugins/sparql/aggregates.py @@ -40,7 +40,7 @@ def __init__(self, aggregation: CompValue): self.expr = aggregation.vars if not aggregation.distinct: # type error: Cannot assign to a method - self.use_row = self.dont_care # type: ignore[assignment] + self.use_row = self.dont_care # type: ignore[method-assign] self.distinct = False else: self.distinct = aggregation.distinct @@ -184,7 +184,7 @@ def __init__(self, aggregation: CompValue): self.value: Any = None # DISTINCT would not change the value for MIN or MAX # type error: Cannot assign to a method - self.use_row = self.dont_care # type: ignore[assignment] + self.use_row = self.dont_care # type: ignore[method-assign] def set_value(self, bindings: MutableMapping[Variable, Identifier]) -> None: if self.value is not None: diff --git a/rdflib/query.py b/rdflib/query.py index 1cfaa1536..155c490e6 100644 --- a/rdflib/query.py +++ b/rdflib/query.py @@ -155,8 +155,9 @@ class ResultRow(Tuple["Identifier", ...]): def __new__( cls, values: Mapping["Variable", "Identifier"], labels: List["Variable"] ): - # type error: Generator has incompatible item type "Optional[Any]"; expected "_T_co" - instance = super(ResultRow, cls).__new__(cls, (values.get(v) for v in labels)) # type: ignore[misc] + # type error: Value of type variable "Self" of "__new__" of "tuple" cannot be "ResultRow" [type-var] + # type error: Generator has incompatible item type "Optional[Identifier]"; expected "_T_co" [misc] + instance = super(ResultRow, cls).__new__(cls, (values.get(v) for v in labels)) # type: 
ignore[type-var, misc] instance.labels = dict((str(x[1]), x[0]) for x in enumerate(labels)) return instance diff --git a/test/test_graph/test_graph_http.py b/test/test_graph/test_graph_http.py index 6a4067188..762e3d5b3 100644 --- a/test/test_graph/test_graph_http.py +++ b/test/test_graph/test_graph_http.py @@ -201,7 +201,8 @@ def test_3xx(self) -> None: httpmock.mocks[MethodName.GET].assert_called() assert len(httpmock.requests[MethodName.GET]) == 10 for request in httpmock.requests[MethodName.GET]: - assert re.match(r"text/turtle", request.headers.get("Accept")) + # type error: Argument 2 to "match" has incompatible type "Optional[Any]"; expected "str" + assert re.match(r"text/turtle", request.headers.get("Accept")) # type: ignore[arg-type] request_paths = [ request.path for request in httpmock.requests[MethodName.GET] From 7a05c15b25b5929696e30c5c96cd4bfbe566fd4b Mon Sep 17 00:00:00 2001 From: Charles Tapley Hoyt Date: Sun, 19 Mar 2023 16:27:13 +0100 Subject: [PATCH 013/114] docs: fix typo in NamespaceManager documentation (#2291) Changed `cdterms` to `dcterms`, see for more info. 
--- docs/namespaces_and_bindings.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/namespaces_and_bindings.rst b/docs/namespaces_and_bindings.rst index cac846802..ef7458661 100644 --- a/docs/namespaces_and_bindings.rst +++ b/docs/namespaces_and_bindings.rst @@ -70,7 +70,7 @@ Valid strategies are: * rdflib: * binds all the namespaces shipped with RDFLib as DefinedNamespace instances * all the core namespaces and all the following: brick, csvw, dc, dcat - * dcmitype, cdterms, dcam, doap, foaf, geo, odrl, org, prof, prov, qb, sdo + * dcmitype, dcterms, dcam, doap, foaf, geo, odrl, org, prof, prov, qb, sdo * sh, skos, sosa, ssn, time, vann, void * see the NAMESPACE_PREFIXES_RDFLIB object in :class:`rdflib.namespace` for up-to-date list * none: From 07f885e31da4e49198efcd9233ffa362d2ef764c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Mar 2023 19:39:27 +0100 Subject: [PATCH 014/114] build(deps-dev): bump types-setuptools from 67.6.0.4 to 67.6.0.5 (#2296) Bumps [types-setuptools](https://github.com/python/typeshed) from 67.6.0.4 to 67.6.0.5. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index a3d6c1db8..1f13f8653 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
[[package]] name = "alabaster" @@ -1392,14 +1392,14 @@ files = [ [[package]] name = "types-setuptools" -version = "67.6.0.4" +version = "67.6.0.5" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.6.0.4.tar.gz", hash = "sha256:157fc81797619a977e889c4bdfea205b21038ced63bb252bbb0e427540beb8d5"}, - {file = "types_setuptools-67.6.0.4-py3-none-any.whl", hash = "sha256:5183c0e1ef0447fde46d4e852b24670fb0ed915bd8ffb4ca488a423900e0a368"}, + {file = "types-setuptools-67.6.0.5.tar.gz", hash = "sha256:3a708e66c7bdc620e4d0439f344c750c57a4340c895a4c3ed2d0fc4ae8eb9962"}, + {file = "types_setuptools-67.6.0.5-py3-none-any.whl", hash = "sha256:dae5a4a659dbb6dba57773440f6e2dbdd8ef282dc136a174a8a59bd33d949945"}, ] [[package]] From 155895338439265b31b889e7358ba2289611c04d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Mar 2023 19:40:00 +0100 Subject: [PATCH 015/114] build(deps): bump library/python in /docker/unstable (#2299) Bumps library/python from `d0e8398` to `1d2b710`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index 406922940..a858e5b6d 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.2-slim@sha256:d0e839882b87135b355361efeb9e9030c9d2a808da06434f4c99eb4009c15e64 +FROM docker.io/library/python:3.11.2-slim@sha256:1d2b7101658e795e4d878d3f54f3354838630e1d16f5868ea18b338c12bb92c9 # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From 5a80926fe6544dad065557c03eb7d99c447eb8ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Mar 2023 19:40:12 +0100 Subject: [PATCH 016/114] build(deps): bump library/python in /docker/latest (#2298) Bumps library/python from `d0e8398` to `1d2b710`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index dd04515d4..96ac404af 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.2-slim@sha256:d0e839882b87135b355361efeb9e9030c9d2a808da06434f4c99eb4009c15e64 +FROM docker.io/library/python:3.11.2-slim@sha256:1d2b7101658e795e4d878d3f54f3354838630e1d16f5868ea18b338c12bb92c9 COPY docker/latest/requirements.txt /var/tmp/build/ From dd44ae186183b0da8fef63853c9da2be826eb643 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Tue, 21 Mar 2023 22:29:13 +0100 Subject: [PATCH 017/114] build: upgrade sphinx and change flake8 and related to only install on python >=3.8 (#2288) The main aim of this change is to get to a newer version of Sphinx. However, if just Sphinx is upgraded, poetry can't do version solving because of the version of `importlib-metadata` that flake8 needs: ```console $ poetry install Updating dependencies Resolving dependencies... (0.6s) Because no versions of flakeheaven match >3.2.1,<4.0.0 and flakeheaven (3.2.1) depends on flake8 (>=4.0.1,<5.0.0), flakeheaven (>=3.2.1,<4.0.0) requires flake8 (>=4.0.1,<5.0.0). And because no versions of flake8 match >4.0.1,<5.0.0 and flake8 (4.0.1) depends on importlib-metadata (<4.3), flakeheaven (>=3.2.1,<4.0.0) requires importlib-metadata (<4.3). And because sphinx (5.3.0) depends on importlib-metadata (>=4.8) and no versions of sphinx match >5.3.0,<6.0.0, flakeheaven (>=3.2.1,<4.0.0) is incompatible with sphinx (>=5.3.0,<6.0.0). So, because rdflib depends on both sphinx (^5.3.0) and flakeheaven (^3.2.1), version solving failed. 
``` To make things work, flake8 and related is only installed for Python >=3.8, where the built-in `importlib.metadata` is used instead of the `importlib-metadata` package. This means no more flake8 on python 3.7, but it is a reasonable trade-off to get to a newer version of Sphinx, and Python 3.7 support will be dropped by 2023-06-27 anyway. Other changes: - Changed Read the Docs to use the Sphinx version from poetry instead of the custom version that was in `devtools/requirements-rtd.txt`. - Added `typing-extensions` to the poetry `docs` dependency group as it is needed for docs to build correctly. - Changed the tox `docs` environment to be closer to the Read the Docs environment. Closes . --- .readthedocs.yaml | 5 -- devtools/requirements-rtd.txt | 5 -- docs/conf.py | 4 ++ poetry.lock | 87 ++++++++++++++++++----------------- pyproject.toml | 11 +++-- tox.ini | 4 +- 6 files changed, 57 insertions(+), 59 deletions(-) delete mode 100644 devtools/requirements-rtd.txt diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 274b3ad69..07bdc9db8 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -24,11 +24,6 @@ build: - poetry config virtualenvs.create false - poetry install --only=main --only=docs --extras=html - poetry env info - # This will patch Sphinx to a later version than is in poetry.lock so that - # we build with a more up to date Sphinx. This should be eliminated when - # possible in favor of having a more up to date Sphinx in poetry.lock. - - pip install -r devtools/requirements-rtd.txt - sphinx: fail_on_warning: true diff --git a/devtools/requirements-rtd.txt b/devtools/requirements-rtd.txt deleted file mode 100644 index 32af55fb5..000000000 --- a/devtools/requirements-rtd.txt +++ /dev/null @@ -1,5 +0,0 @@ -# This file contains requirements that get patched into readthedocs so that we -# can build with a more up to date Sphinx than is in `poetry.lock`. 
This file -# should be eliminated once we can get a more up-to-date version of sphinx in -# poetry.lock. It is kept here so that dependabot can update it. -Sphinx==5.3.0 diff --git a/docs/conf.py b/docs/conf.py index 222d9b239..785c6fc65 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -322,5 +322,9 @@ def find_version(filename): ("py:class", "_TripleType"), ("py:class", "_TripleOrTriplePathType"), ("py:class", "TextIO"), + ("py:class", "Message"), ] ) + +if sys.version_info < (3, 8): + nitpick_ignore.extend([("py:class", "importlib_metadata.EntryPoint")]) diff --git a/poetry.lock b/poetry.lock index 1f13f8653..14eabafee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -302,14 +302,14 @@ toml = ["tomli"] [[package]] name = "docutils" -version = "0.17.1" +version = "0.19" description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, ] [[package]] @@ -352,7 +352,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.8.0,<2.9.0" pyflakes = ">=2.4.0,<2.5.0" @@ -373,7 +372,6 @@ files = [ colorama = "*" entrypoints = "*" flake8 = ">=4.0.1,<5.0.0" -importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""} pygments = "*" toml = "*" urllib3 = "*" @@ -429,14 +427,14 @@ files = [ [[package]] name = "importlib-metadata" -version 
= "4.2.0" +version = "4.13.0" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, ] [package.dependencies] @@ -444,8 +442,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -810,30 +809,31 @@ files = [ [[package]] name = "myst-parser" -version = "0.18.1" -description = "An extended commonmark compliant parser, with bridges to docutils & sphinx." 
+version = "1.0.0" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "myst-parser-0.18.1.tar.gz", hash = "sha256:79317f4bb2c13053dd6e64f9da1ba1da6cd9c40c8a430c447a7b146a594c246d"}, - {file = "myst_parser-0.18.1-py3-none-any.whl", hash = "sha256:61b275b85d9f58aa327f370913ae1bec26ebad372cc99f3ab85c8ec3ee8d9fb8"}, + {file = "myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae"}, + {file = "myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c"}, ] [package.dependencies] docutils = ">=0.15,<0.20" jinja2 = "*" markdown-it-py = ">=1.0.0,<3.0.0" -mdit-py-plugins = ">=0.3.1,<0.4.0" +mdit-py-plugins = ">=0.3.4,<0.4.0" pyyaml = "*" -sphinx = ">=4,<6" -typing-extensions = "*" +sphinx = ">=5,<7" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -code-style = ["pre-commit (>=2.12,<3.0)"] +code-style = ["pre-commit (>=3.0,<4.0)"] linkify = ["linkify-it-py (>=1.0,<2.0)"] -rtd = ["ipython", "sphinx-book-theme", "sphinx-design", "sphinxcontrib.mermaid (>=0.7.1,<0.8.0)", "sphinxext-opengraph (>=0.6.3,<0.7.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] -testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=6,<7)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx (<5.2)", "sphinx-pytest"] +rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.7.5,<0.8.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = 
["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] [[package]] name = "networkx" @@ -1168,28 +1168,28 @@ files = [ [[package]] name = "sphinx" -version = "4.3.2" +version = "5.3.0" description = "Python documentation generator" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, - {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.18" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" requests = ">=2.5.0" -setuptools = "*" -snowballstemmer = ">=1.1" +snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" @@ -1199,27 +1199,28 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", "types-pkg-resources", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", 
"types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx-autodoc-typehints" -version = "1.17.1" +version = "1.22" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "sphinx_autodoc_typehints-1.17.1-py3-none-any.whl", hash = "sha256:f16491cad05a13f4825ecdf9ee4ff02925d9a3b1cf103d4d02f2f81802cce653"}, - {file = "sphinx_autodoc_typehints-1.17.1.tar.gz", hash = "sha256:844d7237d3f6280b0416f5375d9556cfd84df1945356fcc34b82e8aaacab40f3"}, + {file = "sphinx_autodoc_typehints-1.22-py3-none-any.whl", hash = "sha256:ef4a8b9d52de66065aa7d3adfabf5a436feb8a2eff07c2ddc31625d8807f2b69"}, + {file = "sphinx_autodoc_typehints-1.22.tar.gz", hash = "sha256:71fca2d5eee9b034204e4c686ab20b4d8f5eb9409396216bcae6c87c38e18ea6"}, ] [package.dependencies] -Sphinx = ">=4" +sphinx = ">=5.3" [package.extras] -testing = ["covdefaults (>=2)", "coverage (>=6)", "diff-cover (>=6.4)", "nptyping (>=1,<2)", "pytest (>=6)", "pytest-cov (>=3)", "sphobjinv (>=2)", "typing-extensions (>=3.5)"] -type-comments = ["typed-ast (>=1.4.0)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.21)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.5)", "diff-cover (>=7.3)", "nptyping (>=2.4.1)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.4)"] +type-comment = ["typed-ast (>=1.5.4)"] [[package]] name = "sphinxcontrib-apidoc" @@ -1468,4 +1469,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "3a6eb023f5a417d8825791a1acfb03462bc3754c611ca43fe81c7eb62c4f4752" +content-hash = "d58fda334d4ffe3e0d4ad1752ff57134f99436d2f66839358ecdd0912a36ea73" diff --git a/pyproject.toml b/pyproject.toml index 2b3577a72..cbf77aeba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,15 +62,16 @@ types-setuptools = ">=65.6.0.3,<68.0.0.0" setuptools = 
">=65.6.3,<68.0.0" [tool.poetry.group.docs.dependencies] -sphinx = ">4.0.0" -myst-parser = "^0.18.0" +sphinx = "^5.3.0" +myst-parser = "^1.0.0" sphinxcontrib-apidoc = "^0.3.0" sphinx-autodoc-typehints = "^1.17.1" +typing-extensions = "^4.5.0" [tool.poetry.group.flake8.dependencies] -flake8 = ">=4.0.1" # flakeheaven is incompatible with flake8 >=5.0 (https://github.com/flakeheaven/flakeheaven/issues/132) -flakeheaven = "^3.2.1" -pep8-naming = "^0.13.2" +flake8 = {version = ">=4.0.1", python = ">=3.8"} # flakeheaven is incompatible with flake8 >=5.0 (https://github.com/flakeheaven/flakeheaven/issues/132) +flakeheaven = {version = "^3.2.1", python = ">=3.8"} +pep8-naming = {version = "^0.13.2", python = ">=3.8"} [tool.poetry.extras] berkeleydb = ["berkeleydb"] diff --git a/tox.ini b/tox.ini index a5b058cb4..d2207cc69 100644 --- a/tox.ini +++ b/tox.ini @@ -54,7 +54,9 @@ passenv = setenv = PYTHONHASHSEED = 0 commands_pre = - poetry install --no-root --only=docs + poetry lock --check + poetry install --only=main --only=docs --extras=html + poetry env info commands = poetry run sphinx-build -T -W -b html -d {envdir}/doctree docs docs/_build/html From fe1a8f8a3e3d03c39552fedba44b59b13d23814e Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Tue, 21 Mar 2023 22:30:09 +0100 Subject: [PATCH 018/114] fix: add `__hash__` and `__eq__` back to `rdflib.paths.Path` (#2292) These methods were removed when `@total_ordering` was added, but `@total_ordering` does not add them, so removing them essentially removes functionality. This change adds the methods back and adds tests to ensure they work correctly. All path related tests are also moved into one file. - Closes . - Closes . 
--- rdflib/paths.py | 6 ++++ test/test_mulpath_n3.py | 8 ----- test/{test_paths_n3.py => test_path.py} | 46 ++++++++++++++++++++++++- 3 files changed, 51 insertions(+), 9 deletions(-) delete mode 100644 test/test_mulpath_n3.py rename test/{test_paths_n3.py => test_path.py} (58%) diff --git a/rdflib/paths.py b/rdflib/paths.py index defd0e750..6ca42d74c 100644 --- a/rdflib/paths.py +++ b/rdflib/paths.py @@ -229,6 +229,12 @@ def eval( ) -> Iterator[Tuple["_SubjectType", "_ObjectType"]]: raise NotImplementedError() + def __hash__(self): + return hash(repr(self)) + + def __eq__(self, other): + return repr(self) == repr(other) + def __lt__(self, other: Any) -> bool: if not isinstance(other, (Path, Node)): raise TypeError( diff --git a/test/test_mulpath_n3.py b/test/test_mulpath_n3.py deleted file mode 100644 index 418853611..000000000 --- a/test/test_mulpath_n3.py +++ /dev/null @@ -1,8 +0,0 @@ -from rdflib import URIRef -from rdflib.paths import ZeroOrMore - - -def test_mulpath_n3(): - uri = "http://example.com/foo" - n3 = (URIRef(uri) * ZeroOrMore).n3() - assert n3 == "<" + uri + ">*" diff --git a/test/test_paths_n3.py b/test/test_path.py similarity index 58% rename from test/test_paths_n3.py rename to test/test_path.py index b78347219..ad967849f 100644 --- a/test/test_paths_n3.py +++ b/test/test_path.py @@ -3,13 +3,15 @@ import pytest -from rdflib import RDF, RDFS, Graph +from rdflib import RDF, RDFS, Graph, URIRef +from rdflib.namespace import DCAT, DCTERMS from rdflib.paths import ( AlternativePath, InvPath, MulPath, NegatedPath, OneOrMore, + Path, SequencePath, ZeroOrMore, ZeroOrOne, @@ -71,3 +73,45 @@ def test_paths_n3( logging.debug("path = %s", path) assert path.n3() == no_nsm assert path.n3(nsm) == with_nsm + + +def test_mulpath_n3(): + uri = "http://example.com/foo" + n3 = (URIRef(uri) * ZeroOrMore).n3() + assert n3 == "<" + uri + ">*" + + +@pytest.mark.parametrize( + ["lhs", "rhs"], + [ + (DCTERMS.temporal / DCAT.endDate, DCTERMS.temporal / DCAT.endDate), + 
(SequencePath(DCTERMS.temporal, DCAT.endDate), DCTERMS.temporal / DCAT.endDate), + ], +) +def test_eq(lhs: Path, rhs: Path) -> None: + logging.debug("lhs = %s/%r, rhs = %s/%r", type(lhs), lhs, type(rhs), rhs) + assert lhs == rhs + + +@pytest.mark.parametrize( + ["lhs", "rhs"], + [ + (DCTERMS.temporal / DCAT.endDate, DCTERMS.temporal / DCAT.endDate), + (SequencePath(DCTERMS.temporal, DCAT.endDate), DCTERMS.temporal / DCAT.endDate), + ], +) +def test_hash(lhs: Path, rhs: Path) -> None: + logging.debug("lhs = %s/%r, rhs = %s/%r", type(lhs), lhs, type(rhs), rhs) + assert hash(lhs) == hash(rhs) + + +@pytest.mark.parametrize( + ["insert_path", "check_path"], + [ + (DCTERMS.temporal / DCAT.endDate, DCTERMS.temporal / DCAT.endDate), + (SequencePath(DCTERMS.temporal, DCAT.endDate), DCTERMS.temporal / DCAT.endDate), + ], +) +def test_dict_key(insert_path: Path, check_path: Path) -> None: + d = {insert_path: "foo"} + assert d[check_path] == "foo" From adf8eb2ec7de879fd4abb17f004796bd32ec8938 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Tue, 21 Mar 2023 22:31:11 +0100 Subject: [PATCH 019/114] fix: add the `wgs` namespace binding back (#2294) inadvertently removed the `wgs` prefix. This change adds it back. - Closes . 
--- rdflib/namespace/__init__.py | 3 ++- test/test_namespacemanager.py | 51 +++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index 5bfac7c64..a68526e19 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -365,7 +365,7 @@ class NamespaceManager(object): * binds all the namespaces shipped with RDFLib as DefinedNamespace instances * all the core namespaces and all the following: brick, csvw, dc, dcat * dcmitype, dcterms, dcam, doap, foaf, geo, odrl, org, prof, prov, qb, sdo - * sh, skos, sosa, ssn, time, vann, void + * sh, skos, sosa, ssn, time, vann, void, wgs * see the NAMESPACE_PREFIXES_RDFLIB object for the up-to-date list * none: * binds no namespaces to prefixes @@ -914,4 +914,5 @@ def get_longest_namespace(trie: Dict[str, Any], value: str) -> Optional[str]: "time": TIME, "vann": VANN, "void": VOID, + "wgs": WGS, } diff --git a/test/test_namespacemanager.py b/test/test_namespacemanager.py index 4d073b13d..d688834ef 100644 --- a/test/test_namespacemanager.py +++ b/test/test_namespacemanager.py @@ -172,6 +172,57 @@ def test_nman_bind_namespaces( check_graph_ns(graph, expected_result) +@pytest.mark.parametrize( + ["selector", "expected_bindings"], + [ + ( + "rdflib", + { + "brick": "https://brickschema.org/schema/Brick#", + "csvw": "http://www.w3.org/ns/csvw#", + "dc": "http://purl.org/dc/elements/1.1/", + "dcat": "http://www.w3.org/ns/dcat#", + "dcmitype": "http://purl.org/dc/dcmitype/", + "dcterms": "http://purl.org/dc/terms/", + "dcam": "http://purl.org/dc/dcam/", + "doap": "http://usefulinc.com/ns/doap#", + "foaf": "http://xmlns.com/foaf/0.1/", + "odrl": "http://www.w3.org/ns/odrl/2/", + "geo": "http://www.opengis.net/ont/geosparql#", + "org": "http://www.w3.org/ns/org#", + "owl": "http://www.w3.org/2002/07/owl#", + "prof": "http://www.w3.org/ns/dx/prof/", + "prov": "http://www.w3.org/ns/prov#", + "qb": 
"http://purl.org/linked-data/cube#", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "sdo": "https://schema.org/", + "sh": "http://www.w3.org/ns/shacl#", + "skos": "http://www.w3.org/2004/02/skos/core#", + "sosa": "http://www.w3.org/ns/sosa/", + "ssn": "http://www.w3.org/ns/ssn/", + "time": "http://www.w3.org/2006/time#", + "vann": "http://purl.org/vocab/vann/", + "void": "http://rdfs.org/ns/void#", + "wgs": "https://www.w3.org/2003/01/geo/wgs84_pos#", + "xsd": "http://www.w3.org/2001/XMLSchema#", + "xml": "http://www.w3.org/XML/1998/namespace", + }, + ) + ], +) +def test_bound_namespaces_subset( + selector: Any, expected_bindings: Dict[str, str] +) -> None: + graph = Graph(bind_namespaces=selector) + bound_namespaces = dict( + (key, str(value)) for key, value in graph.namespace_manager.namespaces() + ) + assert ( + expected_bindings.items() <= bound_namespaces.items() + ), f"missing items {expected_bindings.items() - bound_namespaces.items()}" + + def test_compute_qname_no_generate() -> None: g = Graph() # 'core' bind_namespaces (default) with pytest.raises(KeyError): From cfe6e378e6b0aff106f6baf3b5d82adbeb547236 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Tue, 21 Mar 2023 22:31:52 +0100 Subject: [PATCH 020/114] test: add `webtest` marker to tests that use the internet (#2295) This is being done so that it is easier for downstream packagers to run the test suite without requiring internet access. To run only tests that does not use the internet, run `pytest -m "not webtest"`. The validation workflow validates that test run without internet access by running the tests inside `firejail --net=none`. - Closes . 
--- .github/workflows/validate.yaml | 13 ++++++- Taskfile.yml | 7 +++- pyproject.toml | 4 ++ test/conftest.py | 39 ++++++++++++++++++- test/jsonld/test_onedotone.py | 4 ++ test/test_examples.py | 1 + test/test_extras/test_infixowl/test_basic.py | 3 ++ .../test_extras/test_infixowl/test_context.py | 1 + test/test_sparql/test_service.py | 10 +++++ tox.ini | 2 +- 10 files changed, 80 insertions(+), 4 deletions(-) diff --git a/.github/workflows/validate.yaml b/.github/workflows/validate.yaml index b496b8256..9f65e91a7 100644 --- a/.github/workflows/validate.yaml +++ b/.github/workflows/validate.yaml @@ -52,6 +52,10 @@ jobs: os: ubuntu-latest TOX_EXTRA_COMMAND: "flake8 --exit-zero rdflib" TOXENV_SUFFIX: "-docs" + PREPARATION: "sudo apt-get install -y firejail" + extensive-tests: true + TOX_TEST_HARNESS: "firejail --net=none --" + TOX_PYTEST_EXTRA_ARGS: "-m 'not webtest'" - python-version: "3.11" os: ubuntu-latest TOXENV_SUFFIX: "-docs" @@ -82,11 +86,15 @@ jobs: uses: arduino/setup-task@v1 with: repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Run preparation + if: ${{ matrix.PREPARATION }} + shell: bash + run: | + ${{ matrix.PREPARATION }} - name: Run validation shell: bash run: | task \ - TOX_EXTRA_COMMAND="${{ matrix.TOX_EXTRA_COMMAND }}" \ OS=${{ matrix.os }} \ MATRIX_SUFFIX=${{ matrix.suffix }} \ EXTENSIVE=${{ matrix.extensive-tests || 'false' }} \ @@ -96,6 +104,9 @@ jobs: gha:validate env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TOX_PYTEST_EXTRA_ARGS: ${{ matrix.TOX_PYTEST_EXTRA_ARGS }} + TOX_TEST_HARNESS: ${{ matrix.TOX_TEST_HARNESS }} + TOX_EXTRA_COMMAND: ${{ matrix.TOX_EXTRA_COMMAND }} - uses: actions/upload-artifact@v3 if: ${{ (success() || failure()) }} with: diff --git a/Taskfile.yml b/Taskfile.yml index feb7624c2..b2febc570 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -98,7 +98,6 @@ tasks: - echo "TOXENV=${TOXENV}" - | {{if .TOX_PYTEST_ARGS}}TOX_PYTEST_ARGS={{shellQuote .TOX_PYTEST_ARGS}}{{end}} \ - {{if 
.TOX_EXTRA_COMMAND}}TOX_EXTRA_COMMAND={{shellQuote .TOX_EXTRA_COMMAND}}{{end}} \ {{if .TOX_JUNIT_XML_PREFIX}}TOX_JUNIT_XML_PREFIX={{shellQuote .TOX_JUNIT_XML_PREFIX}}{{end}} \ {{if .COVERAGE_FILE}}COVERAGE_FILE={{shellQuote .COVERAGE_FILE}}{{end}} \ {{.TEST_HARNESS}} \ @@ -359,6 +358,12 @@ tasks: poetry run mypy --show-error-context --show-error-codes -p rdflib poetry run sphinx-build -T -W -b html -d docs/_build/doctree docs docs/_build/html poetry run pytest + + test:no_internet: + desc: Run tests without internet access + cmds: + - | + {{.TEST_HARNESS}}{{.RUN_PREFIX}} firejail --net=none -- pytest -m "not webtest" {{.CLI_ARGS}} _rimraf: # This task is a utility task for recursively removing directories, it is # similar to rm -rf but not identical and it should work wherever there is diff --git a/pyproject.toml b/pyproject.toml index cbf77aeba..ddbe2700e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -156,6 +156,7 @@ addopts = [ "--ignore=rdflib/extras/external_graph_libs.py", "--ignore-glob=docs/*.py", "--doctest-glob=docs/*.rst", + "--strict-markers", ] doctest_optionflags = "ALLOW_UNICODE" filterwarnings = [ @@ -164,6 +165,9 @@ filterwarnings = [ # The below warning is a consequence of how pytest detects fixtures and how DefinedNamespace behaves when an undefined attribute is being accessed. "ignore:Code. 
_pytestfixturefunction is not defined in namespace .*:UserWarning", ] +markers = [ + "webtest: mark a test as using the internet", +] # log_cli = true # log_cli_level = "DEBUG" log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(name)-12s %(filename)s:%(lineno)s:%(funcName)s %(message)s" diff --git a/test/conftest.py b/test/conftest.py index 98fe47385..2f61c9fe3 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -5,10 +5,19 @@ pytest.register_assert_rewrite("test.utils") +from pathlib import Path # noqa: E402 from test.utils.audit import AuditHookDispatcher # noqa: E402 from test.utils.http import ctx_http_server # noqa: E402 from test.utils.httpfileserver import HTTPFileServer # noqa: E402 -from typing import Generator, Optional # noqa: E402 +from typing import ( # noqa: E402 + Collection, + Dict, + Generator, + Iterable, + Optional, + Tuple, + Union, +) from rdflib import Graph @@ -67,3 +76,31 @@ def audit_hook_dispatcher() -> Generator[Optional[AuditHookDispatcher], None, No def exit_stack() -> Generator[ExitStack, None, None]: with ExitStack() as stack: yield stack + + +EXTRA_MARKERS: Dict[ + Tuple[Optional[str], str], Collection[Union[pytest.MarkDecorator, str]] +] = { + ("rdflib/__init__.py", "rdflib"): [pytest.mark.webtest], + ("rdflib/term.py", "rdflib.term.Literal.normalize"): [pytest.mark.webtest], + ("rdflib/extras/infixowl.py", "rdflib.extras.infixowl"): [pytest.mark.webtest], +} + + +PROJECT_ROOT = Path(__file__).parent.parent + + +@pytest.hookimpl(tryfirst=True) +def pytest_collection_modifyitems(items: Iterable[pytest.Item]): + for item in items: + parent_name = ( + str(Path(item.parent.module.__file__).relative_to(PROJECT_ROOT)) + if item.parent is not None + and isinstance(item.parent, pytest.Module) + and item.parent.module is not None + else None + ) + if (parent_name, item.name) in EXTRA_MARKERS: + extra_markers = EXTRA_MARKERS[(parent_name, item.name)] + for extra_marker in extra_markers: + item.add_marker(extra_marker) diff --git 
a/test/jsonld/test_onedotone.py b/test/jsonld/test_onedotone.py index bfb30ef8e..4c555d1ec 100644 --- a/test/jsonld/test_onedotone.py +++ b/test/jsonld/test_onedotone.py @@ -231,6 +231,10 @@ def global_state(): chdir(old_cwd) +@pytest.mark.webtest +# TODO: apply webtest marker to individual tests +# Marking this whole function as webtest is too broad, as many tests don't +# require the web, but making it narrower requires more refactoring. @pytest.mark.parametrize( "rdf_test_uri, func, suite_base, cat, num, inputpath, expectedpath, context, options", get_test_suite_cases(), diff --git a/test/test_examples.py b/test/test_examples.py index d21d7cc00..9a85de6e2 100644 --- a/test/test_examples.py +++ b/test/test_examples.py @@ -19,6 +19,7 @@ def generate_example_cases() -> Iterable[ParameterSet]: yield pytest.param(example_file, id=f"{example_file.relative_to(EXAMPLES_DIR)}") +@pytest.mark.webtest @pytest.mark.parametrize(["example_file"], generate_example_cases()) def test_example(example_file: Path) -> None: """ diff --git a/test/test_extras/test_infixowl/test_basic.py b/test/test_extras/test_infixowl/test_basic.py index 139238ba8..af9545499 100644 --- a/test/test_extras/test_infixowl/test_basic.py +++ b/test/test_extras/test_infixowl/test_basic.py @@ -1,5 +1,7 @@ from test.data import context0 +import pytest + from rdflib import OWL, Graph, Literal, Namespace from rdflib.extras.infixowl import ( Class, @@ -79,6 +81,7 @@ def test_infixowl_serialization(): ) +@pytest.mark.webtest def test_infix_owl_example1(): g = Graph(identifier=context0) g.bind("ex", EXNS) diff --git a/test/test_extras/test_infixowl/test_context.py b/test/test_extras/test_infixowl/test_context.py index 927785b27..50365ee32 100644 --- a/test/test_extras/test_infixowl/test_context.py +++ b/test/test_extras/test_infixowl/test_context.py @@ -28,6 +28,7 @@ def graph(): del g +@pytest.mark.webtest def test_context(graph): # Now we have an empty graph, we can construct OWL classes in it # using the Python 
classes defined in this module diff --git a/test/test_sparql/test_service.py b/test/test_sparql/test_service.py index 284565f7e..d83ac32e0 100644 --- a/test/test_sparql/test_service.py +++ b/test/test_sparql/test_service.py @@ -25,6 +25,7 @@ from rdflib.term import BNode, Identifier +@pytest.mark.webtest def test_service(): g = Graph() q = """select ?sameAs ?dbpComment @@ -47,6 +48,7 @@ def test_service(): assert len(r) == 2 +@pytest.mark.webtest def test_service_with_bind(): g = Graph() q = """select ?sameAs ?dbpComment ?subject @@ -69,6 +71,7 @@ def test_service_with_bind(): assert len(r) == 3 +@pytest.mark.webtest def test_service_with_bound_solutions(): g = Graph() g.update( @@ -104,6 +107,7 @@ def test_service_with_bound_solutions(): assert len(r) == 3 +@pytest.mark.webtest def test_service_with_values(): g = Graph() q = """select ?sameAs ?dbpComment ?subject @@ -126,6 +130,7 @@ def test_service_with_values(): assert len(r) == 3 +@pytest.mark.webtest def test_service_with_implicit_select(): g = Graph() q = """select ?s ?p ?o @@ -142,6 +147,7 @@ def test_service_with_implicit_select(): assert len(r) == 3 +@pytest.mark.webtest def test_service_with_implicit_select_and_prefix(): g = Graph() q = """prefix ex: @@ -159,6 +165,7 @@ def test_service_with_implicit_select_and_prefix(): assert len(r) == 3 +@pytest.mark.webtest def test_service_with_implicit_select_and_base(): g = Graph() q = """base @@ -176,6 +183,7 @@ def test_service_with_implicit_select_and_base(): assert len(r) == 3 +@pytest.mark.webtest def test_service_with_implicit_select_and_allcaps(): g = Graph() q = """SELECT ?s @@ -199,6 +207,7 @@ def freeze_bindings( return frozenset(result) +@pytest.mark.webtest def test_simple_not_null(): """Test service returns simple literals not as NULL. 
@@ -216,6 +225,7 @@ def test_simple_not_null(): assert results.bindings[0].get(Variable("o")) == Literal("c") +@pytest.mark.webtest def test_service_node_types(): """Test if SERVICE properly returns different types of nodes: - URI; diff --git a/tox.ini b/tox.ini index d2207cc69..d2ecc891a 100644 --- a/tox.ini +++ b/tox.ini @@ -24,7 +24,7 @@ commands_pre = commands = {env:TOX_EXTRA_COMMAND:} {env:TOX_MYPY_COMMAND:poetry run python -m mypy --show-error-context --show-error-codes --junit-xml=test_reports/{env:TOX_JUNIT_XML_PREFIX:}mypy-junit.xml} - {posargs:poetry run pytest -ra --tb=native {env:TOX_PYTEST_ARGS:--junit-xml=test_reports/{env:TOX_JUNIT_XML_PREFIX:}pytest-junit.xml --cov --cov-report=}} + {posargs:poetry run {env:TOX_TEST_HARNESS:} pytest -ra --tb=native {env:TOX_PYTEST_ARGS:--junit-xml=test_reports/{env:TOX_JUNIT_XML_PREFIX:}pytest-junit.xml --cov --cov-report=} {env:TOX_PYTEST_EXTRA_ARGS:}} docs: poetry run sphinx-build -T -W -b html -d {envdir}/doctree docs docs/_build/html [testenv:covreport] From dfa40545adc3e701bc36d2f8cc4dba1d81a906c4 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Thu, 23 Mar 2023 18:48:14 +0100 Subject: [PATCH 021/114] fix: IRI to URI conversion (#2304) The URI to IRI conversion was percentage-quoting characters that should not have been quoted, like equals in the query string. It was also not quoting things that should have been quoted, like the username and password components of a URI. This change improves the conversion by only quoting characters that are not allowed in specific parts of the URI and quoting previously unquoted components. The safe characters for each segment are taken from [RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). The new behavior is heavily inspired by [`werkzeug.urls.iri_to_uri`](https://github.com/pallets/werkzeug/blob/92c6380248c7272ee668e1f8bbd80447027ccce2/src/werkzeug/urls.py#L926-L931) though there are some differences. - Closes . 
--- rdflib/util.py | 90 +++++++++++++++++++++++++----- test/test_graph/test_graph_http.py | 43 ++++++++++++-- test/test_util.py | 18 ++++++ test/utils/http.py | 9 +++ test/utils/wildcard.py | 28 ++++++++++ 5 files changed, 169 insertions(+), 19 deletions(-) create mode 100644 test/utils/wildcard.py diff --git a/rdflib/util.py b/rdflib/util.py index 4485de2e0..2442b3728 100644 --- a/rdflib/util.py +++ b/rdflib/util.py @@ -522,32 +522,92 @@ def _coalesce( return default +_RFC3986_SUBDELIMS = "!$&'()*+,;=" +""" +``sub-delims`` production from `RFC 3986, section 2.2 +`_. +""" + +_RFC3986_PCHAR_NU = "%" + _RFC3986_SUBDELIMS + ":@" +""" +The non-unreserved characters in the ``pchar`` production from RFC 3986. +""" + +_QUERY_SAFE_CHARS = _RFC3986_PCHAR_NU + "/?" +""" +The non-unreserved characters that are safe to use in in the query and fragment +components. + +.. code-block:: + + pchar = unreserved / pct-encoded / sub-delims / ":" / "@" query + = *( pchar / "/" / "?" ) fragment = *( pchar / "/" / "?" ) +""" + +_USERNAME_SAFE_CHARS = _RFC3986_SUBDELIMS + "%" +""" +The non-unreserved characters that are safe to use in the username and password +components. + +.. code-block:: + + userinfo = *( unreserved / pct-encoded / sub-delims / ":" ) + +":" is excluded as this is only used for the username and password components, +and they are treated separately. +""" + +_PATH_SAFE_CHARS = _RFC3986_PCHAR_NU + "/" +""" +The non-unreserved characters that are safe to use in the path component. + + +This is based on various path-related productions from RFC 3986. +""" + + def _iri2uri(iri: str) -> str: """ - Convert an IRI to a URI (Python 3). - https://stackoverflow.com/a/42309027 - https://stackoverflow.com/a/40654295 - netloc should be encoded using IDNA; - non-ascii URL path should be encoded to UTF-8 and then percent-escaped; - non-ascii query parameters should be encoded to the encoding of a page - URL was extracted from (or to the encoding server uses), then - percent-escaped. 
+ Prior art: + + * `iri_to_uri from Werkzeug `_ + >>> _iri2uri("https://dbpedia.org/resource/Almería") 'https://dbpedia.org/resource/Almer%C3%ADa' """ + # https://datatracker.ietf.org/doc/html/rfc3986 # https://datatracker.ietf.org/doc/html/rfc3305 - (scheme, netloc, path, query, fragment) = urlsplit(iri) + parts = urlsplit(iri) + (scheme, netloc, path, query, fragment) = parts - # Just support http/https, otherwise return the iri unmolested + # Just support http/https, otherwise return the iri unaltered if scheme not in ["http", "https"]: return iri - scheme = quote(scheme) - netloc = netloc.encode("idna").decode("utf-8") - path = quote(path) - query = quote(query) - fragment = quote(fragment) + path = quote(path, safe=_PATH_SAFE_CHARS) + query = quote(query, safe=_QUERY_SAFE_CHARS) + fragment = quote(fragment, safe=_QUERY_SAFE_CHARS) + + if parts.hostname: + netloc = parts.hostname.encode("idna").decode("ascii") + else: + netloc = "" + + if ":" in netloc: + # Quote IPv6 addresses + netloc = f"[{netloc}]" + + if parts.port: + netloc = f"{netloc}:{parts.port}" + + if parts.username: + auth = quote(parts.username, safe=_USERNAME_SAFE_CHARS) + if parts.password: + pass_quoted = quote(parts.password, safe=_USERNAME_SAFE_CHARS) + auth = f"{auth}:{pass_quoted}" + netloc = f"{auth}@{netloc}" + uri = urlunsplit((scheme, netloc, path, query, fragment)) if iri.endswith("#") and not uri.endswith("#"): diff --git a/test/test_graph/test_graph_http.py b/test/test_graph/test_graph_http.py index 762e3d5b3..97c64c3ac 100644 --- a/test/test_graph/test_graph_http.py +++ b/test/test_graph/test_graph_http.py @@ -1,14 +1,20 @@ +import logging import re from http.server import BaseHTTPRequestHandler from test.data import TEST_DATA_DIR from test.utils import GraphHelper from test.utils.graph import cached_graph -from test.utils.http import ctx_http_handler +from test.utils.http import ( + MOCK_HTTP_REQUEST_WILDCARD, + MockHTTPRequest, + ctx_http_handler, +) from test.utils.httpservermock 
import ( MethodName, MockHTTPResponse, ServedBaseHTTPServerMock, ) +from test.utils.wildcard import URL_PARSE_RESULT_WILDCARD from urllib.error import HTTPError import pytest @@ -235,7 +241,34 @@ def test_5xx(self): assert raised.value.code == 500 -def test_iri_source(function_httpmock: ServedBaseHTTPServerMock) -> None: +@pytest.mark.parametrize( + ["url_suffix", "expected_request"], + [ + ( + "/resource/Almería", + MOCK_HTTP_REQUEST_WILDCARD._replace( + path="/resource/Almer%C3%ADa", + parsed_path=URL_PARSE_RESULT_WILDCARD._replace( + path="/resource/Almer%C3%ADa" + ), + ), + ), + ( + "/resource/Almería?foo=bar", + MOCK_HTTP_REQUEST_WILDCARD._replace( + parsed_path=URL_PARSE_RESULT_WILDCARD._replace( + path="/resource/Almer%C3%ADa" + ), + path_query={"foo": ["bar"]}, + ), + ), + ], +) +def test_iri_source( + url_suffix: str, + expected_request: MockHTTPRequest, + function_httpmock: ServedBaseHTTPServerMock, +) -> None: diverse_triples_path = TEST_DATA_DIR / "variants/diverse_triples.ttl" function_httpmock.responses[MethodName.GET].append( @@ -247,9 +280,11 @@ def test_iri_source(function_httpmock: ServedBaseHTTPServerMock) -> None: ) ) g = Graph() - g.parse(f"{function_httpmock.url}/resource/Almería") + g.parse(f"{function_httpmock.url}{url_suffix}") assert function_httpmock.call_count == 1 GraphHelper.assert_triple_sets_equals(cached_graph((diverse_triples_path,)), g) + assert len(g) > 1 req = function_httpmock.requests[MethodName.GET].pop(0) - assert req.path == "/resource/Almer%C3%ADa" + logging.debug("req = %s", req) + assert expected_request == req diff --git a/test/test_util.py b/test/test_util.py index 3e60bbb86..c842bc928 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -635,6 +635,24 @@ def test_get_tree( "http://example.com:1231/", }, ), + ( + "http://example.com:1231/a=b", + { + "http://example.com:1231/a=b", + }, + ), + ( + "http://aé:aé@example.com:1231/bé/a=bé&c=d#a=bé&c=d", + { + 
"http://a%C3%A9:a%C3%A9@example.com:1231/b%C3%A9/a=b%C3%A9&c=d#a=b%C3%A9&c=d", + }, + ), + ( + "http://a%C3%A9:a%C3%A9@example.com:1231/b%C3%A9/a=b%C3%A9&c=d#a=b%C3%A9&c=d", + { + "http://a%C3%A9:a%C3%A9@example.com:1231/b%C3%A9/a=b%C3%A9&c=d#a=b%C3%A9&c=d", + }, + ), ], ) def test_iri2uri(iri: str, expected_result: Union[Set[str], Type[Exception]]) -> None: diff --git a/test/utils/http.py b/test/utils/http.py index af72e0157..fa13a2ed9 100644 --- a/test/utils/http.py +++ b/test/utils/http.py @@ -4,6 +4,7 @@ import random from contextlib import contextmanager from http.server import BaseHTTPRequestHandler, HTTPServer +from test.utils.wildcard import EQ_WILDCARD from threading import Thread from typing import ( Dict, @@ -62,6 +63,14 @@ class MockHTTPRequest(NamedTuple): body: Optional[bytes] +MOCK_HTTP_REQUEST_WILDCARD = MockHTTPRequest( + EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD +) +""" +This object should be equal to any `MockHTTPRequest` object. +""" + + class MockHTTPResponse(NamedTuple): status_code: int reason_phrase: str diff --git a/test/utils/wildcard.py b/test/utils/wildcard.py new file mode 100644 index 000000000..7444a24bd --- /dev/null +++ b/test/utils/wildcard.py @@ -0,0 +1,28 @@ +from typing import Any +from urllib.parse import ParseResult + + +class EqWildcard: + """ + An object that matches anything. + """ + + def __eq__(self, other: Any) -> Any: + return True + + def __req__(self, other: Any) -> Any: + return True + + def __repr__(self) -> str: + return "EqWildcard()" + + +EQ_WILDCARD: Any = EqWildcard() + + +URL_PARSE_RESULT_WILDCARD = ParseResult( + EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD, EQ_WILDCARD +) +""" +This should be equal to any `ParseResult` object. 
+""" From 1ab4fc0533d8042e06d7201d7d80d2f41cda83d7 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Thu, 23 Mar 2023 17:49:52 +0000 Subject: [PATCH 022/114] chore: Update CONTRIBUTORS from commit history (#2305) This ensures contributors are credited. Also added .mailmap to fix early misattributed contributions. --- .mailmap | 101 +++++++++++++++++++++++++++++++++++++ CONTRIBUTORS | 138 ++++++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 232 insertions(+), 7 deletions(-) create mode 100644 .mailmap diff --git a/.mailmap b/.mailmap new file mode 100644 index 000000000..d953d228f --- /dev/null +++ b/.mailmap @@ -0,0 +1,101 @@ +Aayush Gupta <32807623+aayush17002@users.noreply.github.com> aayush17002 <32807623+aayush17002@users.noreply.github.com> +Aditi Sejal <55681645+asejal@users.noreply.github.com> asejal <55681645+asejal@users.noreply.github.com> +Aditya Bhadoo <44553897+bhadoo-aditya@users.noreply.github.com> bhadoo-aditya <44553897+bhadoo-aditya@users.noreply.github.com> +Akash Sharma Akash-Sharma-1 +Artem Revenko artreven +Ashley Sommer Ashley Sommer +Ashley Sommer Ashley Sommer +Arushi Chauhan arushi019 +Bruno Cuconato bruno cuconato +Christian Amsüss chrysn +Cliff Xuan cliff xuan +Camille Maumet cmaumet +Chimezie Ogbuji chimezie +Conrad Leonard delocalizer +Cory Dodt corydodt +Daniel Krech convert-repo +Daniel Krech eikeon +Daniel Krech eikeon@eikeon.com +Daniel Krech testing +Daniel Krech unknown +Dmitriy Bastrak <68817666+DBastrak@users.noreply.github.com> Dmitriy <68817666+DBastrak@users.noreply.github.com> +Drew Perttula drewp +Drew Perttula drewpca +Ed Summers ed.summers +Edmond Chuc Edmond +Edmond Chuc Edmond Chuc <37032744+edmondchuc@users.noreply.github.com> +Edmond Chuc Edmond Chuc +Edmond Chuc Edmond Chuc +Elias Torres eliast +Filip Kovacevic Filip Kovacevic +Fredrik Aschehoug <15358786+fredrik-aschehoug@users.noreply.github.com> fredrik-aschehoug <15358786+fredrik-aschehoug@users.noreply.github.com> +Gerhard Weis Gerhard Weis 
+Gerhard Weis Gerhard Weis +Graham Higgins gjh +Graham Higgins gjhiggins +Graham Higgins Graham Higgins +Graham Higgins Graham Higgins +Graham Higgins Graham Higgins +Gunnar Aastrand Grimnes gromgull +Gunnar Aastrand Grimnes gromgull +Gunnar Aastrand Grimnes Gunnar Aastrand Grimnes +Harold Solbrig hsolbrig +Harold Solbrig hsolbrig +Hanno Jung angus +Jeroen van der Ham jeroenh +Jerven Bolleman Jerven bolleman +Jim Man jimman2003 +Jamie McCusker Jim McCusker +Jamie McCusker Ubuntu +John L. Clark John.L.Clark +Jörn Hees Jörn Hees +Jörn Hees Joern Hees +Jörn Hees Jörn Hees +Jörn Hees Joern Hees +Jörn Hees Jörn Hees +Josh Moore jmoore +Kern Cece kernc +Kushagr Arora kushagr08 <35035965+kushagr08@users.noreply.github.com> +Łukasz Jancewicz DzinX +Mark Hedley marqh +Mark van der Pas gitmpje <61799691+gitmpje@users.noreply.github.com> +Mark Watts Mark Watts +Martin van der Werff Martin van der Werff +Maurizio Nagni kusamau +Michel Pelletier michel +Mikael Nilsson mikael +Nathan Maynes Nathan M +Nicholas J. Car Nicholas Car +Nicholas J. Car Nicholas Car +Nicholas J. 
Car nicholascar +Niklas Lindström lindstream +Niklas Lindström Niklas Lindstrom +Olivier Grisel ogrisel +Filip Kovacevic GreenfishK +Ralf Grubenmann Ralf Grubenmann +Remi Chateauneu Primhill Computers +Richard Wallis dataliberate +Ritam Biswas devrb +Rory McCann Rory McCann +Rouzbeh Asghari rozbeh +Sean Fern gsf747 +Shawn Brown shawnb +Shreyas Nagare shreyasnagare +Stephen Thorne stephen.thorne@gmail.com +Thomas Bettler t0b3 +Thomas Bettler t0b3 +Thomas Köner-Daikan wssbck +Thomas Kluyver takowl@gmail.com +Thomas Tanon Tpt +Thomas Tanon Thomas Tanon +Timo Homburg Timo +Tom Baker tombaker +Veyndan Stuart veyndan +Vigten Stain vigten +Wataru Haruna eggplants +William Waites William Waites +William Waites wwaites +William Waites ww@epsilon.styx.org +Whit Morriss whit +Zach Lûster kernc +Zach Lûster Kernc \ No newline at end of file diff --git a/CONTRIBUTORS b/CONTRIBUTORS index acd2ccb01..506bda0e6 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -1,57 +1,181 @@ # See https://github.com/RDFLib/rdflib/graphs/contributors +Aaron Coburn Aaron Swartz -Andrew Eland +Aayush Gupta +Abhishek Vyas +Adam Ever Hadani +Aditi Sejal +Aditya Bhadoo +Akash Sharma +Alessandro Amici Alex Nelson +Alexander Dutton +Amade Nemes +Amit Beka +Anatoly Scherbakov +Andrew Eland Andrew Kuchling -Ashley Sommer +Anton Lodder +Anubhav Chaudhary +Arnoud Hilhorst +Artem Revenko +Arushi Chauhan Arve Knudsen +Ashley Sommer +Axel Nennker Benjamin Cogrel +Benoit Seguin +Bernhard Schandl +Bertrand Croq +Blake Regalia Boris Pelakh +Bruno Cuconato +Bruno P. 
Kinoshita +Camille Maumet Chimezie Ogbuji Chris Markiewicz Chris Mungall +Christian Amsüss +Christian Clauss +Cliff Xuan +Conrad Leonard +Cory Dodt Dan Scott Daniel Krech +Dann Martens +Darren Garvey +Dave Challis David H Jones +David Steinberg +Debabrata Deka +Diego Quintana +Dmitriy Bastrak Don Bowman Donny Winston Drew Perttula +Ed Summers Edmond Chuc Elias Torres +Elliot Ford +Enrico Bacis +Eric Peden +Filip Kovacevic Florian Ludwig +Fredrik Aschehoug Gabe Fierro Gerhard Weis +Graeme Stuart Graham Higgins Graham Klyne Gunnar AAstrand Grimnes +Hanno Jung Harold Solbrig Ivan Herman Iwan Aucamp Jamie McCusker +Jaimie Murdock +James Michael DuPont +Janus Troelsen +Jedrzej Potoniec Jeroen van der Ham +Jerome Dockes Jerven Bolleman -Joern Hees +Jim Man +Jodi Schneider +John L. Clark +Jon Michaelchuck +Jon Stroop +Josh Moore Juan José González +Justin Clark-Casey +Jörn Hees +Karthikeyan Singaravelan +Kempei Igarashi Kendall Clark +Kris Maglione +Kushagr Arora +Laurence Rowe Leandro López +Linus Lewandowski Lucio Torre +Łukasz Jancewicz +Marat Charlaganov +Mark Amery +Mark Hedley Mark Watts +Mark van der Pas +Martin Wendt +Martin van der Werff +Matthias Urban +Maurizio Nagni +Maxim Kolchin Michel Pelletier -Natanael Arndt +Miel Vander Sande +Mikael Nilsson +Minor Gordon Nacho Barrientos Arias +Natanael Arndt +Nate Prewitt +Nathan Maynes Nicholas J. Car Niklas Lindström -Pierre-Antoine Champin +Nolan Nichols +Olivier Bornet +Olivier Grisel +Osma Suominen +Otto Kruse +Paul Tremberth +Peter Cock Phil Dawes +Philippe Luickx Phillip Pearson -Ron Alford +Pierre-Antoine Champin +Ralf Grubenmann Remi Chateauneu +Richard Wallis +Rinke Hoekstra +Ritam Biswas +Robert Hall +Ron Alford +Rory McCann +Rouzbeh Asghari +Sam Thursfield +Sascha Peilicke +Satrajit Ghosh +Sean Fern +Sebastian Schuberth +Sebastian Trueg +Shawn Brown +Shreyas Nagare Sidnei da Silva Simon McVittie +Sina Ahmadi Stefan Niederhauser +Stephen Thorne +Steve Leak Stig B. 
Dørmænen -Tom Gillespie +Thomas Bettler Thomas Kluyver +Thomas Köner-Daikan +Thomas Tanon +Tim Gates +Timo Homburg +Tom Baker +Tom Gillespie +Tom Mitchell +Tomáš Hrnčiar +Tony Fast +Troy Sincomb Urs Holzer +Valentin Grouès +Veyndan Stuart +Vigten Stain +Wataru Haruna +Wes Turner +Whit Morriss William Waites +Wim Muskee +Yaroslav Halchenko +Yves-Marie Haussonne +Zach Lûster From 832e6931af6be32f71993088d70ae93a5cb9cb05 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 24 Mar 2023 19:17:16 +0100 Subject: [PATCH 023/114] fix: JSON-LD context construction from a `dict` (#2306) A variable was only being initialized for string-valued inputs, but if a `dict` input was passed the variable would still be accessed, resulting in a `UnboundLocalError`. This change initializes the variable always, instead of only when string-valued input is used to construct a JSON-LD context. - Closes . --- rdflib/plugins/shared/jsonld/context.py | 2 +- test/jsonld/test_context.py | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/rdflib/plugins/shared/jsonld/context.py b/rdflib/plugins/shared/jsonld/context.py index 23ab6db23..d0224d4a4 100644 --- a/rdflib/plugins/shared/jsonld/context.py +++ b/rdflib/plugins/shared/jsonld/context.py @@ -421,13 +421,13 @@ def _prep_sources( ): for source in inputs: source_url = in_source_url + new_base = base if isinstance(source, str): source_url = source source_doc_base = base or self.doc_base new_ctx = self._fetch_context( source, source_doc_base, referenced_contexts ) - new_base = base if new_ctx is None: continue else: diff --git a/test/jsonld/test_context.py b/test/jsonld/test_context.py index b7628fb3e..6578268a2 100644 --- a/test/jsonld/test_context.py +++ b/test/jsonld/test_context.py @@ -3,6 +3,7 @@ """ from functools import wraps +from pathlib import Path from typing import Any, Dict from rdflib.plugins.shared.jsonld import context, errors @@ -213,3 +214,23 @@ def test_invalid_remote_context(): ctx_url = 
"http://example.org/recursive.jsonld" SOURCES[ctx_url] = {"key": "value"} ctx = Context(ctx_url) + + +def test_file_source(tmp_path: Path) -> None: + """ + A file URI source to `Context` gets processed correctly. + """ + file = tmp_path / "context.jsonld" + file.write_text(r"""{ "@context": { "ex": "http://example.com/" } }""") + ctx = Context(source=file.as_uri()) + assert "http://example.com/" == ctx.terms["ex"].id + + +def test_dict_source(tmp_path: Path) -> None: + """ + A dictionary source to `Context` gets processed correctly. + """ + file = tmp_path / "context.jsonld" + file.write_text(r"""{ "@context": { "ex": "http://example.com/" } }""") + ctx = Context(source=[{"@context": file.as_uri()}]) + assert "http://example.com/" == ctx.terms["ex"].id From 47e6c37f9a0a1930447f120902ff276aa7fe48e8 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sat, 25 Mar 2023 18:45:37 +0100 Subject: [PATCH 024/114] test: remove xfail on SPARQL DESCRIBE test (#2309) SPARQL DESCRIBE was implemented in so there should be no `xfail` on it tests for it. This change removes one such `xfail` that was in place. --- test/test_w3c_spec/test_sparql10_w3c.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/test/test_w3c_spec/test_sparql10_w3c.py b/test/test_w3c_spec/test_sparql10_w3c.py index 1de6daa4d..73d06d89b 100644 --- a/test/test_w3c_spec/test_sparql10_w3c.py +++ b/test/test_w3c_spec/test_sparql10_w3c.py @@ -48,9 +48,6 @@ f"{REMOTE_BASE_IRI}syntax-sparql1/manifest#syntax-lit-08": pytest.mark.skip( reason="bad test, positive syntax has invalid syntax." ), - f"{REMOTE_BASE_IRI}syntax-sparql2/manifest#syntax-form-describe01": pytest.mark.xfail( - reason="Describe not supported." - ), f"{REMOTE_BASE_IRI}syntax-sparql2/manifest#syntax-general-08": pytest.mark.xfail( reason="Not parsing with no spaces." 
), From 3faa01bf169166afa68be8084f16b537d850a070 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sat, 25 Mar 2023 18:46:05 +0100 Subject: [PATCH 025/114] fix: change the prefix for `https://schema.org/` back to `schema` (#2312) The default prefix for `https://schema.org/` registered with `rdflib.namespace.NamespaceManager` was inadvertently changed to `sdo` in 6.2.0, this however constitutes a breaking change, as code that was using the `schema` prefix would no longer have the same behaviour. This change changes the prefix back to `schema`. --- rdflib/namespace/__init__.py | 6 +++--- test/{ => test_namespace}/test_namespacemanager.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) rename test/{ => test_namespace}/test_namespacemanager.py (98%) diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index a68526e19..fb6d845b0 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -364,8 +364,8 @@ class NamespaceManager(object): * rdflib: * binds all the namespaces shipped with RDFLib as DefinedNamespace instances * all the core namespaces and all the following: brick, csvw, dc, dcat - * dcmitype, dcterms, dcam, doap, foaf, geo, odrl, org, prof, prov, qb, sdo - * sh, skos, sosa, ssn, time, vann, void, wgs + * dcmitype, dcterms, dcam, doap, foaf, geo, odrl, org, prof, prov, qb, schema + * sh, skos, sosa, ssn, time, vann, void * see the NAMESPACE_PREFIXES_RDFLIB object for the up-to-date list * none: * binds no namespaces to prefixes @@ -906,7 +906,7 @@ def get_longest_namespace(trie: Dict[str, Any], value: str) -> Optional[str]: "prof": PROF, "prov": PROV, "qb": QB, - "sdo": SDO, + "schema": SDO, "sh": SH, "skos": SKOS, "sosa": SOSA, diff --git a/test/test_namespacemanager.py b/test/test_namespace/test_namespacemanager.py similarity index 98% rename from test/test_namespacemanager.py rename to test/test_namespace/test_namespacemanager.py index d688834ef..d79f04194 100644 --- a/test/test_namespacemanager.py +++ 
b/test/test_namespace/test_namespacemanager.py @@ -17,8 +17,8 @@ sys.path.append(str(Path(__file__).parent.parent.absolute())) -from rdflib import Graph -from rdflib.namespace import ( +from rdflib import Graph # noqa: E402 +from rdflib.namespace import ( # noqa: E402 _NAMESPACE_PREFIXES_CORE, _NAMESPACE_PREFIXES_RDFLIB, OWL, @@ -196,7 +196,7 @@ def test_nman_bind_namespaces( "qb": "http://purl.org/linked-data/cube#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "rdfs": "http://www.w3.org/2000/01/rdf-schema#", - "sdo": "https://schema.org/", + "schema": "https://schema.org/", "sh": "http://www.w3.org/ns/shacl#", "skos": "http://www.w3.org/2004/02/skos/core#", "sosa": "http://www.w3.org/ns/sosa/", From d7883eb494673d0fb09efa3bced983ebb48b6961 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sat, 25 Mar 2023 18:47:20 +0100 Subject: [PATCH 026/114] fix: Add `to_dict` method to the JSON-LD `Context` class. (#2310) `Context.to_dict` is used in JSON-LD serialization, but it was not implemented. This change adds the method. - Closes . 
--------- Co-authored-by: Marc-Antoine Parent --- rdflib/plugins/shared/jsonld/context.py | 37 +++++++++++++- test/jsonld/test_context.py | 51 +++++++++++++++++++ .../test_serializer_jsonld.py | 44 ++++++++++++++++ 3 files changed, 130 insertions(+), 2 deletions(-) create mode 100644 test/test_serializers/test_serializer_jsonld.py diff --git a/rdflib/plugins/shared/jsonld/context.py b/rdflib/plugins/shared/jsonld/context.py index d0224d4a4..b19f66737 100644 --- a/rdflib/plugins/shared/jsonld/context.py +++ b/rdflib/plugins/shared/jsonld/context.py @@ -85,7 +85,7 @@ def __init__( self.terms: Dict[str, Any] = {} # _alias maps NODE_KEY to list of aliases self._alias: Dict[str, List[str]] = {} - self._lookup: Dict[Tuple[str, Any, Union[Defined, str], bool], Any] = {} + self._lookup: Dict[Tuple[str, Any, Union[Defined, str], bool], Term] = {} self._prefixes: Dict[str, Any] = {} self.active = False self.parent: Optional[Context] = None @@ -243,8 +243,10 @@ def add_term( if isinstance(container, (list, set, tuple)): container = set(container) - else: + elif container is not UNDEF: container = set([container]) + else: + container = set() term = Term( idref, @@ -617,6 +619,37 @@ def _get_source_id(self, source: Dict[str, Any], key: str) -> Optional[str]: term = term.get(ID) return term + def _term_dict(self, term: Term) -> Union[Dict[str, Any], str]: + tdict: Dict[str, Any] = {} + if term.type != UNDEF: + tdict[TYPE] = self.shrink_iri(term.type) + if term.container: + tdict[CONTAINER] = list(term.container) + if term.language != UNDEF: + tdict[LANG] = term.language + if term.reverse: + tdict[REV] = term.id + else: + tdict[ID] = term.id + if tdict.keys() == {ID}: + return tdict[ID] + return tdict + + def to_dict(self) -> Dict[str, Any]: + """ + Returns a dictionary representation of the context that can be + serialized to JSON. + + :return: a dictionary representation of the context. 
+ """ + r = {v: k for (k, v) in self._prefixes.items()} + r.update({term.name: self._term_dict(term) for term in self._lookup.values()}) + if self.base: + r[BASE] = self.base + if self.language: + r[LANG] = self.language + return r + Term = namedtuple( "Term", diff --git a/test/jsonld/test_context.py b/test/jsonld/test_context.py index 6578268a2..034936d28 100644 --- a/test/jsonld/test_context.py +++ b/test/jsonld/test_context.py @@ -2,10 +2,12 @@ JSON-LD Context Spec """ +import json from functools import wraps from pathlib import Path from typing import Any, Dict +from rdflib.namespace import PROV, XSD, Namespace from rdflib.plugins.shared.jsonld import context, errors from rdflib.plugins.shared.jsonld.context import Context @@ -234,3 +236,52 @@ def test_dict_source(tmp_path: Path) -> None: file.write_text(r"""{ "@context": { "ex": "http://example.com/" } }""") ctx = Context(source=[{"@context": file.as_uri()}]) assert "http://example.com/" == ctx.terms["ex"].id + + +EG = Namespace("https://example.com/") + +DIVERSE_CONTEXT = json.loads( + """ + { + "@context": { + "ex": "https://example.com/", + "generatedAt": { "@id": "http://www.w3.org/ns/prov#generatedAtTime", "@type": "http://www.w3.org/2001/XMLSchema#dateTime" }, + "graphMap": { "@id": "https://example.com/graphMap", "@container": ["@graph", "@id"] }, + "occupation_en": { "@id": "https://example.com/occupation", "@language": "en" }, + "children": { "@reverse": "https://example.com/parent" } + } + } + """ +) + + +def test_parsing() -> None: + """ + A `Context` can be parsed from a dict. 
+ """ + ctx = Context(DIVERSE_CONTEXT) + assert f"{EG}" == ctx.terms["ex"].id + assert f"{PROV.generatedAtTime}" == ctx.terms["generatedAt"].id + assert f"{XSD.dateTime}" == ctx.terms["generatedAt"].type + assert f"{EG.graphMap}" == ctx.terms["graphMap"].id + assert {"@graph", "@id"} == ctx.terms["graphMap"].container + assert f"{EG.occupation}" == ctx.terms["occupation_en"].id + assert "en" == ctx.terms["occupation_en"].language + assert False is ctx.terms["occupation_en"].reverse + assert True is ctx.terms["children"].reverse + assert f"{EG.parent}" == ctx.terms["children"].id + + +def test_to_dict() -> None: + """ + A `Context` can be converted to a dictionary. + """ + ctx = Context() + ctx.add_term("ex", f"{EG}") + ctx.add_term("generatedAt", f"{PROV.generatedAtTime}", coercion=f"{XSD.dateTime}") + ctx.add_term("graphMap", f"{EG.graphMap}", container=["@graph", "@id"]) + ctx.add_term("occupation_en", f"{EG.occupation}", language="en") + ctx.add_term("children", f"{EG.parent}", reverse=True) + result = ctx.to_dict() + result["graphMap"]["@container"] = sorted(result["graphMap"]["@container"]) + assert DIVERSE_CONTEXT["@context"] == result diff --git a/test/test_serializers/test_serializer_jsonld.py b/test/test_serializers/test_serializer_jsonld.py new file mode 100644 index 000000000..aff0544e3 --- /dev/null +++ b/test/test_serializers/test_serializer_jsonld.py @@ -0,0 +1,44 @@ +import json +import logging +import pprint +from typing import Any, Dict, Union + +import pytest + +from rdflib import Graph +from rdflib.namespace import Namespace +from rdflib.plugins.shared.jsonld.context import Context + +EG = Namespace("http://example.org/") + + +@pytest.mark.parametrize( + ["input"], + [ + ( + Context( + { + "eg": f"{EG}", + } + ), + ), + ({"eg": f"{EG}"},), + ], +) +def test_serialize_context(input: Union[Dict[str, Any], Context]) -> None: + """ + The JSON-LD serializer accepts and correctly serializes the context argument to the output. 
+ """ + graph = Graph() + graph.add((EG.subject, EG.predicate, EG.object0)) + graph.add((EG.subject, EG.predicate, EG.object1)) + context = Context( + { + "eg": f"{EG}", + } + ) + logging.debug("context = %s", pprint.pformat(vars(context))) + data = graph.serialize(format="json-ld", context=context) + logging.debug("data = %s", data) + obj = json.loads(data) + assert obj["@context"] == {"eg": f"{EG}"} From 4da67f9a17ffe8fa128afcdd2259e337bccafaa3 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sat, 25 Mar 2023 18:47:59 +0100 Subject: [PATCH 027/114] fix: reference to global inside `get_target_namespace_elements` (#2311) `get_target_namespace_elements` references the `args` global, which is not defined if the function is called from outside the module. This commit fixes that instead referencing the argument passed to the function. - Closes . --- rdflib/tools/defined_namespace_creator.py | 2 +- .../test_definednamespace_creator.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/rdflib/tools/defined_namespace_creator.py b/rdflib/tools/defined_namespace_creator.py index 1076cd6e0..dcc6a3be7 100644 --- a/rdflib/tools/defined_namespace_creator.py +++ b/rdflib/tools/defined_namespace_creator.py @@ -106,7 +106,7 @@ def get_target_namespace_elements( for e in elements: desc = e[1].replace("\n", " ") elements_strs.append( - f" {e[0].replace(args.target_namespace, '')}: URIRef # {desc}\n" + f" {e[0].replace(target_namespace, '')}: URIRef # {desc}\n" ) return elements, elements_strs diff --git a/test/test_namespace/test_definednamespace_creator.py b/test/test_namespace/test_definednamespace_creator.py index 8866a05d9..3a76dbc18 100644 --- a/test/test_namespace/test_definednamespace_creator.py +++ b/test/test_namespace/test_definednamespace_creator.py @@ -2,6 +2,9 @@ import sys from pathlib import Path +from rdflib.graph import Graph +from rdflib.tools.defined_namespace_creator import get_target_namespace_elements + def 
test_definednamespace_creator_qb(): """ @@ -163,3 +166,15 @@ def test_definednamespace_creator_multiple_comments(): # cleanup Path.unlink(Path("_MULTILINESTRINGEXAMPLE.py")) + + +def test_get_target_namespace_elements(rdfs_graph: Graph) -> None: + elements = get_target_namespace_elements( + rdfs_graph, "http://www.w3.org/2000/01/rdf-schema#" + ) + assert 2 == len(elements) + assert 16 == len(elements[0]) + assert ( + "http://www.w3.org/2000/01/rdf-schema#Class", + "The class of classes.", + ) in elements[0] From 57bb42886b57a37f1ba93a4d1b52651d978d049c Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 26 Mar 2023 12:53:29 +0200 Subject: [PATCH 028/114] fix: restore the 6.1.1 default bound namespaces (#2313) The namespaces bound by default by `rdflib.graph.Graph` and `rdflib.namespace.NamespaceManager` was reduced in version 6.2.0 of RDFLib, however, this also would cause code that worked with 6.1.1 to break, so this constituted a breaking change. This change restores the previous behaviour, binding the same namespaces as was bound in 6.1.1. To bind a reduced set of namespaces, the `bind_namespaces` parameter of `rdflib.graph.Graph` or `rdflib.namespace.NamespaceManager` can be used. - Closes . 
--- rdflib/graph.py | 2 +- rdflib/namespace/__init__.py | 15 ++- test/test_graph/test_namespace_rebinding.py | 3 +- test/test_namespace/test_namespacemanager.py | 96 ++++++++++++++++++- test/test_serializers/test_xmlwriter_qname.py | 4 +- test/test_sparql/test_service.py | 16 ++-- test/utils/httpservermock.py | 5 +- 7 files changed, 121 insertions(+), 20 deletions(-) diff --git a/rdflib/graph.py b/rdflib/graph.py index 7d32ab38a..4a96e6d37 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -437,7 +437,7 @@ def __init__( identifier: Optional[Union[_ContextIdentifierType, str]] = None, namespace_manager: Optional[NamespaceManager] = None, base: Optional[str] = None, - bind_namespaces: "_NamespaceSetString" = "core", + bind_namespaces: "_NamespaceSetString" = "rdflib", ): super(Graph, self).__init__() self.base = base diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index fb6d845b0..c88fdedd4 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -360,13 +360,13 @@ class NamespaceManager(object): * core: * binds several core RDF prefixes only * owl, rdf, rdfs, xsd, xml from the NAMESPACE_PREFIXES_CORE object - * this is default * rdflib: * binds all the namespaces shipped with RDFLib as DefinedNamespace instances * all the core namespaces and all the following: brick, csvw, dc, dcat * dcmitype, dcterms, dcam, doap, foaf, geo, odrl, org, prof, prov, qb, schema * sh, skos, sosa, ssn, time, vann, void * see the NAMESPACE_PREFIXES_RDFLIB object for the up-to-date list + * this is default * none: * binds no namespaces to prefixes * note this is NOT default behaviour @@ -374,6 +374,14 @@ class NamespaceManager(object): * using prefix bindings from prefix.cc which is a online prefixes database * not implemented yet - this is aspirational + .. 
attention:: + + The namespaces bound for specific values of ``bind_namespaces`` + constitute part of RDFLib's public interface, so changes to them should + only be additive within the same minor version. Removing values, or + removing namespaces that are bound by default, constitutes a breaking + change. + See the Sample usage @@ -390,10 +398,11 @@ class NamespaceManager(object): >>> all_ns = [n for n in g.namespace_manager.namespaces()] >>> assert ('ex', rdflib.term.URIRef('http://example.com/')) in all_ns >>> - """ - def __init__(self, graph: "Graph", bind_namespaces: "_NamespaceSetString" = "core"): + def __init__( + self, graph: "Graph", bind_namespaces: "_NamespaceSetString" = "rdflib" + ): self.graph = graph self.__cache: Dict[str, Tuple[str, URIRef, str]] = {} self.__cache_strict: Dict[str, Tuple[str, URIRef, str]] = {} diff --git a/test/test_graph/test_namespace_rebinding.py b/test/test_graph/test_namespace_rebinding.py index 3125d57ef..15cf44730 100644 --- a/test/test_graph/test_namespace_rebinding.py +++ b/test/test_graph/test_namespace_rebinding.py @@ -3,7 +3,7 @@ import pytest from rdflib import ConjunctiveGraph, Graph, Literal -from rdflib.namespace import OWL, Namespace +from rdflib.namespace import OWL, Namespace, NamespaceManager from rdflib.plugins.stores.memory import Memory from rdflib.term import URIRef @@ -294,6 +294,7 @@ def test_multigraph_bindings(): # Including newly-created objects that use the store cg = ConjunctiveGraph(store=store) + cg.namespace_manager = NamespaceManager(cg, bind_namespaces="core") assert ("foaf", foaf1_uri) not in list(cg.namespaces()) assert ("friend-of-a-friend", foaf1_uri) in list(cg.namespaces()) diff --git a/test/test_namespace/test_namespacemanager.py b/test/test_namespace/test_namespacemanager.py index d79f04194..20cb9594f 100644 --- a/test/test_namespace/test_namespacemanager.py +++ b/test/test_namespace/test_namespacemanager.py @@ -33,9 +33,41 @@ def test_core_prefixes_bound(): g = Graph() # prefixes in 
Graph - assert len(list(g.namespaces())) == len(_NAMESPACE_PREFIXES_CORE) + assert len(list(g.namespaces())) == len( + {**_NAMESPACE_PREFIXES_RDFLIB, **_NAMESPACE_PREFIXES_CORE} + ) pre = sorted([x[0] for x in list(g.namespaces())]) - assert pre == ["owl", "rdf", "rdfs", "xml", "xsd"] + assert pre == [ + "brick", + "csvw", + "dc", + "dcam", + "dcat", + "dcmitype", + "dcterms", + "doap", + "foaf", + "geo", + "odrl", + "org", + "owl", + "prof", + "prov", + "qb", + "rdf", + "rdfs", + "schema", + "sh", + "skos", + "sosa", + "ssn", + "time", + "vann", + "void", + "wgs", + "xml", + "xsd", + ] def test_rdflib_prefixes_bound(): @@ -175,6 +207,40 @@ def test_nman_bind_namespaces( @pytest.mark.parametrize( ["selector", "expected_bindings"], [ + ( + None, + { + "brick": "https://brickschema.org/schema/Brick#", + "csvw": "http://www.w3.org/ns/csvw#", + "dc": "http://purl.org/dc/elements/1.1/", + "dcat": "http://www.w3.org/ns/dcat#", + "dcmitype": "http://purl.org/dc/dcmitype/", + "dcterms": "http://purl.org/dc/terms/", + "dcam": "http://purl.org/dc/dcam/", + "doap": "http://usefulinc.com/ns/doap#", + "foaf": "http://xmlns.com/foaf/0.1/", + "odrl": "http://www.w3.org/ns/odrl/2/", + "geo": "http://www.opengis.net/ont/geosparql#", + "org": "http://www.w3.org/ns/org#", + "owl": "http://www.w3.org/2002/07/owl#", + "prof": "http://www.w3.org/ns/dx/prof/", + "prov": "http://www.w3.org/ns/prov#", + "qb": "http://purl.org/linked-data/cube#", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "schema": "https://schema.org/", + "sh": "http://www.w3.org/ns/shacl#", + "skos": "http://www.w3.org/2004/02/skos/core#", + "sosa": "http://www.w3.org/ns/sosa/", + "ssn": "http://www.w3.org/ns/ssn/", + "time": "http://www.w3.org/2006/time#", + "vann": "http://purl.org/vocab/vann/", + "void": "http://rdfs.org/ns/void#", + "wgs": "https://www.w3.org/2003/01/geo/wgs84_pos#", + "xsd": "http://www.w3.org/2001/XMLSchema#", + "xml": 
"http://www.w3.org/XML/1998/namespace", + }, + ), ( "rdflib", { @@ -208,19 +274,39 @@ def test_nman_bind_namespaces( "xsd": "http://www.w3.org/2001/XMLSchema#", "xml": "http://www.w3.org/XML/1998/namespace", }, - ) + ), + ( + "core", + { + "owl": "http://www.w3.org/2002/07/owl#", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "xsd": "http://www.w3.org/2001/XMLSchema#", + "xml": "http://www.w3.org/XML/1998/namespace", + }, + ), ], ) def test_bound_namespaces_subset( - selector: Any, expected_bindings: Dict[str, str] + selector: Optional[Any], expected_bindings: Dict[str, str] ) -> None: - graph = Graph(bind_namespaces=selector) + if selector is not None: + graph = Graph(bind_namespaces=selector) + else: + graph = Graph() bound_namespaces = dict( (key, str(value)) for key, value in graph.namespace_manager.namespaces() ) assert ( expected_bindings.items() <= bound_namespaces.items() ), f"missing items {expected_bindings.items() - bound_namespaces.items()}" + empty_graph = Graph(bind_namespaces="none") + if selector is not None: + nman = NamespaceManager(empty_graph, bind_namespaces=selector) + else: + nman = NamespaceManager(empty_graph) + nman_bound_namespaces = dict((key, str(value)) for key, value in nman.namespaces()) + assert bound_namespaces == nman_bound_namespaces def test_compute_qname_no_generate() -> None: diff --git a/test/test_serializers/test_xmlwriter_qname.py b/test/test_serializers/test_xmlwriter_qname.py index 662d3f590..13ee84a0f 100644 --- a/test/test_serializers/test_xmlwriter_qname.py +++ b/test/test_serializers/test_xmlwriter_qname.py @@ -10,7 +10,7 @@ def test_xmlwriter_namespaces(): - g = rdflib.Graph() + g = rdflib.Graph(bind_namespaces="core") with tempfile.TemporaryFile() as fp: xmlwr = XMLWriter(fp, g.namespace_manager, extra_ns={"": TRIXNS, "ex": EXNS}) @@ -32,7 +32,7 @@ def test_xmlwriter_namespaces(): def test_xmlwriter_decl(): - g = rdflib.Graph() + g = 
rdflib.Graph(bind_namespaces="core") with tempfile.TemporaryFile() as fp: xmlwr = XMLWriter(fp, g.namespace_manager, decl=0, extra_ns={"": TRIXNS}) diff --git a/test/test_sparql/test_service.py b/test/test_sparql/test_service.py index d83ac32e0..3a8270545 100644 --- a/test/test_sparql/test_service.py +++ b/test/test_sparql/test_service.py @@ -330,14 +330,16 @@ def test_with_mock( "head": {"vars": ["var"]}, "results": {"bindings": [{"var": item} for item in response_bindings]}, } - function_httpmock.responses[MethodName.GET].append( - MockHTTPResponse( - 200, - "OK", - json.dumps(response).encode("utf-8"), - {"Content-Type": ["application/sparql-results+json"]}, - ) + mock_response = MockHTTPResponse( + 200, + "OK", + json.dumps(response).encode("utf-8"), + {"Content-Type": ["application/sparql-results+json"]}, ) + # Adding the same response for GET and POST as the method used by RDFLib is + # dependent on the size of the service query. + function_httpmock.responses[MethodName.GET].append(mock_response) + function_httpmock.responses[MethodName.POST].append(mock_response) catcher: Optional[pytest.ExceptionInfo[Exception]] = None with ExitStack() as xstack: diff --git a/test/utils/httpservermock.py b/test/utils/httpservermock.py index 54596febd..6a87bf19c 100644 --- a/test/utils/httpservermock.py +++ b/test/utils/httpservermock.py @@ -96,7 +96,10 @@ def do_handler(handler: BaseHTTPRequestHandler) -> None: logging.debug("headers %s", request.headers) requests[method_name].append(request) - response = responses[method_name].pop(0) + try: + response = responses[method_name].pop(0) + except IndexError as error: + raise ValueError(f"No response for {method_name} request") from error handler.send_response(response.status_code, response.reason_phrase) apply_headers_to(response.headers, handler) handler.end_headers() From af179169a8bdf7270613e92ff7a92adf5b844681 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 26 Mar 2023 12:55:44 +0200 Subject: [PATCH 029/114] fix: 
`ROUND`, `ENCODE_FOR_URI` and `SECONDS` SPARQL functions (#2314) `ROUND` was not correctly rounding negative numbers towards positive infinity, `ENCODE_FOR_URI` incorrectly treated `/` as safe, and `SECONDS` did not include fractional seconds. This change corrects these issues. - Closes . --- rdflib/plugins/sparql/operators.py | 11 +- test/test_sparql/test_functions.py | 189 +++++++++++++++++++++++++++++ 2 files changed, 196 insertions(+), 4 deletions(-) create mode 100644 test/test_sparql/test_functions.py diff --git a/rdflib/plugins/sparql/operators.py b/rdflib/plugins/sparql/operators.py index 7e9e4d1ca..908b1d5c5 100644 --- a/rdflib/plugins/sparql/operators.py +++ b/rdflib/plugins/sparql/operators.py @@ -16,7 +16,7 @@ import re import uuid import warnings -from decimal import ROUND_HALF_UP, Decimal, InvalidOperation +from decimal import ROUND_HALF_DOWN, ROUND_HALF_UP, Decimal, InvalidOperation from functools import reduce from typing import Any, Callable, Dict, NoReturn, Optional, Tuple, Union, overload from urllib.parse import quote @@ -205,7 +205,7 @@ def Builtin_ROUND(expr: Expr, ctx) -> Literal: # this is an ugly work-around l_ = expr.arg v = numeric(l_) - v = int(Decimal(v).quantize(1, ROUND_HALF_UP)) + v = int(Decimal(v).quantize(1, ROUND_HALF_UP if v > 0 else ROUND_HALF_DOWN)) return Literal(v, datatype=l_.datatype) @@ -381,7 +381,7 @@ def Builtin_CONTAINS(expr: Expr, ctx) -> Literal: def Builtin_ENCODE_FOR_URI(expr: Expr, ctx) -> Literal: - return Literal(quote(string(expr.arg).encode("utf-8"))) + return Literal(quote(string(expr.arg).encode("utf-8"), safe="")) def Builtin_SUBSTR(expr: Expr, ctx) -> Literal: @@ -471,7 +471,10 @@ def Builtin_SECONDS(e: Expr, ctx) -> Literal: http://www.w3.org/TR/sparql11-query/#func-seconds """ d = datetime(e.arg) - return Literal(d.second, datatype=XSD.decimal) + result_value = Decimal(d.second) + if d.microsecond: + result_value += Decimal(d.microsecond) / Decimal(1000000) + return Literal(result_value, 
datatype=XSD.decimal) def Builtin_TIMEZONE(e: Expr, ctx) -> Literal: diff --git a/test/test_sparql/test_functions.py b/test/test_sparql/test_functions.py new file mode 100644 index 000000000..fb544142c --- /dev/null +++ b/test/test_sparql/test_functions.py @@ -0,0 +1,189 @@ +import logging +from decimal import Decimal + +import pytest + +from rdflib.graph import Graph +from rdflib.namespace import XSD, Namespace +from rdflib.plugins.sparql.operators import _lang_range_check +from rdflib.term import BNode, Identifier, Literal, URIRef + +EG = Namespace("https://example.com/") + + +@pytest.mark.parametrize( + ["expression", "expected_result"], + [ + (r"isIRI('eg:IRI')", Literal(False)), + (r"isIRI(eg:IRI)", Literal(True)), + (r"isURI('eg:IRI')", Literal(False)), + (r"isURI(eg:IRI)", Literal(True)), + (r"isBLANK(eg:IRI)", Literal(False)), + (r"isBLANK(BNODE())", Literal(True)), + (r"isLITERAL(eg:IRI)", Literal(False)), + (r"isLITERAL('eg:IRI')", Literal(True)), + (r"isNumeric(eg:IRI)", Literal(False)), + (r"isNumeric(1)", Literal(True)), + (r"STR(eg:IRI)", Literal("https://example.com/IRI")), + (r"STR(1)", Literal("1")), + (r'LANG("Robert"@en)', Literal("en")), + (r'LANG("Robert")', Literal("")), + (r'DATATYPE("Robert")', XSD.string), + (r'DATATYPE("42"^^xsd:integer)', XSD.integer), + (r'IRI("http://example/")', URIRef("http://example/")), + (r'BNODE("example")', BNode), + (r'STRDT("123", xsd:integer)', Literal("123", datatype=XSD.integer)), + (r'STRLANG("cats and dogs", "en")', Literal("cats and dogs", lang="en")), + (r"UUID()", URIRef), + (r"STRUUID()", Literal), + (r'STRLEN("chat")', Literal(4)), + (r'SUBSTR("foobar", 4)', Literal("bar")), + (r'UCASE("foo")', Literal("FOO")), + (r'LCASE("BAR")', Literal("bar")), + (r'strStarts("foobar", "foo")', Literal(True)), + (r'strStarts("foobar", "bar")', Literal(False)), + (r'strEnds("foobar", "bar")', Literal(True)), + (r'strEnds("foobar", "foo")', Literal(False)), + (r'contains("foobar", "bar")', Literal(True)), + 
(r'contains("foobar", "barfoo")', Literal(False)), + (r'strbefore("abc","b")', Literal("a")), + (r'strbefore("abc","xyz")', Literal("")), + (r'strafter("abc","b")', Literal("c")), + (r'strafter("abc","xyz")', Literal("")), + (r"ENCODE_FOR_URI('this/is/a/test')", Literal("this%2Fis%2Fa%2Ftest")), + (r"ENCODE_FOR_URI('this is a test')", Literal("this%20is%20a%20test")), + ( + r"ENCODE_FOR_URI('AAA~~0123456789~~---~~___~~...~~ZZZ')", + Literal("AAA~~0123456789~~---~~___~~...~~ZZZ"), + ), + (r'CONCAT("foo", "bar")', Literal("foobar")), + (r'langMatches(lang("That Seventies Show"@en), "en")', Literal(True)), + ( + r'langMatches(lang("Cette Série des Années Soixante-dix"@fr), "en")', + Literal(False), + ), + ( + r'langMatches(lang("Cette Série des Années Septante"@fr-BE), "en")', + Literal(False), + ), + (r'langMatches(lang("Il Buono, il Bruto, il Cattivo"), "en")', Literal(False)), + (r'langMatches(lang("That Seventies Show"@en), "FR")', Literal(False)), + ( + r'langMatches(lang("Cette Série des Années Soixante-dix"@fr), "FR")', + Literal(True), + ), + ( + r'langMatches(lang("Cette Série des Années Septante"@fr-BE), "FR")', + Literal(True), + ), + (r'langMatches(lang("Il Buono, il Bruto, il Cattivo"), "FR")', Literal(False)), + (r'langMatches(lang("That Seventies Show"@en), "*")', Literal(True)), + ( + r'langMatches(lang("Cette Série des Années Soixante-dix"@fr), "*")', + Literal(True), + ), + ( + r'langMatches(lang("Cette Série des Années Septante"@fr-BE), "*")', + Literal(True), + ), + (r'langMatches(lang("Il Buono, il Bruto, il Cattivo"), "*")', Literal(False)), + (r'langMatches(lang("abc"@en-gb), "en-GB")', Literal(True)), + (r'regex("Alice", "^ali", "i")', Literal(True)), + (r'regex("Bob", "^ali", "i")', Literal(False)), + (r'replace("abcd", "b", "Z")', Literal("aZcd")), + (r"abs(-1.5)", Literal("1.5", datatype=XSD.decimal)), + (r"round(2.4999)", Literal("2", datatype=XSD.decimal)), + (r"round(2.5)", Literal("3", datatype=XSD.decimal)), + (r"round(-2.5)", 
Literal("-2", datatype=XSD.decimal)), + (r"round(0.1)", Literal("0", datatype=XSD.decimal)), + (r"round(-0.1)", Literal("0", datatype=XSD.decimal)), + (r"RAND()", Literal), + (r"now()", Literal), + (r'month("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime)', Literal(1)), + (r'day("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime)', Literal(10)), + (r'hours("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime)', Literal(14)), + (r'minutes("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime)', Literal(45)), + ( + r'seconds("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime)', + Literal(Decimal("13.815")), + ), + ( + r'timezone("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime)', + Literal("-PT5H", datatype=XSD.dayTimeDuration), + ), + ( + r'timezone("2011-01-10T14:45:13.815Z"^^xsd:dateTime)', + Literal("PT0S", datatype=XSD.dayTimeDuration), + ), + ( + r'tz("2011-01-10T14:45:13.815-05:00"^^xsd:dateTime) ', + Literal("-05:00"), + ), + ( + r'tz("2011-01-10T14:45:13.815Z"^^xsd:dateTime) ', + Literal("Z"), + ), + ( + r'tz("2011-01-10T14:45:13.815"^^xsd:dateTime) ', + Literal(""), + ), + (r'MD5("abc")', Literal("900150983cd24fb0d6963f7d28e17f72")), + (r'SHA1("abc")', Literal("a9993e364706816aba3e25717850c26c9cd0d89d")), + ( + r'SHA256("abc")', + Literal("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"), + ), + ( + r'SHA384("abc")', + Literal( + "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7" + ), + ), + ( + r'SHA512("abc")', + Literal( + "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f" + ), + ), + ], +) +def test_function(expression: str, expected_result: Identifier) -> None: + graph = Graph() + query_string = """ + PREFIX eg: + PREFIX xsd: + CONSTRUCT { eg:subject eg:predicate ?o } + WHERE { + BIND(???EXPRESSION_PLACEHOLDER??? 
AS ?o) + } + """.replace( + "???EXPRESSION_PLACEHOLDER???", expression + ) + result = graph.query(query_string) + assert result.type == "CONSTRUCT" + assert isinstance(result.graph, Graph) + logging.debug("result = %s", list(result.graph.triples((None, None, None)))) + actual_result = result.graph.value(EG.subject, EG.predicate, any=False) + if isinstance(expected_result, type): + assert isinstance(actual_result, expected_result) + else: + assert expected_result == actual_result + + +@pytest.mark.parametrize( + ["literal", "range", "expected_result"], + [ + (Literal("en"), Literal("en"), True), + (Literal("en"), Literal("EN"), True), + (Literal("EN"), Literal("en"), True), + (Literal("EN"), Literal("EN"), True), + (Literal("en"), Literal("en-US"), False), + (Literal("en-US"), Literal("en-US"), True), + (Literal("en-gb"), Literal("en-GB"), True), + ], +) +def test_lang_range_check( + literal: Literal, range: Literal, expected_result: bool +) -> None: + actual_result = _lang_range_check(range, literal) + assert expected_result == actual_result From 0593c75753b0b80256d448d56328dbc3967cb055 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 26 Mar 2023 13:48:59 +0200 Subject: [PATCH 030/114] chore: prepare for release 6.3.2 (#2315) --- CHANGELOG.md | 180 ++++++++++++++++++++++++++++++++++++++++++++- README.md | 2 +- pyproject.toml | 2 +- rdflib/__init__.py | 2 +- 4 files changed, 182 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d72e4104b..666be380f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,183 @@ -# 2023-03-18 RELEASE 6.3.1 +# 2023-03-26 RELEASE 6.3.2 + +## fix: `ROUND`, `ENCODE_FOR_URI` and `SECONDS` SPARQL functions (#2314) + +Commit [af17916](https://github.com/RDFLib/rdflib/commit/af17916), closes [#2314](https://github.com/RDFLib/rdflib/issues/2314). 
+ + +`ROUND` was not correctly rounding negative numbers towards positive infinity, +`ENCODE_FOR_URI` incorrectly treated `/` as safe, and `SECONDS` did not include +fractional seconds. + +This change corrects these issues. + +- Closes . + + +## fix: add `__hash__` and `__eq__` back to `rdflib.paths.Path` (#2292) + +Commit [fe1a8f8](https://github.com/RDFLib/rdflib/commit/fe1a8f8), closes [#2292](https://github.com/RDFLib/rdflib/issues/2292). + + +These methods were removed when `@total_ordering` was added, but +`@total_ordering` does not add them, so removing them essentially +removes functionality. + +This change adds the methods back and adds tests to ensure they work +correctly. + +All path related tests are also moved into one file. + +- Closes . +- Closes . + + +## fix: Add `to_dict` method to the JSON-LD `Context` class. (#2310) + +Commit [d7883eb](https://github.com/RDFLib/rdflib/commit/d7883eb), closes [#2310](https://github.com/RDFLib/rdflib/issues/2310). + + +`Context.to_dict` is used in JSON-LD serialization, but it was not implemented. +This change adds the method. + +- Closes . + + +## fix: add the `wgs` namespace binding back (#2294) + +Commit [adf8eb2](https://github.com/RDFLib/rdflib/commit/adf8eb2), closes [#2294](https://github.com/RDFLib/rdflib/issues/2294). + + + inadvertently removed the `wgs` prefix. +This change adds it back. + +- Closes . + + +## fix: change the prefix for `https://schema.org/` back to `schema` (#2312) + +Commit [3faa01b](https://github.com/RDFLib/rdflib/commit/3faa01b), closes [#2312](https://github.com/RDFLib/rdflib/issues/2312). + + +The default prefix for `https://schema.org/` registered with +`rdflib.namespace.NamespaceManager` was inadvertently changed to `sdo` in 6.2.0, +this however constitutes a breaking change, as code that was using the `schema` +prefix would no longer have the same behaviour. This change changes the prefix +back to `schema`. 
+ + +## fix: include docs and examples in the sdist tarball (#2289) + +Commit [394fb50](https://github.com/RDFLib/rdflib/commit/394fb50), closes [#2289](https://github.com/RDFLib/rdflib/issues/2289). + + +The sdists generated by setuptools included the `docs` and `examples` +directories, and they are needed for building docs and running tests using the +sdist. + +This change includes these directories in the sdist tarball. + +A `test:sdist` task is also added to `Taskfile.yml` which uses the sdists to run +pytest and build docs. + + +## fix: IRI to URI conversion (#2304) + +Commit [dfa4054](https://github.com/RDFLib/rdflib/commit/dfa4054), closes [#2304](https://github.com/RDFLib/rdflib/issues/2304). + +The URI to IRI conversion was percentage-quoting characters that should not have +been quoted, like equals in the query string. It was also not quoting things +that should have been quoted, like the username and password components of a +URI. + +This change improves the conversion by only quoting characters that are not +allowed in specific parts of the URI and quoting previously unquoted components. +The safe characters for each segment are taken from +[RFC3986](https://datatracker.ietf.org/doc/html/rfc3986). + +The new behavior is heavily inspired by + +[`werkzeug.urls.iri_to_uri`](https://github.com/pallets/werkzeug/blob/92c6380248c7272ee668e1f8bbd80447027ccce2/src/werkzeug/urls.py#L926-L931) +though there are some differences. + +- Closes . + +## fix: JSON-LD context construction from a `dict` (#2306) + +Commit [832e693](https://github.com/RDFLib/rdflib/commit/832e693), closes [#2306](https://github.com/RDFLib/rdflib/issues/2306). + + +A variable was only being initialized for string-valued inputs, but if a `dict` +input was passed the variable would still be accessed, resulting in a +`UnboundLocalError`. + +This change initializes the variable always, instead of only when string-valued +input is used to construct a JSON-LD context. + +- Closes . 
+ + +## fix: reference to global inside `get_target_namespace_elements` (#2311) + +Commit [4da67f9](https://github.com/RDFLib/rdflib/commit/4da67f9), closes [#2311](https://github.com/RDFLib/rdflib/issues/2311). + + +`get_target_namespace_elements` references the `args` global, which is not +defined if the function is called from outside the module. This commit fixes +that instead referencing the argument passed to the function. + +- Closes . + + +## fix: restore the 6.1.1 default bound namespaces (#2313) + +Commit [57bb428](https://github.com/RDFLib/rdflib/commit/57bb428), closes [#2313](https://github.com/RDFLib/rdflib/issues/2313). + + +The namespaces bound by default by `rdflib.graph.Graph` and +`rdflib.namespace.NamespaceManager` was reduced in version 6.2.0 of RDFLib, +however, this also would cause code that worked with 6.1.1 to break, so this +constituted a breaking change. This change restores the previous behaviour, +binding the same namespaces as was bound in 6.1.1. + +To bind a reduced set of namespaces, the `bind_namespaces` parameter of +`rdflib.graph.Graph` or `rdflib.namespace.NamespaceManager` can be used. + +- Closes . + + +## test: add `webtest` marker to tests that use the internet (#2295) + +Commit [cfe6e37](https://github.com/RDFLib/rdflib/commit/cfe6e37), closes [#2295](https://github.com/RDFLib/rdflib/issues/2295). + + +This is being done so that it is easier for downstream packagers to run the test +suite without requiring internet access. + +To run only tests that does not use the internet, run `pytest -m "not webtest"`. + +The validation workflow validates that test run without internet access by +running the tests inside `firejail --net=none`. + +- Closes . + +## chore: Update CONTRIBUTORS from commit history (#2305) + +Commit [1ab4fc0](https://github.com/RDFLib/rdflib/commit/1ab4fc0), closes [#2305](https://github.com/RDFLib/rdflib/issues/2305). + + +This ensures contributors are credited. 
Also added .mailmap to fix early misattributed contributions. + +## docs: fix typo in NamespaceManager documentation (#2291) + +Commit [7a05c15](https://github.com/RDFLib/rdflib/commit/7a05c15), closes [#2291](https://github.com/RDFLib/rdflib/issues/2291). + + +Changed `cdterms` to `dcterms`, see for more info. + + +# 2023-03-18 RELEASE 6.3.1 This is a patch release that includes a singular user facing fix, which is the inclusion of the `test` directory in the `sdist` release artifact. diff --git a/README.md b/README.md index 172413e8b..6157ef8ef 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ The stable release of RDFLib may be installed with Python's package management t Alternatively manually download the package from the Python Package Index (PyPI) at https://pypi.python.org/pypi/rdflib -The current version of RDFLib is 6.3.1, see the ``CHANGELOG.md`` file for what's new in this release. +The current version of RDFLib is 6.3.2, see the ``CHANGELOG.md`` file for what's new in this release. 
### Installation of the current main branch (for developers) diff --git a/pyproject.toml b/pyproject.toml index ddbe2700e..4003c3b71 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.4.0a0" +version = "6.3.2" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] diff --git a/rdflib/__init__.py b/rdflib/__init__.py index bc49dc489..0a7610f38 100644 --- a/rdflib/__init__.py +++ b/rdflib/__init__.py @@ -56,7 +56,7 @@ __docformat__ = "restructuredtext en" __version__: str = _DISTRIBUTION_METADATA["Version"] -__date__ = "2023-03-18" +__date__ = "2023-03-26" __all__ = [ "URIRef", From 0d07f9bc014562d77121768414ec20fd9382ed0a Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 26 Mar 2023 14:26:58 +0200 Subject: [PATCH 031/114] chore: post release cleanup (#2317) Prepares the main branch for normal usage. --- docker/latest/requirements.in | 2 +- docker/latest/requirements.txt | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/latest/requirements.in b/docker/latest/requirements.in index 841a97e4d..93e26ac09 100644 --- a/docker/latest/requirements.in +++ b/docker/latest/requirements.in @@ -1,4 +1,4 @@ # This file is used for building a docker image of hte latest rdflib release. It # will be updated by dependabot when new releases are made. 
-rdflib==6.3.1 +rdflib==6.3.2 html5lib diff --git a/docker/latest/requirements.txt b/docker/latest/requirements.txt index d0ad24aef..5b3bae5d3 100644 --- a/docker/latest/requirements.txt +++ b/docker/latest/requirements.txt @@ -10,7 +10,7 @@ isodate==0.6.1 # via rdflib pyparsing==3.0.9 # via rdflib -rdflib==6.3.1 +rdflib==6.3.2 # via -r docker/latest/requirements.in six==1.16.0 # via diff --git a/pyproject.toml b/pyproject.toml index 4003c3b71..ddbe2700e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.3.2" +version = "6.4.0a0" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] From ae6b859c9dbf08d3385f44fc348454891dbf520c Mon Sep 17 00:00:00 2001 From: "Jeffrey C. Lerman" Date: Mon, 27 Mar 2023 14:52:28 -0700 Subject: [PATCH 032/114] fix: eliminate file intermediary in translate algebra (#2267) Previously, `rdflib.plugins.sparql.algebra.translateAlgebra()` maintained state via a file, with a fixed filename `query.txt`. With this change, use of that file is eliminated; state is now maintained in memory so that multiple concurrent `translateAlgebra()` calls, for example, should no longer interfere with each other. The change is accomplished with no change to the client interface. Basically, the actual functionality has been moved into a class, which is instantiated and used as needed (once per call to `algrebra.translateAlgebra()`). 
--- rdflib/plugins/sparql/algebra.py | 454 ++++++++++++++++++------------- 1 file changed, 265 insertions(+), 189 deletions(-) diff --git a/rdflib/plugins/sparql/algebra.py b/rdflib/plugins/sparql/algebra.py index 5fd9e59bc..52aa92a7f 100644 --- a/rdflib/plugins/sparql/algebra.py +++ b/rdflib/plugins/sparql/algebra.py @@ -955,31 +955,39 @@ class ExpressionNotCoveredException(Exception): # noqa: N818 pass -def translateAlgebra(query_algebra: Query) -> str: +class _AlgebraTranslator: """ + Translator of a Query's algebra to its equivalent SPARQL (string). - :param query_algebra: An algebra returned by the function call algebra.translateQuery(parse_tree). - :return: The query form generated from the SPARQL 1.1 algebra tree for select queries. + Coded as a class to support storage of state during the translation process, + without use of a file. - """ - import os + Anticipated Usage: + + .. code-block:: python - def overwrite(text: str): - file = open("query.txt", "w+") - file.write(text) - file.close() + translated_query = _AlgebraTranslator(query).translateAlgebra() + + An external convenience function which wraps the above call, + `translateAlgebra`, is supplied, so this class does not need to be + referenced by client code at all in normal use. 
+ """ - def replace( - old, - new, + def __init__(self, query_algebra: Query): + self.query_algebra = query_algebra + self.aggr_vars: DefaultDict[ + Identifier, List[Identifier] + ] = collections.defaultdict(list) + self._alg_translation: str = "" + + def _replace( + self, + old: str, + new: str, search_from_match: str = None, search_from_match_occurrence: int = None, count: int = 1, ): - # Read in the file - with open("query.txt", "r") as file: - filedata = file.read() - def find_nth(haystack, needle, n): start = haystack.lower().find(needle) while start >= 0 and n > 1: @@ -989,27 +997,21 @@ def find_nth(haystack, needle, n): if search_from_match and search_from_match_occurrence: position = find_nth( - filedata, search_from_match, search_from_match_occurrence + self._alg_translation, search_from_match, search_from_match_occurrence ) - filedata_pre = filedata[:position] - filedata_post = filedata[position:].replace(old, new, count) - filedata = filedata_pre + filedata_post + filedata_pre = self._alg_translation[:position] + filedata_post = self._alg_translation[position:].replace(old, new, count) + self._alg_translation = filedata_pre + filedata_post else: - filedata = filedata.replace(old, new, count) - - # Write the file out again - with open("query.txt", "w") as file: - file.write(filedata) - - aggr_vars: DefaultDict[Identifier, List[Identifier]] = collections.defaultdict(list) + self._alg_translation = self._alg_translation.replace(old, new, count) def convert_node_arg( - node_arg: typing.Union[Identifier, CompValue, Expr, str] + self, node_arg: typing.Union[Identifier, CompValue, Expr, str] ) -> str: if isinstance(node_arg, Identifier): - if node_arg in aggr_vars.keys(): + if node_arg in self.aggr_vars.keys(): # type error: "Identifier" has no attribute "n3" - grp_var = aggr_vars[node_arg].pop(0).n3() # type: ignore[attr-defined] + grp_var = self.aggr_vars[node_arg].pop(0).n3() # type: ignore[attr-defined] return grp_var else: # type error: "Identifier" has no 
attribute "n3" @@ -1025,7 +1027,7 @@ def convert_node_arg( "The expression {0} might not be covered yet.".format(node_arg) ) - def sparql_query_text(node): + def sparql_query_text(self, node): """ https://www.w3.org/TR/sparql11-query/#sparqlSyntax @@ -1036,7 +1038,7 @@ def sparql_query_text(node): if isinstance(node, CompValue): # 18.2 Query Forms if node.name == "SelectQuery": - overwrite("-*-SELECT-*- " + "{" + node.p.name + "}") + self._alg_translation = "-*-SELECT-*- " + "{" + node.p.name + "}" # 18.2 Graph Patterns elif node.name == "BGP": @@ -1046,18 +1048,20 @@ def sparql_query_text(node): triple[0].n3() + " " + triple[1].n3() + " " + triple[2].n3() + "." for triple in node.triples ) - replace("{BGP}", triples) + self._replace("{BGP}", triples) # The dummy -*-SELECT-*- is placed during a SelectQuery or Multiset pattern in order to be able # to match extended variables in a specific Select-clause (see "Extend" below) - replace("-*-SELECT-*-", "SELECT", count=-1) + self._replace("-*-SELECT-*-", "SELECT", count=-1) # If there is no "Group By" clause the placeholder will simply be deleted. 
Otherwise there will be # no matching {GroupBy} placeholder because it has already been replaced by "group by variables" - replace("{GroupBy}", "", count=-1) - replace("{Having}", "", count=-1) + self._replace("{GroupBy}", "", count=-1) + self._replace("{Having}", "", count=-1) elif node.name == "Join": - replace("{Join}", "{" + node.p1.name + "}{" + node.p2.name + "}") # + self._replace( + "{Join}", "{" + node.p1.name + "}{" + node.p2.name + "}" + ) # elif node.name == "LeftJoin": - replace( + self._replace( "{LeftJoin}", "{" + node.p1.name + "}OPTIONAL{{" + node.p2.name + "}}", ) @@ -1071,35 +1075,39 @@ def sparql_query_text(node): if node.p: # Filter with p=AggregateJoin = Having if node.p.name == "AggregateJoin": - replace("{Filter}", "{" + node.p.name + "}") - replace("{Having}", "HAVING({" + expr + "})") + self._replace("{Filter}", "{" + node.p.name + "}") + self._replace("{Having}", "HAVING({" + expr + "})") else: - replace( + self._replace( "{Filter}", "FILTER({" + expr + "}) {" + node.p.name + "}" ) else: - replace("{Filter}", "FILTER({" + expr + "})") + self._replace("{Filter}", "FILTER({" + expr + "})") elif node.name == "Union": - replace( + self._replace( "{Union}", "{{" + node.p1.name + "}}UNION{{" + node.p2.name + "}}" ) elif node.name == "Graph": expr = "GRAPH " + node.term.n3() + " {{" + node.p.name + "}}" - replace("{Graph}", expr) + self._replace("{Graph}", expr) elif node.name == "Extend": - query_string = open("query.txt", "r").read().lower() + query_string = self._alg_translation.lower() select_occurrences = query_string.count("-*-select-*-") - replace( + self._replace( node.var.n3(), - "(" + convert_node_arg(node.expr) + " as " + node.var.n3() + ")", + "(" + + self.convert_node_arg(node.expr) + + " as " + + node.var.n3() + + ")", search_from_match="-*-select-*-", search_from_match_occurrence=select_occurrences, ) - replace("{Extend}", "{" + node.p.name + "}") + self._replace("{Extend}", "{" + node.p.name + "}") elif node.name == "Minus": expr 
= "{" + node.p1.name + "}MINUS{{" + node.p2.name + "}}" - replace("{Minus}", expr) + self._replace("{Minus}", expr) elif node.name == "Group": group_by_vars = [] if node.expr: @@ -1110,12 +1118,14 @@ def sparql_query_text(node): raise ExpressionNotCoveredException( "This expression might not be covered yet." ) - replace("{Group}", "{" + node.p.name + "}") - replace("{GroupBy}", "GROUP BY " + " ".join(group_by_vars) + " ") + self._replace("{Group}", "{" + node.p.name + "}") + self._replace( + "{GroupBy}", "GROUP BY " + " ".join(group_by_vars) + " " + ) else: - replace("{Group}", "{" + node.p.name + "}") + self._replace("{Group}", "{" + node.p.name + "}") elif node.name == "AggregateJoin": - replace("{AggregateJoin}", "{" + node.p.name + "}") + self._replace("{AggregateJoin}", "{" + node.p.name + "}") for agg_func in node.A: if isinstance(agg_func.res, Identifier): identifier = agg_func.res.n3() @@ -1123,14 +1133,14 @@ def sparql_query_text(node): raise ExpressionNotCoveredException( "This expression might not be covered yet." ) - aggr_vars[agg_func.res].append(agg_func.vars) + self.aggr_vars[agg_func.res].append(agg_func.vars) agg_func_name = agg_func.name.split("_")[1] distinct = "" if agg_func.distinct: distinct = agg_func.distinct + " " if agg_func_name == "GroupConcat": - replace( + self._replace( identifier, "GROUP_CONCAT" + "(" @@ -1141,30 +1151,32 @@ def sparql_query_text(node): + ")", ) else: - replace( + self._replace( identifier, agg_func_name.upper() + "(" + distinct - + convert_node_arg(agg_func.vars) + + self.convert_node_arg(agg_func.vars) + ")", ) # For non-aggregated variables the aggregation function "sample" is automatically assigned. # However, we do not want to have "sample" wrapped around non-aggregated variables. That is # why we replace it. If "sample" is used on purpose it will not be replaced as the alias # must be different from the variable in this case. 
- replace( - "(SAMPLE({0}) as {0})".format(convert_node_arg(agg_func.vars)), - convert_node_arg(agg_func.vars), + self._replace( + "(SAMPLE({0}) as {0})".format( + self.convert_node_arg(agg_func.vars) + ), + self.convert_node_arg(agg_func.vars), ) elif node.name == "GroupGraphPatternSub": - replace( + self._replace( "GroupGraphPatternSub", - " ".join([convert_node_arg(pattern) for pattern in node.part]), + " ".join([self.convert_node_arg(pattern) for pattern in node.part]), ) elif node.name == "TriplesBlock": print("triplesblock") - replace( + self._replace( "{TriplesBlock}", "".join( triple[0].n3() @@ -1196,8 +1208,8 @@ def sparql_query_text(node): raise ExpressionNotCoveredException( "This expression might not be covered yet." ) - replace("{OrderBy}", "{" + node.p.name + "}") - replace("{OrderConditions}", " ".join(order_conditions) + " ") + self._replace("{OrderBy}", "{" + node.p.name + "}") + self._replace("{OrderConditions}", " ".join(order_conditions) + " ") elif node.name == "Project": project_variables = [] for var in node.PV: @@ -1210,7 +1222,7 @@ def sparql_query_text(node): order_by_pattern = "" if node.p.name == "OrderBy": order_by_pattern = "ORDER BY {OrderConditions}" - replace( + self._replace( "{Project}", " ".join(project_variables) + "{{" @@ -1221,17 +1233,17 @@ def sparql_query_text(node): + "{Having}", ) elif node.name == "Distinct": - replace("{Distinct}", "DISTINCT {" + node.p.name + "}") + self._replace("{Distinct}", "DISTINCT {" + node.p.name + "}") elif node.name == "Reduced": - replace("{Reduced}", "REDUCED {" + node.p.name + "}") + self._replace("{Reduced}", "REDUCED {" + node.p.name + "}") elif node.name == "Slice": slice = "OFFSET " + str(node.start) + " LIMIT " + str(node.length) - replace("{Slice}", "{" + node.p.name + "}" + slice) + self._replace("{Slice}", "{" + node.p.name + "}" + slice) elif node.name == "ToMultiSet": if node.p.name == "values": - replace("{ToMultiSet}", "{{" + node.p.name + "}}") + self._replace("{ToMultiSet}", 
"{{" + node.p.name + "}}") else: - replace( + self._replace( "{ToMultiSet}", "{-*-SELECT-*- " + "{" + node.p.name + "}" + "}" ) @@ -1240,71 +1252,73 @@ def sparql_query_text(node): # 17 Expressions and Testing Values # # 17.3 Operator Mapping elif node.name == "RelationalExpression": - expr = convert_node_arg(node.expr) + expr = self.convert_node_arg(node.expr) op = node.op if isinstance(list, type(node.other)): other = ( "(" - + ", ".join(convert_node_arg(expr) for expr in node.other) + + ", ".join(self.convert_node_arg(expr) for expr in node.other) + ")" ) else: - other = convert_node_arg(node.other) + other = self.convert_node_arg(node.other) condition = "{left} {operator} {right}".format( left=expr, operator=op, right=other ) - replace("{RelationalExpression}", condition) + self._replace("{RelationalExpression}", condition) elif node.name == "ConditionalAndExpression": inner_nodes = " && ".join( - [convert_node_arg(expr) for expr in node.other] + [self.convert_node_arg(expr) for expr in node.other] ) - replace( + self._replace( "{ConditionalAndExpression}", - convert_node_arg(node.expr) + " && " + inner_nodes, + self.convert_node_arg(node.expr) + " && " + inner_nodes, ) elif node.name == "ConditionalOrExpression": inner_nodes = " || ".join( - [convert_node_arg(expr) for expr in node.other] + [self.convert_node_arg(expr) for expr in node.other] ) - replace( + self._replace( "{ConditionalOrExpression}", - "(" + convert_node_arg(node.expr) + " || " + inner_nodes + ")", + "(" + self.convert_node_arg(node.expr) + " || " + inner_nodes + ")", ) elif node.name == "MultiplicativeExpression": - left_side = convert_node_arg(node.expr) + left_side = self.convert_node_arg(node.expr) multiplication = left_side for i, operator in enumerate(node.op): # noqa: F402 multiplication += ( - operator + " " + convert_node_arg(node.other[i]) + " " + operator + " " + self.convert_node_arg(node.other[i]) + " " ) - replace("{MultiplicativeExpression}", multiplication) + 
self._replace("{MultiplicativeExpression}", multiplication) elif node.name == "AdditiveExpression": - left_side = convert_node_arg(node.expr) + left_side = self.convert_node_arg(node.expr) addition = left_side for i, operator in enumerate(node.op): - addition += operator + " " + convert_node_arg(node.other[i]) + " " - replace("{AdditiveExpression}", addition) + addition += ( + operator + " " + self.convert_node_arg(node.other[i]) + " " + ) + self._replace("{AdditiveExpression}", addition) elif node.name == "UnaryNot": - replace("{UnaryNot}", "!" + convert_node_arg(node.expr)) + self._replace("{UnaryNot}", "!" + self.convert_node_arg(node.expr)) # # 17.4 Function Definitions # # # 17.4.1 Functional Forms elif node.name.endswith("BOUND"): - bound_var = convert_node_arg(node.arg) - replace("{Builtin_BOUND}", "bound(" + bound_var + ")") + bound_var = self.convert_node_arg(node.arg) + self._replace("{Builtin_BOUND}", "bound(" + bound_var + ")") elif node.name.endswith("IF"): - arg2 = convert_node_arg(node.arg2) - arg3 = convert_node_arg(node.arg3) + arg2 = self.convert_node_arg(node.arg2) + arg3 = self.convert_node_arg(node.arg3) if_expression = ( "IF(" + "{" + node.arg1.name + "}, " + arg2 + ", " + arg3 + ")" ) - replace("{Builtin_IF}", if_expression) + self._replace("{Builtin_IF}", if_expression) elif node.name.endswith("COALESCE"): - replace( + self._replace( "{Builtin_COALESCE}", "COALESCE(" - + ", ".join(convert_node_arg(arg) for arg in node.arg) + + ", ".join(self.convert_node_arg(arg) for arg in node.arg) + ")", ) elif node.name.endswith("Builtin_EXISTS"): @@ -1312,8 +1326,10 @@ def sparql_query_text(node): # According to https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#rExistsFunc # ExistsFunc can only have a GroupGraphPattern as parameter. 
However, when we print the query algebra # we get a GroupGraphPatternSub - replace("{Builtin_EXISTS}", "EXISTS " + "{{" + node.graph.name + "}}") - traverse(node.graph, visitPre=sparql_query_text) + self._replace( + "{Builtin_EXISTS}", "EXISTS " + "{{" + node.graph.name + "}}" + ) + traverse(node.graph, visitPre=self.sparql_query_text) return node.graph elif node.name.endswith("Builtin_NOTEXISTS"): # The node's name which we get with node.graph.name returns "Join" instead of GroupGraphPatternSub @@ -1321,21 +1337,21 @@ def sparql_query_text(node): # NotExistsFunc can only have a GroupGraphPattern as parameter. However, when we print the query algebra # we get a GroupGraphPatternSub print(node.graph.name) - replace( + self._replace( "{Builtin_NOTEXISTS}", "NOT EXISTS " + "{{" + node.graph.name + "}}" ) - traverse(node.graph, visitPre=sparql_query_text) + traverse(node.graph, visitPre=self.sparql_query_text) return node.graph # # # # 17.4.1.5 logical-or: Covered in "RelationalExpression" # # # # 17.4.1.6 logical-and: Covered in "RelationalExpression" # # # # 17.4.1.7 RDFterm-equal: Covered in "RelationalExpression" elif node.name.endswith("sameTerm"): - replace( + self._replace( "{Builtin_sameTerm}", "SAMETERM(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) # # # # IN: Covered in "RelationalExpression" @@ -1343,205 +1359,253 @@ def sparql_query_text(node): # # # 17.4.2 Functions on RDF Terms elif node.name.endswith("Builtin_isIRI"): - replace("{Builtin_isIRI}", "isIRI(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_isIRI}", "isIRI(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("Builtin_isBLANK"): - replace( - "{Builtin_isBLANK}", "isBLANK(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_isBLANK}", + "isBLANK(" + self.convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_isLITERAL"): - 
replace( + self._replace( "{Builtin_isLITERAL}", - "isLITERAL(" + convert_node_arg(node.arg) + ")", + "isLITERAL(" + self.convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_isNUMERIC"): - replace( + self._replace( "{Builtin_isNUMERIC}", - "isNUMERIC(" + convert_node_arg(node.arg) + ")", + "isNUMERIC(" + self.convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_STR"): - replace("{Builtin_STR}", "STR(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_STR}", "STR(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("Builtin_LANG"): - replace("{Builtin_LANG}", "LANG(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_LANG}", "LANG(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("Builtin_DATATYPE"): - replace( - "{Builtin_DATATYPE}", "DATATYPE(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_DATATYPE}", + "DATATYPE(" + self.convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_IRI"): - replace("{Builtin_IRI}", "IRI(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_IRI}", "IRI(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("Builtin_BNODE"): - replace("{Builtin_BNODE}", "BNODE(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_BNODE}", "BNODE(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("STRDT"): - replace( + self._replace( "{Builtin_STRDT}", "STRDT(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRLANG"): - replace( + self._replace( "{Builtin_STRLANG}", "STRLANG(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_UUID"): - replace("{Builtin_UUID}", "UUID()") + 
self._replace("{Builtin_UUID}", "UUID()") elif node.name.endswith("Builtin_STRUUID"): - replace("{Builtin_STRUUID}", "STRUUID()") + self._replace("{Builtin_STRUUID}", "STRUUID()") # # # 17.4.3 Functions on Strings elif node.name.endswith("Builtin_STRLEN"): - replace( - "{Builtin_STRLEN}", "STRLEN(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_STRLEN}", + "STRLEN(" + self.convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_SUBSTR"): - args = [convert_node_arg(node.arg), node.start] + args = [self.convert_node_arg(node.arg), node.start] if node.length: args.append(node.length) expr = "SUBSTR(" + ", ".join(args) + ")" - replace("{Builtin_SUBSTR}", expr) + self._replace("{Builtin_SUBSTR}", expr) elif node.name.endswith("Builtin_UCASE"): - replace("{Builtin_UCASE}", "UCASE(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_UCASE}", "UCASE(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("Builtin_LCASE"): - replace("{Builtin_LCASE}", "LCASE(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_LCASE}", "LCASE(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name.endswith("Builtin_STRSTARTS"): - replace( + self._replace( "{Builtin_STRSTARTS}", "STRSTARTS(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRENDS"): - replace( + self._replace( "{Builtin_STRENDS}", "STRENDS(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_CONTAINS"): - replace( + self._replace( "{Builtin_CONTAINS}", "CONTAINS(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRBEFORE"): - replace( + 
self._replace( "{Builtin_STRBEFORE}", "STRBEFORE(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRAFTER"): - replace( + self._replace( "{Builtin_STRAFTER}", "STRAFTER(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_ENCODE_FOR_URI"): - replace( + self._replace( "{Builtin_ENCODE_FOR_URI}", - "ENCODE_FOR_URI(" + convert_node_arg(node.arg) + ")", + "ENCODE_FOR_URI(" + self.convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_CONCAT"): expr = "CONCAT({vars})".format( - vars=", ".join(convert_node_arg(elem) for elem in node.arg) + vars=", ".join(self.convert_node_arg(elem) for elem in node.arg) ) - replace("{Builtin_CONCAT}", expr) + self._replace("{Builtin_CONCAT}", expr) elif node.name.endswith("Builtin_LANGMATCHES"): - replace( + self._replace( "{Builtin_LANGMATCHES}", "LANGMATCHES(" - + convert_node_arg(node.arg1) + + self.convert_node_arg(node.arg1) + ", " - + convert_node_arg(node.arg2) + + self.convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("REGEX"): - args = [convert_node_arg(node.text), convert_node_arg(node.pattern)] + args = [ + self.convert_node_arg(node.text), + self.convert_node_arg(node.pattern), + ] expr = "REGEX(" + ", ".join(args) + ")" - replace("{Builtin_REGEX}", expr) + self._replace("{Builtin_REGEX}", expr) elif node.name.endswith("REPLACE"): - replace( + self._replace( "{Builtin_REPLACE}", "REPLACE(" - + convert_node_arg(node.arg) + + self.convert_node_arg(node.arg) + ", " - + convert_node_arg(node.pattern) + + self.convert_node_arg(node.pattern) + ", " - + convert_node_arg(node.replacement) + + self.convert_node_arg(node.replacement) + ")", ) # # # 17.4.4 Functions on Numerics elif node.name == "Builtin_ABS": - replace("{Builtin_ABS}", 
"ABS(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_ABS}", "ABS(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_ROUND": - replace("{Builtin_ROUND}", "ROUND(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_ROUND}", "ROUND(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_CEIL": - replace("{Builtin_CEIL}", "CEIL(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_CEIL}", "CEIL(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_FLOOR": - replace("{Builtin_FLOOR}", "FLOOR(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_FLOOR}", "FLOOR(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_RAND": - replace("{Builtin_RAND}", "RAND()") + self._replace("{Builtin_RAND}", "RAND()") # # # 17.4.5 Functions on Dates and Times elif node.name == "Builtin_NOW": - replace("{Builtin_NOW}", "NOW()") + self._replace("{Builtin_NOW}", "NOW()") elif node.name == "Builtin_YEAR": - replace("{Builtin_YEAR}", "YEAR(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_YEAR}", "YEAR(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_MONTH": - replace("{Builtin_MONTH}", "MONTH(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_MONTH}", "MONTH(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_DAY": - replace("{Builtin_DAY}", "DAY(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_DAY}", "DAY(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_HOURS": - replace("{Builtin_HOURS}", "HOURS(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_HOURS}", "HOURS(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_MINUTES": - replace( - "{Builtin_MINUTES}", "MINUTES(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_MINUTES}", + "MINUTES(" + self.convert_node_arg(node.arg) + ")", ) elif 
node.name == "Builtin_SECONDS": - replace( - "{Builtin_SECONDS}", "SECONDS(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_SECONDS}", + "SECONDS(" + self.convert_node_arg(node.arg) + ")", ) elif node.name == "Builtin_TIMEZONE": - replace( - "{Builtin_TIMEZONE}", "TIMEZONE(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_TIMEZONE}", + "TIMEZONE(" + self.convert_node_arg(node.arg) + ")", ) elif node.name == "Builtin_TZ": - replace("{Builtin_TZ}", "TZ(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_TZ}", "TZ(" + self.convert_node_arg(node.arg) + ")" + ) # # # 17.4.6 Hash functions elif node.name == "Builtin_MD5": - replace("{Builtin_MD5}", "MD5(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_MD5}", "MD5(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_SHA1": - replace("{Builtin_SHA1}", "SHA1(" + convert_node_arg(node.arg) + ")") + self._replace( + "{Builtin_SHA1}", "SHA1(" + self.convert_node_arg(node.arg) + ")" + ) elif node.name == "Builtin_SHA256": - replace( - "{Builtin_SHA256}", "SHA256(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_SHA256}", + "SHA256(" + self.convert_node_arg(node.arg) + ")", ) elif node.name == "Builtin_SHA384": - replace( - "{Builtin_SHA384}", "SHA384(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_SHA384}", + "SHA384(" + self.convert_node_arg(node.arg) + ")", ) elif node.name == "Builtin_SHA512": - replace( - "{Builtin_SHA512}", "SHA512(" + convert_node_arg(node.arg) + ")" + self._replace( + "{Builtin_SHA512}", + "SHA512(" + self.convert_node_arg(node.arg) + ")", ) # Other @@ -1574,25 +1638,37 @@ def sparql_query_text(node): ) rows += "(" + " ".join(row) + ")" - replace("values", values + "{" + rows + "}") + self._replace("values", values + "{" + rows + "}") elif node.name == "ServiceGraphPattern": - replace( + self._replace( "{ServiceGraphPattern}", "SERVICE " - + convert_node_arg(node.term) + + 
self.convert_node_arg(node.term) + "{" + node.graph.name + "}", ) - traverse(node.graph, visitPre=sparql_query_text) + traverse(node.graph, visitPre=self.sparql_query_text) return node.graph # else: # raise ExpressionNotCoveredException("The expression {0} might not be covered yet.".format(node.name)) - traverse(query_algebra.algebra, visitPre=sparql_query_text) - query_from_algebra = open("query.txt", "r").read() - os.remove("query.txt") + def translateAlgebra(self) -> str: + traverse(self.query_algebra.algebra, visitPre=self.sparql_query_text) + return self._alg_translation + +def translateAlgebra(query_algebra: Query) -> str: + """ + Translates a SPARQL 1.1 algebra tree into the corresponding query string. + + :param query_algebra: An algebra returned by `translateQuery`. + :return: The query form generated from the SPARQL 1.1 algebra tree for + SELECT queries. + """ + query_from_algebra = _AlgebraTranslator( + query_algebra=query_algebra + ).translateAlgebra() return query_from_algebra From 6a48c3e5c339087824cabf6b1601d8d433708c0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 23:52:58 +0200 Subject: [PATCH 033/114] build(deps): bump library/python in /docker/unstable (#2319) Bumps library/python from `1d2b710` to `2f749ef`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index a858e5b6d..f70e3a04d 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.2-slim@sha256:1d2b7101658e795e4d878d3f54f3354838630e1d16f5868ea18b338c12bb92c9 +FROM docker.io/library/python:3.11.2-slim@sha256:2f749ef90f54fd4b3c77cde78eec23ab5b8199d9ac84e4ced6ae523ef223ef7b # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From 081a974bc91d31d55087f3c6c516b95960ae8bfa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 23:53:15 +0200 Subject: [PATCH 034/114] build(deps): bump importlib-metadata from 4.13.0 to 6.1.0 (#2318) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 4.13.0 to 6.1.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v4.13.0...v6.1.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 14eabafee..014a7f25e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -427,14 +427,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "4.13.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, + {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, + {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, ] [package.dependencies] @@ -442,7 +442,7 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] @@ -1469,4 +1469,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "d58fda334d4ffe3e0d4ad1752ff57134f99436d2f66839358ecdd0912a36ea73" +content-hash = 
"9c119fd46d7acfac3d3b09e44e23e5e15458c790b957db9b38032df71353d1aa" diff --git a/pyproject.toml b/pyproject.toml index ddbe2700e..63f87a593 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ rdfgraphisomorphism = 'rdflib.tools.graphisomorphism:main' python = "^3.7" isodate = "^0.6.0" pyparsing = ">=2.1.0,<4" -importlib-metadata = {version = "^4.0.0", python = ">=3.7,<3.8"} +importlib-metadata = {version = ">=4,<7", python = ">=3.7,<3.8"} berkeleydb = {version = "^18.1.0", optional = true} networkx = {version = "^2.0.0", optional = true} html5lib = {version = "^1.0", optional = true} From 1c45ec4e46d6257011b6e55888c5efb8470e4049 Mon Sep 17 00:00:00 2001 From: Elliot Ford Date: Mon, 3 Apr 2023 09:29:03 +0100 Subject: [PATCH 035/114] fix: widen `Graph.__contains__` type-hints to accept `Path` values (#2323) Change the type-hints for `Graph.__contains__` to also accept `Path` values as the parameter is passed to the `Graph.triples` function, which accepts `Path` values. --- rdflib/graph.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/rdflib/graph.py b/rdflib/graph.py index 4a96e6d37..809241df4 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -105,8 +105,6 @@ _TripleOrQuadSelectorType = Union["_TripleSelectorType", "_QuadSelectorType"] _TriplePathType = Tuple["_SubjectType", Path, "_ObjectType"] _TripleOrTriplePathType = Union["_TripleType", "_TriplePathType"] -# _QuadPathType = Tuple["_SubjectType", Path, "_ObjectType", "_ContextType"] -# _QuadOrQuadPathType = Union["_QuadType", "_QuadPathType"] _GraphT = TypeVar("_GraphT", bound="Graph") _ConjunctiveGraphT = TypeVar("_ConjunctiveGraphT", bound="ConjunctiveGraph") @@ -677,7 +675,7 @@ def __iter__(self) -> Generator["_TripleType", None, None]: """Iterates over all triples in the store""" return self.triples((None, None, None)) - def __contains__(self, triple: _TriplePatternType) -> bool: + def __contains__(self, triple: _TripleSelectorType) -> bool: """Support for 'triple 
in graph' syntax""" for triple in self.triples(triple): return True @@ -1979,7 +1977,7 @@ def _spoc( c = self._graph(c) return s, p, o, c - def __contains__(self, triple_or_quad: _TripleOrQuadPatternType) -> bool: + def __contains__(self, triple_or_quad: _TripleOrQuadSelectorType) -> bool: """Support for 'triple/quad in graph' syntax""" s, p, o, c = self._spoc(triple_or_quad) for t in self.triples((s, p, o), context=c): @@ -2753,7 +2751,7 @@ def triples( for s1, p1, o1 in graph.triples((s, p, o)): yield s1, p1, o1 - def __contains__(self, triple_or_quad: _TripleOrQuadPatternType) -> bool: + def __contains__(self, triple_or_quad: _TripleOrQuadSelectorType) -> bool: context = None if len(triple_or_quad) == 4: # type error: Tuple index out of range From 93d876ceccb96145fd0cf9c2fcead24ef2060cc6 Mon Sep 17 00:00:00 2001 From: Elliot Ford Date: Fri, 7 Apr 2023 10:22:07 +0100 Subject: [PATCH 036/114] docs: fix typo in security considerations documentation Replaced "access" with "accessed". --- docs/security_considerations.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/security_considerations.rst b/docs/security_considerations.rst index 7e25695b7..c6edb5ddc 100644 --- a/docs/security_considerations.rst +++ b/docs/security_considerations.rst @@ -8,7 +8,7 @@ RDFLib is designed to access arbitrary network and file resources, in some cases these are directly requested resources, in other cases they are indirectly referenced resources. -An example of where indirect resources are access is JSON-LD processing, where +An example of where indirect resources are accessed is JSON-LD processing, where network or file resources referenced by ``@context`` values will be loaded and processed. 
From 5fcf20770a2c964fb8ddb260e89737da9ab352f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Apr 2023 11:23:16 +0200 Subject: [PATCH 037/114] build(deps-dev): bump types-setuptools from 67.6.0.5 to 67.6.0.6 (#2325) Bumps [types-setuptools](https://github.com/python/typeshed) from 67.6.0.5 to 67.6.0.6. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 014a7f25e..1277cb510 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1393,14 +1393,14 @@ files = [ [[package]] name = "types-setuptools" -version = "67.6.0.5" +version = "67.6.0.6" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.6.0.5.tar.gz", hash = "sha256:3a708e66c7bdc620e4d0439f344c750c57a4340c895a4c3ed2d0fc4ae8eb9962"}, - {file = "types_setuptools-67.6.0.5-py3-none-any.whl", hash = "sha256:dae5a4a659dbb6dba57773440f6e2dbdd8ef282dc136a174a8a59bd33d949945"}, + {file = "types-setuptools-67.6.0.6.tar.gz", hash = "sha256:a5efd019b53c5793a112e0112b91bb402df749f75ee7abe681b6240841650ce4"}, + {file = "types_setuptools-67.6.0.6-py3-none-any.whl", hash = "sha256:fb46e651c6b5880ec8932a8876aa832071d612b2954fc5156b09cd263927d85b"}, ] [[package]] From 4fb468d970bb188981f2259d2c4d731b95fa8504 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 7 Apr 2023 11:23:44 +0200 Subject: [PATCH 038/114] build(deps-dev): bump setuptools from 67.6.0 to 67.6.1 (#2326) Bumps 
[setuptools](https://github.com/pypa/setuptools) from 67.6.0 to 67.6.1. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v67.6.0...v67.6.1) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1277cb510..97c160c0f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1127,14 +1127,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.6.0" +version = "67.6.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, - {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, ] [package.extras] From 89982f836780650389fb893d7d94e57a0d512cf5 Mon Sep 17 00:00:00 2001 From: Charles Tapley Hoyt Date: Fri, 7 Apr 2023 13:39:43 +0200 Subject: [PATCH 039/114] fix: eliminate some mutable default arguments in SPARQL code (#2301) This change eliminates some situations where a mutable object (i.e., a dictionary) was used as the default value for functions in the `rdflib.plugins.sparql.processor` module and related code. 
It replaces these situations with `typing.Optional` that defaults to None, and is then handled within the function. Luckily, some of the code that the SPARQL Processor relied on already had this style, meaning not a lot of changes had to be made. This change also makes a small update to the logic in the SPARQL Processor's query function to simplify the if/else statement. This better mirrors the implementation in the `UpdateProcessor`. --- rdflib/plugins/sparql/evaluate.py | 4 ++-- rdflib/plugins/sparql/processor.py | 34 ++++++++++++++++++------------ rdflib/plugins/sparql/update.py | 6 ++++-- 3 files changed, 26 insertions(+), 18 deletions(-) diff --git a/rdflib/plugins/sparql/evaluate.py b/rdflib/plugins/sparql/evaluate.py index 4f8d687b4..764250c8c 100644 --- a/rdflib/plugins/sparql/evaluate.py +++ b/rdflib/plugins/sparql/evaluate.py @@ -642,7 +642,7 @@ def evalDescribeQuery(ctx: QueryContext, query) -> Dict[str, Union[str, Graph]]: def evalQuery( graph: Graph, query: Query, - initBindings: Mapping[str, Identifier], + initBindings: Optional[Mapping[str, Identifier]] = None, base: Optional[str] = None, ) -> Mapping[Any, Any]: """ @@ -661,7 +661,7 @@ def evalQuery( documentation. 
""" - initBindings = dict((Variable(k), v) for k, v in initBindings.items()) + initBindings = dict((Variable(k), v) for k, v in (initBindings or {}).items()) ctx = QueryContext(graph, initBindings=initBindings) diff --git a/rdflib/plugins/sparql/processor.py b/rdflib/plugins/sparql/processor.py index c2fb7e54b..f10f372bc 100644 --- a/rdflib/plugins/sparql/processor.py +++ b/rdflib/plugins/sparql/processor.py @@ -19,22 +19,30 @@ def prepareQuery( - queryString: str, initNs: Mapping[str, Any] = {}, base: Optional[str] = None + queryString: str, + initNs: Optional[Mapping[str, Any]] = None, + base: Optional[str] = None, ) -> Query: """ Parse and translate a SPARQL Query """ + if initNs is None: + initNs = {} ret = translateQuery(parseQuery(queryString), base, initNs) ret._original_args = (queryString, initNs, base) return ret def prepareUpdate( - updateString: str, initNs: Mapping[str, Any] = {}, base: Optional[str] = None + updateString: str, + initNs: Optional[Mapping[str, Any]] = None, + base: Optional[str] = None, ) -> Update: """ Parse and translate a SPARQL Update """ + if initNs is None: + initNs = {} ret = translateUpdate(parseUpdate(updateString), base, initNs) ret._original_args = (updateString, initNs, base) return ret @@ -43,8 +51,8 @@ def prepareUpdate( def processUpdate( graph: Graph, updateString: str, - initBindings: Mapping[str, Identifier] = {}, - initNs: Mapping[str, Any] = {}, + initBindings: Optional[Mapping[str, Identifier]] = None, + initNs: Optional[Mapping[str, Any]] = None, base: Optional[str] = None, ) -> None: """ @@ -73,8 +81,8 @@ def __init__(self, graph): def update( self, strOrQuery: Union[str, Update], - initBindings: Mapping[str, Identifier] = {}, - initNs: Mapping[str, Any] = {}, + initBindings: Optional[Mapping[str, Identifier]] = None, + initNs: Optional[Mapping[str, Any]] = None, ) -> None: """ .. 
caution:: @@ -108,8 +116,8 @@ def __init__(self, graph): def query( # type: ignore[override] self, strOrQuery: Union[str, Query], - initBindings: Mapping[str, Identifier] = {}, - initNs: Mapping[str, Any] = {}, + initBindings: Optional[Mapping[str, Identifier]] = None, + initNs: Optional[Mapping[str, Any]] = None, base: Optional[str] = None, DEBUG: bool = False, ) -> Mapping[str, Any]: @@ -132,9 +140,7 @@ def query( # type: ignore[override] documentation. """ - if not isinstance(strOrQuery, Query): - parsetree = parseQuery(strOrQuery) - query = translateQuery(parsetree, base, initNs) - else: - query = strOrQuery - return evalQuery(self.graph, query, initBindings, base) + if isinstance(strOrQuery, str): + strOrQuery = translateQuery(parseQuery(strOrQuery), base, initNs) + + return evalQuery(self.graph, strOrQuery, initBindings, base) diff --git a/rdflib/plugins/sparql/update.py b/rdflib/plugins/sparql/update.py index f27ee9b36..5ce86f393 100644 --- a/rdflib/plugins/sparql/update.py +++ b/rdflib/plugins/sparql/update.py @@ -280,7 +280,9 @@ def evalCopy(ctx: QueryContext, u: CompValue) -> None: def evalUpdate( - graph: Graph, update: Update, initBindings: Mapping[str, Identifier] = {} + graph: Graph, + update: Update, + initBindings: Optional[Mapping[str, Identifier]] = None, ) -> None: """ @@ -315,7 +317,7 @@ def evalUpdate( """ for u in update.algebra: - initBindings = dict((Variable(k), v) for k, v in initBindings.items()) + initBindings = dict((Variable(k), v) for k, v in (initBindings or {}).items()) ctx = QueryContext(graph, initBindings=initBindings) ctx.prologue = u.prologue From 950e60a6635a00039628b08a4c66b0b2af95525f Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 9 Apr 2023 15:25:43 +0200 Subject: [PATCH 040/114] build: unify poetry version source (#2334) Change the GitHub actions workflows to use the Poetry version specified in `devtools/requirements-poetry.in` so that it becomes simpler to upgrade Poetry. 
Also upgrade the version of Poetry from 1.4.0 to 1.4.2 --- .github/workflows/docker-images.yaml | 14 ++++++-------- .github/workflows/validate.yaml | 18 ++++++++---------- .pre-commit-config.yaml | 2 +- devtools/requirements-poetry.in | 2 +- 4 files changed, 16 insertions(+), 20 deletions(-) diff --git a/.github/workflows/docker-images.yaml b/.github/workflows/docker-images.yaml index 7d755b3f0..81c73673c 100644 --- a/.github/workflows/docker-images.yaml +++ b/.github/workflows/docker-images.yaml @@ -44,10 +44,9 @@ jobs: uses: arduino/setup-task@v1 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - - name: Python Poetry Action - uses: abatilo/actions-poetry@v2.3.0 - with: - poetry-version: 1.4.0 + - name: Install poetry + run: | + pip install -r devtools/requirements-poetry.in - name: Build images shell: bash run: | @@ -70,10 +69,9 @@ jobs: uses: arduino/setup-task@v1 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - - name: Python Poetry Action - uses: abatilo/actions-poetry@v2.3.0 - with: - poetry-version: 1.4.0 + - name: Install poetry + run: | + pip install -r devtools/requirements-poetry.in - name: Login to GitHub Container Registry uses: docker/login-action@v2 with: diff --git a/.github/workflows/validate.yaml b/.github/workflows/validate.yaml index 9f65e91a7..dc95a79b4 100644 --- a/.github/workflows/validate.yaml +++ b/.github/workflows/validate.yaml @@ -65,7 +65,7 @@ jobs: uses: actions/cache@v3 with: path: ${{ env.XDG_CACHE_HOME }} - key: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml', '**/poetry.lock', '**/with-fuseki.sh', '**/*requirements*.txt') }} + key: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml', '**/poetry.lock', '**/with-fuseki.sh', '**/*requirements*.txt', '**/*requirements*.in') }} restore-keys: | ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}- ${{ github.job }}-xdg-v1-${{ matrix.os }}- @@ -73,10 +73,9 @@ jobs: 
uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - name: Python Poetry Action - uses: abatilo/actions-poetry@v2.3.0 - with: - poetry-version: 1.4.0 + - name: Install poetry + run: | + pip install -r devtools/requirements-poetry.in - uses: actions/setup-java@v3 if: ${{ matrix.extensive-tests }} with: @@ -133,7 +132,7 @@ jobs: uses: actions/cache@v3 with: path: ${{ env.XDG_CACHE_HOME }} - key: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml', '**/poetry.lock', '**/with-fuseki.sh', '**/*requirements*.txt') }} + key: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml', '**/poetry.lock', '**/with-fuseki.sh', '**/*requirements*.txt', '**/*requirements*.in') }} restore-keys: | ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}- ${{ github.job }}-xdg-v1-${{ matrix.os }}- @@ -141,10 +140,9 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - name: Python Poetry Action - uses: abatilo/actions-poetry@v2.3.0 - with: - poetry-version: 1.4.0 + - name: Install poetry + run: | + pip install -r devtools/requirements-poetry.in - name: Install Task uses: arduino/setup-task@v1 with: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6e05165b1..b4dfbddc0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: require_serial: true args: ["."] - repo: https://github.com/python-poetry/poetry - rev: 1.4.0 + rev: 1.4.2 hooks: - id: poetry-check - id: poetry-lock diff --git a/devtools/requirements-poetry.in b/devtools/requirements-poetry.in index 55e389c80..b01ce00a4 100644 --- a/devtools/requirements-poetry.in +++ b/devtools/requirements-poetry.in @@ -1,3 +1,3 @@ # Fixing this here as readthedocs can't use the compiled requirements-poetry.txt # due to conflicts. 
-poetry==1.4.0 +poetry==1.4.2 From 4940798d09b282d47370606ede94fd16e4cb235f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 9 Apr 2023 16:49:36 +0200 Subject: [PATCH 041/114] build(deps-dev): bump black from 23.1.0 to 23.3.0 (#2328) Bumps [black](https://github.com/psf/black) from 23.1.0 to 23.3.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.1.0...23.3.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Iwan Aucamp --- .pre-commit-config.yaml | 2 +- poetry.lock | 54 ++++++++++++++++++++--------------------- pyproject.toml | 4 +-- 3 files changed, 30 insertions(+), 30 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b4dfbddc0..04a9693f0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ repos: - repo: https://github.com/psf/black # WARNING: version should be the same as in `pyproject.toml` # Using git ref spec because of https://github.com/psf/black/issues/2493 - rev: 'refs/tags/23.1.0:refs/tags/23.1.0' + rev: 'refs/tags/23.3.0:refs/tags/23.3.0' hooks: - id: black pass_filenames: false diff --git a/poetry.lock b/poetry.lock index 97c160c0f..ba7b16850 100644 --- a/poetry.lock +++ b/poetry.lock @@ -59,37 +59,37 @@ files = [ [[package]] name = "black" -version = "23.1.0" +version = "23.3.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, - {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, - {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, - {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, - {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, - {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, - {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, - {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, - {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, - {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, - 
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, - {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, - {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, - {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, - {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, - {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, - {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, + 
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, + {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, + {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, + {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, + {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, + {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, + {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, + {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = 
"sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, + {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, + {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, + {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, + {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, + {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, + {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, ] [package.dependencies] @@ -1469,4 +1469,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "9c119fd46d7acfac3d3b09e44e23e5e15458c790b957db9b38032df71353d1aa" +content-hash = "36084be60ae6a80f19b7aab7044c7c7d6fb11a304dae08992060f46f1c457213" diff --git a/pyproject.toml b/pyproject.toml index 63f87a593..87bc3cf70 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ html5lib = {version = "^1.0", optional = true} lxml = {version = "^4.3.0", optional = true} [tool.poetry.group.dev.dependencies] -black = "23.1.0" +black = "23.3.0" isort = "^5.10.0" mypy = "^1.1.0" lxml-stubs = 
"^0.4.0" @@ -119,7 +119,7 @@ pep8-naming = ["-N802"] pep8-naming = ["-N802"] [tool.black] -required-version = "23.1.0" +required-version = "23.3.0" line-length = "88" target-version = ['py37'] include = '\.pyi?$' From 6e544f5afc365b8e0fc8ad7f98c4c27b9daa755d Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Sun, 9 Apr 2023 21:47:26 +0200 Subject: [PATCH 042/114] refactor: eliminate unneeded `rdflib.compat` imports (#2336) Compatibility handling for `collections.abc.Mapping` and `collections.abc.MutableMapping` is not needed as RDFLib currently only support Python 3.7 and newer, and those classes are available from `collections.abc` in Python 3.7. --- rdflib/compat.py | 7 ------- rdflib/plugins/sparql/sparql.py | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/rdflib/compat.py b/rdflib/compat.py index cba3a5696..1cc4adacd 100644 --- a/rdflib/compat.py +++ b/rdflib/compat.py @@ -97,10 +97,3 @@ def decodeUnicodeEscape(escaped: str) -> str: # Most of times, there are no backslashes in strings. 
return escaped return _turtle_escape_pattern.sub(_turtle_escape_subber, escaped) - - -# Migration to abc in Python 3.8 -try: - from collections.abc import Mapping, MutableMapping -except: - from collections import Mapping, MutableMapping diff --git a/rdflib/plugins/sparql/sparql.py b/rdflib/plugins/sparql/sparql.py index 8f6a002da..0332d4e7c 100644 --- a/rdflib/plugins/sparql/sparql.py +++ b/rdflib/plugins/sparql/sparql.py @@ -4,6 +4,7 @@ import datetime import itertools import typing as t +from collections.abc import Mapping, MutableMapping from typing import ( TYPE_CHECKING, Any, @@ -21,7 +22,6 @@ import isodate import rdflib.plugins.sparql -from rdflib.compat import Mapping, MutableMapping from rdflib.graph import ConjunctiveGraph, Graph from rdflib.namespace import NamespaceManager from rdflib.plugins.sparql.parserutils import CompValue From b2f3987240ebb9edf03fea9bd94f8760ef99f419 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Mon, 10 Apr 2023 12:06:21 +0200 Subject: [PATCH 043/114] refactor: eliminate unused imports (#2337) `XMLNS` was seen as unused in `rdflib/__init__.py` but this change adds it to `__all__` in that module so that it is not unused. This change also removes actual unused imports in `rdflib/namespace/__init__.py`. 
--- rdflib/__init__.py | 1 + rdflib/namespace/__init__.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/rdflib/__init__.py b/rdflib/__init__.py index 0a7610f38..dc32be8ee 100644 --- a/rdflib/__init__.py +++ b/rdflib/__init__.py @@ -92,6 +92,7 @@ "TIME", "VANN", "VOID", + "XMLNS", "XSD", "util", "plugin", diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index c88fdedd4..57f52f336 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -1,6 +1,4 @@ -import json import logging -import sys import warnings from functools import lru_cache from pathlib import Path From 61f8a8d16a0812a6ebbe13ccaa3557f9fc4d0618 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Mon, 10 Apr 2023 12:07:29 +0200 Subject: [PATCH 044/114] refactor: narrow imports (#2338) This change narrows import so that things are imported from the Python module where they are defined instead of importing them from a module that re-exports them, e.g. change import of `Graph` to import from the `rdflib.graph` module instead of from the `rdflib` module. This helps avoid problems with circular imports. 
--- rdflib/extras/infixowl.py | 5 ++--- rdflib/plugins/sparql/parserutils.py | 3 +-- rdflib/void.py | 3 ++- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/rdflib/extras/infixowl.py b/rdflib/extras/infixowl.py index f4daab776..fc3e24aee 100644 --- a/rdflib/extras/infixowl.py +++ b/rdflib/extras/infixowl.py @@ -110,11 +110,10 @@ import itertools import logging -from rdflib import OWL, RDF, RDFS, XSD, BNode, Literal, Namespace, URIRef, Variable from rdflib.collection import Collection from rdflib.graph import Graph -from rdflib.namespace import NamespaceManager -from rdflib.term import Identifier +from rdflib.namespace import OWL, RDF, RDFS, XSD, Namespace, NamespaceManager +from rdflib.term import BNode, Identifier, Literal, URIRef, Variable from rdflib.util import first logger = logging.getLogger(__name__) diff --git a/rdflib/plugins/sparql/parserutils.py b/rdflib/plugins/sparql/parserutils.py index 5b3df78be..1f4109c82 100644 --- a/rdflib/plugins/sparql/parserutils.py +++ b/rdflib/plugins/sparql/parserutils.py @@ -16,8 +16,7 @@ from pyparsing import ParseResults, TokenConverter, originalTextFor -from rdflib import BNode, Variable -from rdflib.term import Identifier +from rdflib.term import BNode, Identifier, Variable if TYPE_CHECKING: from rdflib.plugins.sparql.sparql import FrozenBindings diff --git a/rdflib/void.py b/rdflib/void.py index ff81e2477..8a123e5f5 100644 --- a/rdflib/void.py +++ b/rdflib/void.py @@ -1,7 +1,8 @@ import collections -from rdflib import Graph, Literal, URIRef +from rdflib.graph import Graph from rdflib.namespace import RDF, VOID +from rdflib.term import Literal, URIRef def generateVoID( # noqa: N802 From 20f1235d5f3b639a47cad59967fe93ed9e41dd90 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Mon, 10 Apr 2023 12:08:55 +0200 Subject: [PATCH 045/114] refactor: eliminate inheritance from object (#2339) This change removes the redundant inheritance from `object` (i.e. 
`class Foo(object): pass`) that is no longer needed in Python 3 and is a relic from Python 2. --- rdflib/collection.py | 2 +- rdflib/compare.py | 6 +++--- rdflib/container.py | 2 +- rdflib/events.py | 4 ++-- rdflib/extras/describer.py | 4 ++-- rdflib/extras/infixowl.py | 4 ++-- rdflib/graph.py | 4 ++-- rdflib/namespace/__init__.py | 2 +- rdflib/parser.py | 2 +- rdflib/paths.py | 2 +- rdflib/plugins/parsers/jsonld.py | 2 +- rdflib/plugins/parsers/notation3.py | 4 ++-- rdflib/plugins/parsers/ntriples.py | 6 +++--- rdflib/plugins/parsers/rdfxml.py | 2 +- rdflib/plugins/serializers/jsonld.py | 2 +- rdflib/plugins/serializers/xmlwriter.py | 2 +- rdflib/plugins/shared/jsonld/context.py | 2 +- rdflib/plugins/sparql/aggregates.py | 4 ++-- rdflib/plugins/sparql/parserutils.py | 2 +- rdflib/plugins/sparql/sparql.py | 2 +- rdflib/plugins/stores/concurrent.py | 4 ++-- rdflib/plugins/stores/sparqlconnector.py | 2 +- rdflib/query.py | 12 ++++++------ rdflib/resource.py | 2 +- rdflib/store.py | 4 ++-- rdflib/term.py | 2 +- rdflib/tools/csv2rdf.py | 4 ++-- test/test_graph/test_batch_add.py | 2 +- test/test_nt_misc.py | 2 +- test/test_parsers/test_swap_n3.py | 2 +- test/test_serializers/test_prettyxml.py | 2 +- test/test_serializers/test_serializer_xml.py | 2 +- 32 files changed, 50 insertions(+), 50 deletions(-) diff --git a/rdflib/collection.py b/rdflib/collection.py index b9c76107c..fd64ab20b 100644 --- a/rdflib/collection.py +++ b/rdflib/collection.py @@ -11,7 +11,7 @@ __all__ = ["Collection"] -class Collection(object): +class Collection: __doc__ = """ See "Emulating container types": https://docs.python.org/reference/datamodel.html#emulating-container-types diff --git a/rdflib/compare.py b/rdflib/compare.py index 4b8473c7b..30f52d973 100644 --- a/rdflib/compare.py +++ b/rdflib/compare.py @@ -118,7 +118,7 @@ def _total_seconds(td): return result -class _runtime(object): # noqa: N801 +class _runtime: # noqa: N801 def __init__(self, label): self.label = label @@ -137,7 +137,7 @@ 
def wrapped_f(*args, **kwargs): return wrapped_f -class _call_count(object): # noqa: N801 +class _call_count: # noqa: N801 def __init__(self, label): self.label = label @@ -284,7 +284,7 @@ def copy(self): _HashT = Callable[[], "HASH"] -class _TripleCanonicalizer(object): +class _TripleCanonicalizer: def __init__(self, graph: Graph, hashfunc: _HashT = sha256): self.graph = graph diff --git a/rdflib/container.py b/rdflib/container.py index b5c0ebd56..56554df04 100644 --- a/rdflib/container.py +++ b/rdflib/container.py @@ -6,7 +6,7 @@ __all__ = ["Container", "Bag", "Seq", "Alt", "NoElementException"] -class Container(object): +class Container: """A class for constructing RDF containers, as per https://www.w3.org/TR/rdf11-mt/#rdf-containers Basic usage, creating a ``Bag`` and adding to it:: diff --git a/rdflib/events.py b/rdflib/events.py index e973c3082..d0290d5cd 100644 --- a/rdflib/events.py +++ b/rdflib/events.py @@ -26,7 +26,7 @@ __all__ = ["Event", "Dispatcher"] -class Event(object): +class Event: """ An event is a container for attributes. The source of an event creates this object, or a subclass, gives it any kind of data that @@ -47,7 +47,7 @@ def __repr__(self): return "" % ([a for a in attrs],) -class Dispatcher(object): +class Dispatcher: """ An object that can dispatch events to a privately managed group of subscribers. diff --git a/rdflib/extras/describer.py b/rdflib/extras/describer.py index aa318c46d..023970555 100644 --- a/rdflib/extras/describer.py +++ b/rdflib/extras/describer.py @@ -20,7 +20,7 @@ >>> >>> CV = Namespace("http://purl.org/captsolo/resume-rdf/0.2/cv#") >>> - >>> class Person(object): + >>> class Person: ... def __init__(self): ... self.first_name = u"Some" ... 
self.last_name = u"Body" @@ -112,7 +112,7 @@ from rdflib.term import BNode, Identifier, Literal, URIRef -class Describer(object): +class Describer: def __init__(self, graph=None, about=None, base=None): if graph is None: graph = Graph() diff --git a/rdflib/extras/infixowl.py b/rdflib/extras/infixowl.py index fc3e24aee..9c75345bb 100644 --- a/rdflib/extras/infixowl.py +++ b/rdflib/extras/infixowl.py @@ -357,7 +357,7 @@ def _remover(inst): return _remover -class Individual(object): +class Individual: """ A typed individual """ @@ -1382,7 +1382,7 @@ def __repr__(self, full=False, normalization=True): ) + klassdescr -class OWLRDFListProxy(object): +class OWLRDFListProxy: def __init__(self, rdf_list, members=None, graph=None): if graph: self.graph = graph diff --git a/rdflib/graph.py b/rdflib/graph.py index 809241df4..c6f0fd36c 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -2588,7 +2588,7 @@ def __reduce__(self) -> Tuple[Type[Graph], Tuple[Store, _ContextIdentifierType]] rdflib.term._ORDERING[QuotedGraph] = 11 -class Seq(object): +class Seq: """Wrapper around an RDF Seq resource It implements a container type in Python with the order of the items @@ -2894,7 +2894,7 @@ def _assertnode(*terms: Any) -> bool: return True -class BatchAddGraph(object): +class BatchAddGraph: """ Wrapper around graph that turns batches of calls to Graph's add (and optionally, addN) into calls to batched calls to addN`. 
diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index 57f52f336..618cab78f 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -348,7 +348,7 @@ def _ipython_key_completions_(self) -> List[str]: _with_bind_override_fix = True -class NamespaceManager(object): +class NamespaceManager: """Class for managing prefix => namespace mappings This class requires an RDFlib Graph as an input parameter and may optionally have diff --git a/rdflib/parser.py b/rdflib/parser.py index 6f23dd342..6cf6f1da6 100644 --- a/rdflib/parser.py +++ b/rdflib/parser.py @@ -53,7 +53,7 @@ ] -class Parser(object): +class Parser: __slots__ = () def __init__(self): diff --git a/rdflib/paths.py b/rdflib/paths.py index 6ca42d74c..df7136178 100644 --- a/rdflib/paths.py +++ b/rdflib/paths.py @@ -214,7 +214,7 @@ @total_ordering -class Path(object): +class Path: __or__: Callable[["Path", Union["URIRef", "Path"]], "AlternativePath"] __invert__: Callable[["Path"], "InvPath"] __neg__: Callable[["Path"], "NegatedPath"] diff --git a/rdflib/plugins/parsers/jsonld.py b/rdflib/plugins/parsers/jsonld.py index 716b80f4f..4eb05fcee 100644 --- a/rdflib/plugins/parsers/jsonld.py +++ b/rdflib/plugins/parsers/jsonld.py @@ -138,7 +138,7 @@ def to_rdf( return parser.parse(data, context, dataset) -class Parser(object): +class Parser: def __init__( self, generalized_rdf: bool = False, allow_lists_of_lists: Optional[bool] = None ): diff --git a/rdflib/plugins/parsers/notation3.py b/rdflib/plugins/parsers/notation3.py index 25ea0c747..08798076a 100755 --- a/rdflib/plugins/parsers/notation3.py +++ b/rdflib/plugins/parsers/notation3.py @@ -1773,7 +1773,7 @@ def message(self) -> str: ############################################################################### -class Formula(object): +class Formula: number = 0 def __init__(self, parent: Graph): @@ -1815,7 +1815,7 @@ def close(self) -> QuotedGraph: r_hibyte = re.compile(r"([\x80-\xff])") -class RDFSink(object): +class RDFSink: 
def __init__(self, graph: Graph): self.rootFormula: Optional[Formula] = None self.uuid = uuid4().hex diff --git a/rdflib/plugins/parsers/ntriples.py b/rdflib/plugins/parsers/ntriples.py index 564a2cf1b..09656faff 100644 --- a/rdflib/plugins/parsers/ntriples.py +++ b/rdflib/plugins/parsers/ntriples.py @@ -60,7 +60,7 @@ validate = False -class DummySink(object): +class DummySink: def __init__(self): self.length = 0 @@ -126,7 +126,7 @@ def uriquote(uri: str) -> str: _BNodeContextType = MutableMapping[str, bNode] -class W3CNTriplesParser(object): +class W3CNTriplesParser: """An N-Triples Parser. This is a legacy-style Triples parser for NTriples provided by W3C Usage:: @@ -334,7 +334,7 @@ def literal(self) -> Union["te.Literal[False]", Literal]: return False -class NTGraphSink(object): +class NTGraphSink: __slots__ = ("g",) def __init__(self, graph: "Graph"): diff --git a/rdflib/plugins/parsers/rdfxml.py b/rdflib/plugins/parsers/rdfxml.py index 76775f003..03650fc98 100644 --- a/rdflib/plugins/parsers/rdfxml.py +++ b/rdflib/plugins/parsers/rdfxml.py @@ -95,7 +95,7 @@ def next_li(self): return RDFNS["_%s" % self.li] -class ElementHandler(object): +class ElementHandler: __slots__ = [ "start", "char", diff --git a/rdflib/plugins/serializers/jsonld.py b/rdflib/plugins/serializers/jsonld.py index e9ff401b7..e5d9b0384 100644 --- a/rdflib/plugins/serializers/jsonld.py +++ b/rdflib/plugins/serializers/jsonld.py @@ -138,7 +138,7 @@ def from_rdf( return result -class Converter(object): +class Converter: def __init__(self, context, use_native_types, use_rdf_type): self.context = context self.use_native_types = context.active or use_native_types diff --git a/rdflib/plugins/serializers/xmlwriter.py b/rdflib/plugins/serializers/xmlwriter.py index 9ed10f48f..88cebdeda 100644 --- a/rdflib/plugins/serializers/xmlwriter.py +++ b/rdflib/plugins/serializers/xmlwriter.py @@ -6,7 +6,7 @@ ESCAPE_ENTITIES = {"\r": " "} -class XMLWriter(object): +class XMLWriter: def __init__(self, stream, 
namespace_manager, encoding=None, decl=1, extra_ns=None): encoding = encoding or "utf-8" encoder, decoder, stream_reader, stream_writer = codecs.lookup(encoding) diff --git a/rdflib/plugins/shared/jsonld/context.py b/rdflib/plugins/shared/jsonld/context.py index b19f66737..2f6cedbdd 100644 --- a/rdflib/plugins/shared/jsonld/context.py +++ b/rdflib/plugins/shared/jsonld/context.py @@ -69,7 +69,7 @@ class Defined(int): URI_GEN_DELIMS = (":", "/", "?", "#", "[", "]", "@") -class Context(object): +class Context: def __init__( self, source: Optional[Any] = None, diff --git a/rdflib/plugins/sparql/aggregates.py b/rdflib/plugins/sparql/aggregates.py index fb2dffedd..67e143a7e 100644 --- a/rdflib/plugins/sparql/aggregates.py +++ b/rdflib/plugins/sparql/aggregates.py @@ -30,7 +30,7 @@ """ -class Accumulator(object): +class Accumulator: """abstract base class for different aggregation functions""" def __init__(self, aggregation: CompValue): @@ -268,7 +268,7 @@ def get_value(self) -> Literal: return Literal(self.separator.join(str(v) for v in self.value)) -class Aggregator(object): +class Aggregator: """combines different Accumulator objects""" accumulator_classes = { diff --git a/rdflib/plugins/sparql/parserutils.py b/rdflib/plugins/sparql/parserutils.py index 1f4109c82..b625f3646 100644 --- a/rdflib/plugins/sparql/parserutils.py +++ b/rdflib/plugins/sparql/parserutils.py @@ -99,7 +99,7 @@ def value( return val -class ParamValue(object): +class ParamValue: """ The result of parsing a Param This just keeps the name/value diff --git a/rdflib/plugins/sparql/sparql.py b/rdflib/plugins/sparql/sparql.py index 0332d4e7c..64230a645 100644 --- a/rdflib/plugins/sparql/sparql.py +++ b/rdflib/plugins/sparql/sparql.py @@ -246,7 +246,7 @@ def remember(self, these) -> FrozenBindings: return FrozenBindings(self.ctx, (x for x in self.items() if x[0] in these)) -class QueryContext(object): +class QueryContext: """ Query context - passed along when evaluating the query """ diff --git 
a/rdflib/plugins/stores/concurrent.py b/rdflib/plugins/stores/concurrent.py index cdf41ba0e..c07867958 100644 --- a/rdflib/plugins/stores/concurrent.py +++ b/rdflib/plugins/stores/concurrent.py @@ -1,7 +1,7 @@ from threading import Lock -class ResponsibleGenerator(object): +class ResponsibleGenerator: """A generator that will help clean up when it is done being used.""" __slots__ = ["cleanup", "gen"] @@ -20,7 +20,7 @@ def __next__(self): return next(self.gen) -class ConcurrentStore(object): +class ConcurrentStore: def __init__(self, store): self.store = store diff --git a/rdflib/plugins/stores/sparqlconnector.py b/rdflib/plugins/stores/sparqlconnector.py index 79f9c54ae..faf575384 100644 --- a/rdflib/plugins/stores/sparqlconnector.py +++ b/rdflib/plugins/stores/sparqlconnector.py @@ -30,7 +30,7 @@ class SPARQLConnectorException(Exception): # noqa: N818 } -class SPARQLConnector(object): +class SPARQLConnector: """ this class deals with nitty gritty details of talking to a SPARQL server """ diff --git a/rdflib/query.py b/rdflib/query.py index 155c490e6..e9c189017 100644 --- a/rdflib/query.py +++ b/rdflib/query.py @@ -40,7 +40,7 @@ from rdflib.term import Identifier, Variable -class Processor(object): +class Processor: """ Query plugin interface. @@ -64,7 +64,7 @@ def query( # type: ignore[empty-body] pass -class UpdateProcessor(object): +class UpdateProcessor: """ Update plugin interface. @@ -93,7 +93,7 @@ class ResultException(Exception): pass -class EncodeOnlyUnicode(object): +class EncodeOnlyUnicode: """ This is a crappy work-around for http://bugs.python.org/issue11649 @@ -202,7 +202,7 @@ def asdict(self) -> Dict[str, "Identifier"]: return dict((v, self[v]) for v in self.labels if self[v] is not None) -class Result(object): +class Result: """ A common class for representing query result. 
@@ -413,7 +413,7 @@ def __eq__(self, other: Any) -> bool: return False -class ResultParser(object): +class ResultParser: def __init__(self): pass @@ -423,7 +423,7 @@ def parse(self, source: IO, **kwargs: Any) -> Result: # type: ignore[empty-body pass # abstract -class ResultSerializer(object): +class ResultSerializer: def __init__(self, result: Result): self.result = result diff --git a/rdflib/resource.py b/rdflib/resource.py index 49c196dd8..0620c13d9 100644 --- a/rdflib/resource.py +++ b/rdflib/resource.py @@ -293,7 +293,7 @@ __all__ = ["Resource"] -class Resource(object): +class Resource: def __init__(self, graph, subject): self._graph = graph self._identifier = subject diff --git a/rdflib/store.py b/rdflib/store.py index ca6f92611..e3c9f7ab2 100644 --- a/rdflib/store.py +++ b/rdflib/store.py @@ -113,7 +113,7 @@ class TripleRemovedEvent(Event): """ -class NodePickler(object): +class NodePickler: def __init__(self) -> None: self._objects: Dict[str, Any] = {} self._ids: Dict[Any, str] = {} @@ -165,7 +165,7 @@ def __setstate__(self, state: Mapping[str, Any]) -> None: self._get_object = self._objects.__getitem__ -class Store(object): +class Store: # Properties context_aware: bool = False formula_aware: bool = False diff --git a/rdflib/term.py b/rdflib/term.py index a42d524aa..bdfbec111 100644 --- a/rdflib/term.py +++ b/rdflib/term.py @@ -119,7 +119,7 @@ def _is_valid_unicode(value: Union[str, bytes]) -> bool: return True -class Node(object): +class Node: """ A Node in the Graph. 
""" diff --git a/rdflib/tools/csv2rdf.py b/rdflib/tools/csv2rdf.py index 2bf7dc861..267483ed5 100644 --- a/rdflib/tools/csv2rdf.py +++ b/rdflib/tools/csv2rdf.py @@ -139,7 +139,7 @@ def prefixuri(x, prefix, class_=None): # meta-language for config -class NodeMaker(object): +class NodeMaker: def range(self): return rdflib.RDFS.Literal @@ -296,7 +296,7 @@ def column(v): return eval(v, config_functions) -class CSV2RDF(object): +class CSV2RDF: def __init__(self): self.CLASS = None self.BASE = None diff --git a/test/test_graph/test_batch_add.py b/test/test_graph/test_batch_add.py index b8d037e95..112a8903c 100644 --- a/test/test_graph/test_batch_add.py +++ b/test/test_graph/test_batch_add.py @@ -72,7 +72,7 @@ def test_no_addN_on_exception(self): assert 10 == len(g) def test_addN_batching_addN(self): - class MockGraph(object): + class MockGraph: def __init__(self): self.counts = [] diff --git a/test/test_nt_misc.py b/test/test_nt_misc.py index f2b650e7d..90a6e93a2 100644 --- a/test/test_nt_misc.py +++ b/test/test_nt_misc.py @@ -268,7 +268,7 @@ def test_bnode_shared_across_instances_with_parse_option(self): assert len(my_sink.subs) == 1 -class FakeSink(object): +class FakeSink: def __init__(self): self.subs = set() diff --git a/test/test_parsers/test_swap_n3.py b/test/test_parsers/test_swap_n3.py index dc8d9a8a8..cebb55ad9 100644 --- a/test/test_parsers/test_swap_n3.py +++ b/test/test_parsers/test_swap_n3.py @@ -60,7 +60,7 @@ ] -class Envelope(object): +class Envelope: def __init__(self, n, f): self.name = n self.file = f diff --git a/test/test_serializers/test_prettyxml.py b/test/test_serializers/test_prettyxml.py index 4d406a6e4..0084aa249 100644 --- a/test/test_serializers/test_prettyxml.py +++ b/test/test_serializers/test_prettyxml.py @@ -7,7 +7,7 @@ from rdflib.term import BNode, Literal, URIRef -class SerializerTestBase(object): +class SerializerTestBase: repeats = 8 def setup_method(self): diff --git a/test/test_serializers/test_serializer_xml.py 
b/test/test_serializers/test_serializer_xml.py index bac0169d0..ad9012939 100644 --- a/test/test_serializers/test_serializer_xml.py +++ b/test/test_serializers/test_serializer_xml.py @@ -6,7 +6,7 @@ from rdflib.term import BNode, URIRef -class SerializerTestBase(object): +class SerializerTestBase: repeats = 8 def setup_method(self): From 00cdcc6e612a477b1a08b14b93ad4ffea36a7869 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 12:09:18 +0200 Subject: [PATCH 046/114] build(deps-dev): bump coverage from 7.2.2 to 7.2.3 (#2341) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.2.2 to 7.2.3. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.2.2...7.2.3) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 104 ++++++++++++++++++++++++++-------------------------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index ba7b16850..cea4fd181 100644 --- a/poetry.lock +++ b/poetry.lock @@ -235,63 +235,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.2" +version = "7.2.3" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"}, - {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"}, - {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"}, - {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"}, - {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"}, - {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"}, - {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"}, - {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"}, - {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = 
"sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"}, - {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"}, - {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"}, - {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"}, - {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"}, - {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"}, - {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"}, - {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"}, - {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"}, - {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"}, - {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"}, - {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"}, - {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"}, - {file = 
"coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"}, - {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"}, - {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"}, - {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"}, - {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"}, - {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"}, - {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"}, - {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"}, - {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"}, - {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"}, - {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"}, - {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"}, - {file = 
"coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"}, - {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"}, - {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"}, - {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"}, - {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"}, - {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"}, - {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"}, - {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"}, - {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"}, - {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"}, - {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"}, - {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"}, - {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"}, - {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"}, - {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"}, - {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"}, - {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"}, - {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"}, + {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, + {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, + {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, + {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, + {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, + {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, + {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash 
= "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, + {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, + {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, + {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, + {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, + 
{file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, + {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, + {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, + {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, + {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, ] [package.dependencies] From d5c4d206a781241189aa8992bbac15e19dd327f8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 12:09:36 +0200 Subject: [PATCH 047/114] build(deps): bump library/python in /docker/latest (#2342) Bumps library/python from 3.11.2-slim to 3.11.3-slim. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index 96ac404af..5d82c7a58 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.2-slim@sha256:1d2b7101658e795e4d878d3f54f3354838630e1d16f5868ea18b338c12bb92c9 +FROM docker.io/library/python:3.11.3-slim@sha256:5a67c38a7c28ad09d08f4e153280023a2df77189b55af7804d7ceb96fee6a68f COPY docker/latest/requirements.txt /var/tmp/build/ From ebde6adc5ff52e7b71e6cd9bce8582d35d202ce3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 12:09:46 +0200 Subject: [PATCH 048/114] build(deps): bump importlib-metadata from 6.1.0 to 6.3.0 (#2343) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.1.0 to 6.3.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.1.0...v6.3.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index cea4fd181..1bef44538 100644 --- a/poetry.lock +++ b/poetry.lock @@ -427,14 +427,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.1.0" +version = "6.3.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, - {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, + {file = "importlib_metadata-6.3.0-py3-none-any.whl", hash = "sha256:8f8bd2af397cf33bd344d35cfe7f489219b7d14fc79a3f854b75b8417e9226b0"}, + {file = "importlib_metadata-6.3.0.tar.gz", hash = "sha256:23c2bcae4762dfb0bbe072d358faec24957901d75b6c4ab11172c0c982532402"}, ] [package.dependencies] From 0bfeea228f4a428c18c5cf6d17d30f6a0b1e6cfc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 12:10:02 +0200 Subject: [PATCH 049/114] build(deps-dev): bump pytest from 7.2.2 to 7.3.0 (#2344) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.2.2 to 7.3.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.2.2...7.3.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 28 ++++------------------------ 1 file changed, 4 insertions(+), 24 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1bef44538..a58be2d0d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -12,25 +12,6 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - [[package]] name = "babel" version = "2.12.1" @@ -999,18 +980,17 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.2.2" +version = "7.3.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, - {file = "pytest-7.2.2.tar.gz", hash = 
"sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, + {file = "pytest-7.3.0-py3-none-any.whl", hash = "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201"}, + {file = "pytest-7.3.0.tar.gz", hash = "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d"}, ] [package.dependencies] -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -1020,7 +1000,7 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-cov" From 2d8609da6e95c9ac48ba6ae6c2b1e262e91037c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 12:10:34 +0200 Subject: [PATCH 050/114] build(deps-dev): bump mypy from 1.1.1 to 1.2.0 (#2346) Bumps [mypy](https://github.com/python/mypy) from 1.1.1 to 1.2.0. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v1.1.1...v1.2.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 54 ++++++++++++++++++++++++++--------------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/poetry.lock b/poetry.lock index a58be2d0d..592b63005 100644 --- a/poetry.lock +++ b/poetry.lock @@ -730,38 +730,38 @@ files = [ [[package]] name = "mypy" -version = "1.1.1" +version = "1.2.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"}, - {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"}, - {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"}, - {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"}, - {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"}, - {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"}, - {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"}, - {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"}, - {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"}, - {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"}, - {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"}, - {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"}, - {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"}, - {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"}, - {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"}, - {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"}, - {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"}, - {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"}, - {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"}, - {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"}, - {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"}, - {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"}, - {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"}, - {file = 
"mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"}, - {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"}, - {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"}, + {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"}, + {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"}, + {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"}, + {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"}, + {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"}, + {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"}, + {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"}, + {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"}, + {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"}, + {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"}, + {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"}, + {file = 
"mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"}, + {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"}, + {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"}, + {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"}, + {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"}, + {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"}, + {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"}, + {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"}, + {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"}, + {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"}, + {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"}, + {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"}, + {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"}, + {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"}, + {file = 
"mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"}, ] [package.dependencies] From 0efe55d3c61238dc9c403bef34fd76bcad95007d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Apr 2023 12:10:52 +0200 Subject: [PATCH 051/114] build(deps-dev): bump types-setuptools from 67.6.0.6 to 67.6.0.7 (#2345) Bumps [types-setuptools](https://github.com/python/typeshed) from 67.6.0.6 to 67.6.0.7. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 592b63005..4a693ab95 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1373,14 +1373,14 @@ files = [ [[package]] name = "types-setuptools" -version = "67.6.0.6" +version = "67.6.0.7" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.6.0.6.tar.gz", hash = "sha256:a5efd019b53c5793a112e0112b91bb402df749f75ee7abe681b6240841650ce4"}, - {file = "types_setuptools-67.6.0.6-py3-none-any.whl", hash = "sha256:fb46e651c6b5880ec8932a8876aa832071d612b2954fc5156b09cd263927d85b"}, + {file = "types-setuptools-67.6.0.7.tar.gz", hash = "sha256:f46b11773b1aeddbd2ef32fd6a6091ef33aa9b32daa124f6ce63f616de59ae51"}, + {file = "types_setuptools-67.6.0.7-py3-none-any.whl", hash = "sha256:ea2873dc8dd9e8421929dc50617ac7c2054c9a873942c5b5b606e2effef5db12"}, ] [[package]] From 5c3c78af33530298a7db2350304ce253370e91c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 
Apr 2023 12:11:16 +0200 Subject: [PATCH 052/114] build(deps): bump library/python in /docker/unstable (#2347) Bumps library/python from 3.11.2-slim to 3.11.3-slim. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index f70e3a04d..f73094bf4 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.2-slim@sha256:2f749ef90f54fd4b3c77cde78eec23ab5b8199d9ac84e4ced6ae523ef223ef7b +FROM docker.io/library/python:3.11.3-slim@sha256:5a67c38a7c28ad09d08f4e153280023a2df77189b55af7804d7ceb96fee6a68f # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From 81d13d432b7e49b557b5de11691bdeaed31a9b06 Mon Sep 17 00:00:00 2001 From: Matt Goldberg <59745812+mgberg@users.noreply.github.com> Date: Tue, 11 Apr 2023 17:12:27 -0400 Subject: [PATCH 053/114] feat: add optional `target_graph` argument to `Graph.cbd` and use it for DESCRIBE queries (#2322) Add optional keyword only `target_graph` argument to `rdflib.graph.Graph.cbd` and use this new argument in `evalDescribeQuery`. This makes it possible to compute a concise bounded description without creating a new graph to hold the result, and also without potentially having to copy it to another final graph. 
Co-authored-by: Iwan Aucamp --- rdflib/graph.py | 12 +++++++++--- rdflib/plugins/sparql/evaluate.py | 2 +- test/test_graph/test_graph_cbd.py | 27 ++++++++++++++++++++++++++- 3 files changed, 36 insertions(+), 5 deletions(-) diff --git a/rdflib/graph.py b/rdflib/graph.py index c6f0fd36c..6e2e50aff 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -1814,7 +1814,9 @@ def do_de_skolemize2(t: _TripleType) -> _TripleType: return retval - def cbd(self, resource: _SubjectType) -> Graph: + def cbd( + self, resource: _SubjectType, *, target_graph: Optional[Graph] = None + ) -> Graph: """Retrieves the Concise Bounded Description of a Resource from a Graph Concise Bounded Description (CBD) is defined in [1] as: @@ -1840,10 +1842,14 @@ def cbd(self, resource: _SubjectType) -> Graph: [1] https://www.w3.org/Submission/CBD/ :param resource: a URIRef object, of the Resource for queried for - :return: a Graph, subgraph of self + :param target_graph: Optionally, a graph to add the CBD to; otherwise, a new graph is created for the CBD + :return: a Graph, subgraph of self if no graph was provided otherwise the provided graph """ - subgraph = Graph() + if target_graph is None: + subgraph = Graph() + else: + subgraph = target_graph def add_to_cbd(uri: _SubjectType) -> None: for s, p, o in self.triples((uri, None, None)): diff --git a/rdflib/plugins/sparql/evaluate.py b/rdflib/plugins/sparql/evaluate.py index 764250c8c..08dd02d57 100644 --- a/rdflib/plugins/sparql/evaluate.py +++ b/rdflib/plugins/sparql/evaluate.py @@ -630,7 +630,7 @@ def evalDescribeQuery(ctx: QueryContext, query) -> Dict[str, Union[str, Graph]]: # Get a CBD for all resources identified to describe for resource in to_describe: # type error: Item "None" of "Optional[Graph]" has no attribute "cbd" - graph += ctx.graph.cbd(resource) # type: ignore[union-attr] + ctx.graph.cbd(resource, target_graph=graph) # type: ignore[union-attr] res: Dict[str, Union[str, Graph]] = {} res["type_"] = "DESCRIBE" diff --git 
a/test/test_graph/test_graph_cbd.py b/test/test_graph/test_graph_cbd.py index 66861241a..cb9e3761b 100644 --- a/test/test_graph/test_graph_cbd.py +++ b/test/test_graph/test_graph_cbd.py @@ -4,7 +4,8 @@ import pytest from rdflib import Graph, Namespace -from rdflib.term import URIRef +from rdflib.namespace import RDF, RDFS +from rdflib.term import Literal, URIRef EXAMPLE_GRAPH_FILE_PATH = TEST_DATA_DIR / "spec" / "cbd" / "example_graph.rdf" EXAMPLE_GRAPH_CBD_FILE_PATH = TEST_DATA_DIR / "spec" / "cbd" / "example_graph_cbd.rdf" @@ -134,3 +135,27 @@ def test_cbd_example(): assert len(g.cbd(URIRef(query))) == ( 21 ), "cbd() for aReallyGreatBook should return 21 triples" + + +def test_cbd_target(rdfs_graph: Graph): + """ + `Graph.cbd` places the Concise Bounded Description in the target graph. + """ + + target = Graph() + result = rdfs_graph.cbd(RDFS.Literal, target_graph=target) + + expected_result = { + (RDFS.Literal, RDFS.subClassOf, RDFS.Resource), + (RDFS.Literal, RDF.type, RDFS.Class), + (RDFS.Literal, RDFS.label, Literal("Literal")), + ( + RDFS.Literal, + RDFS.comment, + Literal("The class of literal values, eg. textual strings and integers."), + ), + (RDFS.Literal, RDFS.isDefinedBy, URIRef(f"{RDFS}")), + } + + assert result is target + assert expected_result == set(result.triples((None, None, None))) From 7df77cd3fa0381ae2b309981230eaa0d42e90b79 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Wed, 12 Apr 2023 22:10:43 +0200 Subject: [PATCH 054/114] fix: correct imports and `__all__` (#2340) Disable [`implicit_reexport`](https://mypy.readthedocs.io/en/stable/config_file.html#confval-implicit_reexport) and eliminate all errors reported by mypy after this. This helps ensure that import statements import from the right module and that the `__all__` variable is correct. 
--- docs/conf.py | 50 +++++++++++++++++++ docs/rdf_terms.rst | 2 +- pyproject.toml | 1 + rdflib/namespace/_GEO.py | 26 +++++----- rdflib/namespace/__init__.py | 28 +++++++++++ rdflib/plugins/stores/sparqlconnector.py | 3 ++ rdflib/plugins/stores/sparqlstore.py | 3 ++ test/jsonld/runner.py | 6 +-- test/jsonld/test_api.py | 4 +- test/jsonld/test_compaction.py | 3 +- test/jsonld/test_context.py | 3 +- test/jsonld/test_named_graphs.py | 3 +- test/test_dataset/test_dataset.py | 6 ++- test/test_graph/test_graph.py | 4 +- test/test_graph/test_graph_context.py | 3 +- test/test_graph/test_graph_http.py | 8 ++- test/test_namespace/test_namespace.py | 4 +- test/test_roundtrip.py | 9 ++-- test/test_sparql/test_service.py | 7 +-- test/test_store/test_store_sparqlstore.py | 9 ++-- .../test_store_sparqlstore_query.py | 7 +-- .../test_store_sparqlstore_sparqlconnector.py | 7 +-- .../test_store_sparqlupdatestore_mock.py | 7 +-- test/utils/httpfileserver.py | 3 +- test/utils/test/test_httpservermock.py | 9 +--- 25 files changed, 143 insertions(+), 72 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 785c6fc65..1e2b7ef46 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,8 +16,10 @@ import os import re import sys +from typing import Any, Dict import sphinx +import sphinx.application import rdflib @@ -47,6 +49,7 @@ "sphinx.ext.autosectionlabel", ] +# https://github.com/sphinx-contrib/apidoc/blob/master/README.rst#configuration apidoc_module_dir = "../rdflib" apidoc_output_dir = "apidocs" @@ -328,3 +331,50 @@ def find_version(filename): if sys.version_info < (3, 8): nitpick_ignore.extend([("py:class", "importlib_metadata.EntryPoint")]) + + +def autodoc_skip_member_handler( + app: sphinx.application.Sphinx, + what: str, + name: str, + obj: Any, + skip: bool, + options: Dict[str, Any], +): + """ + This function will be called by Sphinx when it is deciding whether to skip a + member of a class or module. 
+ """ + # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#event-autodoc-skip-member + if ( + app.env.docname == "apidocs/rdflib" + and what == "module" + and type(obj).__name__.endswith("DefinedNamespaceMeta") + ): + # Don't document namespaces in the `rdflib` module, they will be + # documented in the `rdflib.namespace` module instead and Sphinx does + # not like when these are documented in two places. + # + # An example of the WARNINGS that occur without this is: + # + # "WARNING: duplicate object description of rdflib.namespace._SDO.SDO, + # other instance in apidocs/rdflib, use :noindex: for one of them" + logging.info( + "Skipping %s %s in %s, it will be documented in ", + what, + name, + app.env.docname, + ) + return True + return None + + +# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#skipping-members +def setup(app: sphinx.application.Sphinx) -> None: + """ + Setup the Sphinx application. + """ + + # Register a autodoc-skip-member handler so that certain members can be + # skipped. + app.connect("autodoc-skip-member", autodoc_skip_member_handler) diff --git a/docs/rdf_terms.rst b/docs/rdf_terms.rst index 66abd1838..b44b0a584 100644 --- a/docs/rdf_terms.rst +++ b/docs/rdf_terms.rst @@ -150,7 +150,7 @@ Common XSD datatypes ^^^^^^^^^^^^^^^^^^^^ Most simple literals such as *string* or *integer* have XML Schema (XSD) datatypes defined for them, see the figure -below. Additionally, these XSD datatypes are listed in the :class:`XSD Namespace class ` that +below. Additionally, these XSD datatypes are listed in the :class:`XSD Namespace class ` that ships with RDFLib, so many Python code editors will prompt you with autocomplete for them when using it. Remember, you don't *have* to use XSD datatypes and can always make up your own, as GeoSPARQL does, as described above. 
diff --git a/pyproject.toml b/pyproject.toml index 87bc3cf70..5dee7655b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -211,6 +211,7 @@ disallow_subclassing_any = false warn_unreachable = true warn_unused_ignores = true no_implicit_optional = false +implicit_reexport = false [[tool.mypy.overrides]] module = "pyparsing.*" diff --git a/rdflib/namespace/_GEO.py b/rdflib/namespace/_GEO.py index 7f316fcbc..c890973ca 100644 --- a/rdflib/namespace/_GEO.py +++ b/rdflib/namespace/_GEO.py @@ -9,18 +9,20 @@ class GEO(DefinedNamespace): Generated from: http://schemas.opengis.net/geosparql/1.0/geosparql_vocab_all.rdf Date: 2021-12-27 17:38:15.101187 - dc:creator "Open Geospatial Consortium"^^xsd:string - dc:date "2012-04-30"^^xsd:date - dc:source - "OGC GeoSPARQL – A Geographic Query Language for RDF Data OGC 11-052r5"^^xsd:string - rdfs:seeAlso - - - owl:imports dc: - - - - owl:versionInfo "OGC GeoSPARQL 1.0"^^xsd:string + .. code-block:: Turtle + + dc:creator "Open Geospatial Consortium"^^xsd:string + dc:date "2012-04-30"^^xsd:date + dc:source + "OGC GeoSPARQL – A Geographic Query Language for RDF Data OGC 11-052r5"^^xsd:string + rdfs:seeAlso + + + owl:imports dc: + + + + owl:versionInfo "OGC GeoSPARQL 1.0"^^xsd:string """ # http://www.w3.org/2000/01/rdf-schema#Datatype diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index 618cab78f..d96c21f35 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -91,6 +91,34 @@ "ClosedNamespace", "DefinedNamespace", "NamespaceManager", + "BRICK", + "CSVW", + "DC", + "DCAM", + "DCAT", + "DCMITYPE", + "DCTERMS", + "DOAP", + "FOAF", + "GEO", + "ODRL2", + "ORG", + "OWL", + "PROF", + "PROV", + "QB", + "RDF", + "RDFS", + "SDO", + "SH", + "SKOS", + "SOSA", + "SSN", + "TIME", + "VANN", + "VOID", + "WGS", + "XSD", ] logger = logging.getLogger(__name__) diff --git a/rdflib/plugins/stores/sparqlconnector.py b/rdflib/plugins/stores/sparqlconnector.py index faf575384..cbf7bd92a 100644 --- 
a/rdflib/plugins/stores/sparqlconnector.py +++ b/rdflib/plugins/stores/sparqlconnector.py @@ -185,3 +185,6 @@ def update( self.update_endpoint + qsa, data=query.encode(), headers=args["headers"] ) ) + + +__all__ = ["SPARQLConnector", "SPARQLConnectorException"] diff --git a/rdflib/plugins/stores/sparqlstore.py b/rdflib/plugins/stores/sparqlstore.py index 47bb57f97..cfffbd768 100644 --- a/rdflib/plugins/stores/sparqlstore.py +++ b/rdflib/plugins/stores/sparqlstore.py @@ -1011,3 +1011,6 @@ def predicate_objects( """A generator of (predicate, object) tuples for the given subject""" for t, c in self.triples((subject, None, None)): yield t[1], t[2] + + +__all__ = ["SPARQLUpdateStore", "SPARQLStore"] diff --git a/test/jsonld/runner.py b/test/jsonld/runner.py index 13afc0851..77a80ed4f 100644 --- a/test/jsonld/runner.py +++ b/test/jsonld/runner.py @@ -1,13 +1,13 @@ # -*- coding: UTF-8 -*- import json -from rdflib import ConjunctiveGraph +from rdflib import BNode, ConjunctiveGraph from rdflib.compare import isomorphic from rdflib.parser import InputSource from rdflib.plugins.parsers.jsonld import JsonLDParser, to_rdf # monkey-patch N-Quads parser via it's underlying W3CNTriplesParser to keep source bnode id:s .. 
-from rdflib.plugins.parsers.ntriples import W3CNTriplesParser, bNode, r_nodeid +from rdflib.plugins.parsers.ntriples import W3CNTriplesParser, r_nodeid from rdflib.plugins.serializers.jsonld import from_rdf from rdflib.plugins.shared.jsonld.keys import CONTEXT, GRAPH @@ -15,7 +15,7 @@ def _preserving_nodeid(self, bnode_context=None): if not self.peek("_"): return False - return bNode(self.eat(r_nodeid).group(1)) + return BNode(self.eat(r_nodeid).group(1)) DEFAULT_PARSER_VERSION = 1.0 diff --git a/test/jsonld/test_api.py b/test/jsonld/test_api.py index 265c9fd5a..7879bfda6 100644 --- a/test/jsonld/test_api.py +++ b/test/jsonld/test_api.py @@ -1,5 +1,7 @@ # -*- coding: UTF-8 -*- -from rdflib.plugin import Parser, Serializer, register +from rdflib.parser import Parser +from rdflib.plugin import register +from rdflib.serializer import Serializer register("json-ld", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser") register("json-ld", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer") diff --git a/test/jsonld/test_compaction.py b/test/jsonld/test_compaction.py index 88bcce875..e76de5580 100644 --- a/test/jsonld/test_compaction.py +++ b/test/jsonld/test_compaction.py @@ -7,7 +7,8 @@ import pytest from rdflib import Graph -from rdflib.plugin import Serializer, register +from rdflib.plugin import register +from rdflib.serializer import Serializer register("json-ld", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer") diff --git a/test/jsonld/test_context.py b/test/jsonld/test_context.py index 034936d28..c26fcb0ca 100644 --- a/test/jsonld/test_context.py +++ b/test/jsonld/test_context.py @@ -134,7 +134,8 @@ def test_prefix_like_vocab(): # Mock external sources loading SOURCES: Dict[str, Dict[str, Any]] = {} -_source_to_json = context.source_to_json +# type error: Module "rdflib.plugins.shared.jsonld.context" does not explicitly export attribute "source_to_json" +_source_to_json = context.source_to_json # type: ignore[attr-defined] 
def _mock_source_loader(f): diff --git a/test/jsonld/test_named_graphs.py b/test/jsonld/test_named_graphs.py index 4c5446210..ca25b7954 100644 --- a/test/jsonld/test_named_graphs.py +++ b/test/jsonld/test_named_graphs.py @@ -1,6 +1,7 @@ # -*- coding: UTF-8 -*- from rdflib import * -from rdflib.plugin import Parser, register +from rdflib.parser import Parser +from rdflib.plugin import register register("json-ld", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser") register("application/ld+json", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser") diff --git a/test/test_dataset/test_dataset.py b/test/test_dataset/test_dataset.py index 3733a5568..373383f7c 100644 --- a/test/test_dataset/test_dataset.py +++ b/test/test_dataset/test_dataset.py @@ -7,7 +7,9 @@ import pytest from rdflib import URIRef, plugin -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph, Namespace +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph +from rdflib.namespace import Namespace +from rdflib.store import Store # Will also run SPARQLUpdateStore tests against local SPARQL1.1 endpoint if # available. 
This assumes SPARQL1.1 query/update endpoints running locally at @@ -26,7 +28,7 @@ pluginstores = [] -for s in plugin.plugins(None, plugin.Store): +for s in plugin.plugins(None, Store): if s.name in ("Memory", "Auditable", "Concurrent", "SPARQLStore"): continue # these are tested by default diff --git a/test/test_graph/test_graph.py b/test/test_graph/test_graph.py index 33898d97d..b133c2b54 100644 --- a/test/test_graph/test_graph.py +++ b/test/test_graph/test_graph.py @@ -10,7 +10,7 @@ import pytest -from rdflib import Graph, URIRef, plugin +from rdflib import Graph, URIRef from rdflib.exceptions import ParserError from rdflib.namespace import Namespace, NamespaceManager from rdflib.plugin import PluginException @@ -62,7 +62,7 @@ def test_property_namespace_manager() -> None: def get_store_names() -> Set[Optional[str]]: - names: Set[Optional[str]] = {*get_unique_plugin_names(plugin.Store)} + names: Set[Optional[str]] = {*get_unique_plugin_names(Store)} names.difference_update( { "default", diff --git a/test/test_graph/test_graph_context.py b/test/test_graph/test_graph_context.py index 9e0b712a0..f6ef5c3e4 100644 --- a/test/test_graph/test_graph_context.py +++ b/test/test_graph/test_graph_context.py @@ -7,6 +7,7 @@ import pytest from rdflib import BNode, ConjunctiveGraph, Graph, URIRef, plugin +from rdflib.store import Store class ContextTestCase(unittest.TestCase): @@ -367,7 +368,7 @@ def testTriples(self): pluginname = sys.argv[1] tests = 0 -for s in plugin.plugins(pluginname, plugin.Store): +for s in plugin.plugins(pluginname, Store): if s.name in ( "default", "Memory", diff --git a/test/test_graph/test_graph_http.py b/test/test_graph/test_graph_http.py index 97c64c3ac..4d5ed09e0 100644 --- a/test/test_graph/test_graph_http.py +++ b/test/test_graph/test_graph_http.py @@ -6,14 +6,12 @@ from test.utils.graph import cached_graph from test.utils.http import ( MOCK_HTTP_REQUEST_WILDCARD, - MockHTTPRequest, - ctx_http_handler, -) -from test.utils.httpservermock import 
( MethodName, + MockHTTPRequest, MockHTTPResponse, - ServedBaseHTTPServerMock, + ctx_http_handler, ) +from test.utils.httpservermock import ServedBaseHTTPServerMock from test.utils.wildcard import URL_PARSE_RESULT_WILDCARD from urllib.error import HTTPError diff --git a/test/test_namespace/test_namespace.py b/test/test_namespace/test_namespace.py index db06b51fb..cb3f26f50 100644 --- a/test/test_namespace/test_namespace.py +++ b/test/test_namespace/test_namespace.py @@ -5,7 +5,7 @@ import pytest from rdflib import DCTERMS -from rdflib.graph import BNode, Graph, Literal +from rdflib.graph import Graph from rdflib.namespace import ( FOAF, OWL, @@ -17,7 +17,7 @@ Namespace, URIPattern, ) -from rdflib.term import URIRef +from rdflib.term import BNode, Literal, URIRef class TestNamespace: diff --git a/test/test_roundtrip.py b/test/test_roundtrip.py index f06db6b85..5f233ea5a 100644 --- a/test/test_roundtrip.py +++ b/test/test_roundtrip.py @@ -14,8 +14,9 @@ import rdflib.compare from rdflib.graph import ConjunctiveGraph, Graph from rdflib.namespace import XSD -from rdflib.parser import create_input_source +from rdflib.parser import Parser, create_input_source from rdflib.plugins.parsers.notation3 import BadSyntax +from rdflib.serializer import Serializer from rdflib.util import guess_format logger = logging.getLogger(__name__) @@ -302,10 +303,8 @@ def roundtrip( def get_formats() -> Set[str]: global _formats if not _formats: - serializers = set( - x.name for x in rdflib.plugin.plugins(None, rdflib.plugin.Serializer) - ) - parsers = set(x.name for x in rdflib.plugin.plugins(None, rdflib.plugin.Parser)) + serializers = set(x.name for x in rdflib.plugin.plugins(None, Serializer)) + parsers = set(x.name for x in rdflib.plugin.plugins(None, Parser)) _formats = { format for format in parsers.intersection(serializers) if "/" not in format } diff --git a/test/test_sparql/test_service.py b/test/test_sparql/test_service.py index 3a8270545..61c317ac6 100644 --- 
a/test/test_sparql/test_service.py +++ b/test/test_sparql/test_service.py @@ -1,11 +1,8 @@ import json from contextlib import ExitStack from test.utils import helper -from test.utils.httpservermock import ( - MethodName, - MockHTTPResponse, - ServedBaseHTTPServerMock, -) +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import ( Dict, FrozenSet, diff --git a/test/test_store/test_store_sparqlstore.py b/test/test_store/test_store_sparqlstore.py index 625420473..5d8629354 100644 --- a/test/test_store/test_store_sparqlstore.py +++ b/test/test_store/test_store_sparqlstore.py @@ -3,11 +3,8 @@ import socket from http.server import BaseHTTPRequestHandler, HTTPServer from test.utils import helper -from test.utils.httpservermock import ( - MethodName, - MockHTTPResponse, - ServedBaseHTTPServerMock, -) +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock from threading import Thread from typing import Callable, ClassVar, Type from unittest.mock import patch @@ -16,7 +13,7 @@ from rdflib import Graph, Literal, URIRef from rdflib.namespace import FOAF, RDF, RDFS, XMLNS, XSD -from rdflib.plugins.stores.sparqlstore import SPARQLConnector +from rdflib.plugins.stores.sparqlconnector import SPARQLConnector class TestSPARQLStoreGraph: diff --git a/test/test_store/test_store_sparqlstore_query.py b/test/test_store/test_store_sparqlstore_query.py index da59f5447..b22585921 100644 --- a/test/test_store/test_store_sparqlstore_query.py +++ b/test/test_store/test_store_sparqlstore_query.py @@ -3,11 +3,8 @@ import itertools import logging from test.utils import GraphHelper -from test.utils.httpservermock import ( - MethodName, - MockHTTPResponse, - ServedBaseHTTPServerMock, -) +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import Dict, Iterable, List, 
Optional, Set, Tuple import pytest diff --git a/test/test_store/test_store_sparqlstore_sparqlconnector.py b/test/test_store/test_store_sparqlstore_sparqlconnector.py index b0bba9b75..992ef2b07 100644 --- a/test/test_store/test_store_sparqlstore_sparqlconnector.py +++ b/test/test_store/test_store_sparqlstore_sparqlconnector.py @@ -2,11 +2,8 @@ import json import logging -from test.utils.httpservermock import ( - MethodName, - MockHTTPResponse, - ServedBaseHTTPServerMock, -) +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import Optional import pytest diff --git a/test/test_store/test_store_sparqlupdatestore_mock.py b/test/test_store/test_store_sparqlupdatestore_mock.py index 1e8246be1..16af87743 100644 --- a/test/test_store/test_store_sparqlupdatestore_mock.py +++ b/test/test_store/test_store_sparqlupdatestore_mock.py @@ -1,8 +1,5 @@ -from test.utils.httpservermock import ( - MethodName, - MockHTTPResponse, - ServedBaseHTTPServerMock, -) +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import ClassVar from rdflib import Namespace diff --git a/test/utils/httpfileserver.py b/test/utils/httpfileserver.py index c9a9dc5a8..1989070a9 100644 --- a/test/utils/httpfileserver.py +++ b/test/utils/httpfileserver.py @@ -7,8 +7,7 @@ from functools import lru_cache from http.server import BaseHTTPRequestHandler, HTTPServer from pathlib import Path -from test.utils.http import HeadersT, MethodName, apply_headers_to -from test.utils.httpservermock import MockHTTPRequest +from test.utils.http import HeadersT, MethodName, MockHTTPRequest, apply_headers_to from typing import Dict, List, Optional, Sequence, Type from urllib.parse import parse_qs, urljoin, urlparse from uuid import uuid4 diff --git a/test/utils/test/test_httpservermock.py b/test/utils/test/test_httpservermock.py index e7d6e291f..fe147c9ec 100644 --- 
a/test/utils/test/test_httpservermock.py +++ b/test/utils/test/test_httpservermock.py @@ -1,10 +1,5 @@ -from test.utils.http import ctx_http_handler -from test.utils.httpservermock import ( - BaseHTTPServerMock, - MethodName, - MockHTTPResponse, - ServedBaseHTTPServerMock, -) +from test.utils.http import MethodName, MockHTTPResponse, ctx_http_handler +from test.utils.httpservermock import BaseHTTPServerMock, ServedBaseHTTPServerMock from urllib.error import HTTPError from urllib.request import Request, urlopen From 4ea14367ffa2c05e5c288842982dcc3cdd194dd6 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Wed, 12 Apr 2023 22:11:02 +0200 Subject: [PATCH 055/114] fix: eliminate bare `except:` (#2350) Replace bare `except:` with `except Exception`, there are some cases where it can be narrowed further, but this is already an improvement over the current situation. This is somewhat pursuant to eliminating [flakeheaven](https://github.com/flakeheaven/flakeheaven), as it no longer supports the latest version of flake8 [[ref](https://github.com/flakeheaven/flakeheaven/issues/132)]. But it also is just the right thing to do as bare exceptions can cause problems. 
--- rdflib/namespace/__init__.py | 2 +- rdflib/plugins/parsers/notation3.py | 4 ++-- rdflib/plugins/parsers/trix.py | 4 ++-- rdflib/plugins/serializers/longturtle.py | 4 ++-- rdflib/plugins/serializers/rdfxml.py | 2 +- rdflib/plugins/serializers/turtle.py | 4 ++-- rdflib/query.py | 2 +- rdflib/tools/csv2rdf.py | 2 +- rdflib/tools/rdf2dot.py | 4 ++-- rdflib/tools/rdfs2dot.py | 2 +- 10 files changed, 15 insertions(+), 15 deletions(-) diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index d96c21f35..8455e2b63 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -508,7 +508,7 @@ def normalizeUri(self, rdfTerm: str) -> str: if namespace not in self.__strie: insert_strie(self.__strie, self.__trie, str(namespace)) namespace = URIRef(str(namespace)) - except: + except Exception: if isinstance(rdfTerm, Variable): return "?%s" % rdfTerm else: diff --git a/rdflib/plugins/parsers/notation3.py b/rdflib/plugins/parsers/notation3.py index 08798076a..2a64be24f 100755 --- a/rdflib/plugins/parsers/notation3.py +++ b/rdflib/plugins/parsers/notation3.py @@ -353,7 +353,7 @@ def becauseSubexpression(*args: Any, **kargs: Any) -> None: def unicodeExpand(m: Match) -> str: try: return chr(int(m.group(1), 16)) - except: + except Exception: raise Exception("Invalid unicode code point: " + m.group(1)) @@ -1711,7 +1711,7 @@ def _unicodeEscape( ) try: return i + n, reg.sub(unicodeExpand, "\\" + prefix + argstr[i : i + n]) - except: + except Exception: raise BadSyntax( self._thisDoc, startline, diff --git a/rdflib/plugins/parsers/trix.py b/rdflib/plugins/parsers/trix.py index 187c6d45d..8baaf5ca4 100644 --- a/rdflib/plugins/parsers/trix.py +++ b/rdflib/plugins/parsers/trix.py @@ -105,7 +105,7 @@ def startElementNS( try: self.lang = attrs.getValue((str(XMLNS), "lang")) - except: + except Exception: # language not required - ignore pass try: @@ -122,7 +122,7 @@ def startElementNS( self.datatype = None try: self.lang = attrs.getValue((str(XMLNS), 
"lang")) - except: + except Exception: # language not required - ignore pass diff --git a/rdflib/plugins/serializers/longturtle.py b/rdflib/plugins/serializers/longturtle.py index 263604fac..ac2febdcf 100644 --- a/rdflib/plugins/serializers/longturtle.py +++ b/rdflib/plugins/serializers/longturtle.py @@ -124,7 +124,7 @@ def getQName(self, uri, gen_prefix=True): try: parts = self.store.compute_qname(uri, generate=gen_prefix) - except: + except Exception: # is the uri a namespace in itself? pfx = self.store.store.prefix(uri) @@ -245,7 +245,7 @@ def isValidList(self, l_): try: if self.store.value(l_, RDF.first) is None: return False - except: + except Exception: return False while l_: if l_ != RDF.nil and len(list(self.store.predicate_objects(l_))) != 2: diff --git a/rdflib/plugins/serializers/rdfxml.py b/rdflib/plugins/serializers/rdfxml.py index e3d9ec777..c5acc74ad 100644 --- a/rdflib/plugins/serializers/rdfxml.py +++ b/rdflib/plugins/serializers/rdfxml.py @@ -253,7 +253,7 @@ def subject(self, subject: IdentifiedNode, depth: int = 1): try: # type error: Argument 1 to "qname" of "NamespaceManager" has incompatible type "Optional[Node]"; expected "str" self.nm.qname(type) # type: ignore[arg-type] - except: + except Exception: type = None element = type or RDFVOC.Description diff --git a/rdflib/plugins/serializers/turtle.py b/rdflib/plugins/serializers/turtle.py index ff4cd164f..ad1182474 100644 --- a/rdflib/plugins/serializers/turtle.py +++ b/rdflib/plugins/serializers/turtle.py @@ -273,7 +273,7 @@ def getQName(self, uri, gen_prefix=True): try: parts = self.store.compute_qname(uri, generate=gen_prefix) - except: + except Exception: # is the uri a namespace in itself? 
pfx = self.store.store.prefix(uri) @@ -397,7 +397,7 @@ def isValidList(self, l_): try: if self.store.value(l_, RDF.first) is None: return False - except: + except Exception: return False while l_: if l_ != RDF.nil and len(list(self.store.predicate_objects(l_))) != 2: diff --git a/rdflib/query.py b/rdflib/query.py index e9c189017..261ffde9a 100644 --- a/rdflib/query.py +++ b/rdflib/query.py @@ -409,7 +409,7 @@ def __eq__(self, other: Any) -> bool: return self.vars == other.vars and self.bindings == other.bindings else: return self.graph == other.graph - except: + except Exception: return False diff --git a/rdflib/tools/csv2rdf.py b/rdflib/tools/csv2rdf.py index 267483ed5..fe740356a 100644 --- a/rdflib/tools/csv2rdf.py +++ b/rdflib/tools/csv2rdf.py @@ -414,7 +414,7 @@ def convert(self, csvreader): "%d rows, %d triples, elapsed %.2fs.\n" % (rows, self.triples, time.time() - start) ) - except: + except Exception: sys.stderr.write("Error processing line: %d\n" % rows) raise diff --git a/rdflib/tools/rdf2dot.py b/rdflib/tools/rdf2dot.py index 1a33ee264..0ca1fa1e0 100644 --- a/rdflib/tools/rdf2dot.py +++ b/rdflib/tools/rdf2dot.py @@ -98,7 +98,7 @@ def label(x, g): return l_ try: return g.namespace_manager.compute_qname(x)[2] - except: + except Exception: return x def formatliteral(l, g): @@ -113,7 +113,7 @@ def qname(x, g): try: q = g.compute_qname(x) return q[0] + ":" + q[2] - except: + except Exception: return x def color(p): diff --git a/rdflib/tools/rdfs2dot.py b/rdflib/tools/rdfs2dot.py index 69ecfba58..4e639b48d 100644 --- a/rdflib/tools/rdfs2dot.py +++ b/rdflib/tools/rdfs2dot.py @@ -87,7 +87,7 @@ def label(xx, grf): if lbl is None: try: lbl = grf.namespace_manager.compute_qname(xx)[2] - except: + except Exception: pass # bnodes and some weird URIs cannot be split return lbl From cbd61510ec581aee262bc2bb8ad95d94f7784842 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 14 Apr 2023 21:26:10 +0200 Subject: [PATCH 056/114] test: Eliminate flake8 errors in tests 
(#2353) Eliminate some occurrences of the following flake8 errors in tests: * E265 block comment should start with '# ' * E266 too many leading '#' for block comment * E402 module level import not at top of file * E712 comparison to False should be 'if cond is False:' or 'if not cond:' * E712 comparison to True should be 'if cond is True:' or 'if cond:' * E722 do not use bare 'except' * F401 ... imported but unused * F403 ... used; unable to detect undefined names * F405 ... may be undefined, or defined from star imports: ... * F541 f-string is missing placeholders * F841 local variable 'result' is assigned to but never used * N806 variable 'TEST_DIR' in function should be lowercase This is pursuant to eliminating [flakeheaven](https://github.com/flakeheaven/flakeheaven), as it no longer supports the latest version of flake8 [[ref](https://github.com/flakeheaven/flakeheaven/issues/132)]. --- test/jsonld/__init__.py | 1 - test/jsonld/test_api.py | 7 ------- test/jsonld/test_named_graphs.py | 7 +------ test/test_dataset/test_dataset.py | 2 +- test/test_graph/test_graph_formula.py | 2 +- test/test_issues/test_issue492.py | 3 --- test/test_issues/test_issue604.py | 2 +- test/test_literal/test_term.py | 2 +- test/test_misc/test_events.py | 4 ++-- test/test_namespace/test_namespace.py | 8 ++++---- test/test_parsers/test_parser_hext.py | 6 ++---- test/test_parsers/test_swap_n3.py | 6 ++---- test/test_serializers/test_serializer_hext.py | 5 +---- test/test_sparql/test_datetime_processing.py | 8 ++++---- test/test_store/test_store_sparqlupdatestore.py | 2 +- test/test_turtle_quoting.py | 3 +-- test/test_typing.py | 2 +- test/test_util.py | 6 +++--- test/test_w3c_spec/test_n3_w3c.py | 2 +- 19 files changed, 27 insertions(+), 51 deletions(-) diff --git a/test/jsonld/__init__.py b/test/jsonld/__init__.py index a7d8a6b02..b082da4f8 100644 --- a/test/jsonld/__init__.py +++ b/test/jsonld/__init__.py @@ -5,6 +5,5 @@ assert plugin assert serializer assert parser -import json 
__all__: List[str] = [] diff --git a/test/jsonld/test_api.py b/test/jsonld/test_api.py index 7879bfda6..5beab1fd9 100644 --- a/test/jsonld/test_api.py +++ b/test/jsonld/test_api.py @@ -1,11 +1,4 @@ # -*- coding: UTF-8 -*- -from rdflib.parser import Parser -from rdflib.plugin import register -from rdflib.serializer import Serializer - -register("json-ld", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser") -register("json-ld", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer") - from rdflib import Graph, Literal, URIRef diff --git a/test/jsonld/test_named_graphs.py b/test/jsonld/test_named_graphs.py index ca25b7954..1d1bd6265 100644 --- a/test/jsonld/test_named_graphs.py +++ b/test/jsonld/test_named_graphs.py @@ -1,10 +1,5 @@ # -*- coding: UTF-8 -*- -from rdflib import * -from rdflib.parser import Parser -from rdflib.plugin import register - -register("json-ld", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser") -register("application/ld+json", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser") +from rdflib import ConjunctiveGraph, Dataset, Graph, URIRef data = """ { diff --git a/test/test_dataset/test_dataset.py b/test/test_dataset/test_dataset.py index 373383f7c..18c2920ee 100644 --- a/test/test_dataset/test_dataset.py +++ b/test/test_dataset/test_dataset.py @@ -105,7 +105,7 @@ def get_dataset(request): else: try: os.remove(path) - except: + except Exception: pass diff --git a/test/test_graph/test_graph_formula.py b/test/test_graph/test_graph_formula.py index 6f1092ca3..32b3aef71 100644 --- a/test/test_graph/test_graph_formula.py +++ b/test/test_graph/test_graph_formula.py @@ -115,7 +115,7 @@ def checkFormulaStore(store="default", configString=None): os.unlink(path) else: g.store.destroy(configString) - except: + except Exception: g.close() if store == "SQLite": os.unlink(path) diff --git a/test/test_issues/test_issue492.py b/test/test_issues/test_issue492.py index 713ce7aca..83d2d938f 100644 --- 
a/test/test_issues/test_issue492.py +++ b/test/test_issues/test_issue492.py @@ -1,7 +1,4 @@ # test for https://github.com/RDFLib/rdflib/issues/492 - -#!/usr/bin/env python3 - import rdflib diff --git a/test/test_issues/test_issue604.py b/test/test_issues/test_issue604.py index d56629434..cb5aaac99 100644 --- a/test/test_issues/test_issue604.py +++ b/test/test_issues/test_issue604.py @@ -1,4 +1,4 @@ -from rdflib import * +from rdflib import RDF, BNode, Graph, Literal, Namespace from rdflib.collection import Collection diff --git a/test/test_literal/test_term.py b/test/test_literal/test_term.py index 506f1a3f7..ca2a972f3 100644 --- a/test/test_literal/test_term.py +++ b/test/test_literal/test_term.py @@ -253,7 +253,7 @@ def isclose(a, b, rel_tol=1e-09, abs_tol=0.0): if not case_passed: try: case_passed = isclose((case[1] + case[2].value), case[3].value) - except: + except Exception: pass if not case_passed: diff --git a/test/test_misc/test_events.py b/test/test_misc/test_events.py index c2654ab0c..7e6849ae6 100644 --- a/test/test_misc/test_events.py +++ b/test/test_misc/test_events.py @@ -60,5 +60,5 @@ def testEvents(self): assert c2["bob"] == "uncle" assert c3["bob"] == "uncle" del c3["bob"] - assert ("bob" in c1) == False - assert ("bob" in c2) == False + assert ("bob" in c1) is False + assert ("bob" in c2) is False diff --git a/test/test_namespace/test_namespace.py b/test/test_namespace/test_namespace.py index cb3f26f50..3f439133c 100644 --- a/test/test_namespace/test_namespace.py +++ b/test/test_namespace/test_namespace.py @@ -284,10 +284,10 @@ def test_expand_curie_exception_messages(self) -> None: ["curie", "expected_result"], [ ("ex:tarek", URIRef("urn:example:tarek")), - ("ex:", URIRef(f"urn:example:")), - ("ex:a", URIRef(f"urn:example:a")), - ("ex:a:b", URIRef(f"urn:example:a:b")), - ("ex:a:b:c", URIRef(f"urn:example:a:b:c")), + ("ex:", URIRef("urn:example:")), + ("ex:a", URIRef("urn:example:a")), + ("ex:a:b", URIRef("urn:example:a:b")), + ("ex:a:b:c", 
URIRef("urn:example:a:b:c")), ("ex", ValueError), ("em:tarek", ValueError), ("em:", ValueError), diff --git a/test/test_parsers/test_parser_hext.py b/test/test_parsers/test_parser_hext.py index f4d1184ac..5f4a180b7 100644 --- a/test/test_parsers/test_parser_hext.py +++ b/test/test_parsers/test_parser_hext.py @@ -1,7 +1,5 @@ -import sys from pathlib import Path -sys.path.append(str(Path(__file__).parent.parent.absolute())) from rdflib import ConjunctiveGraph, Dataset, Literal from rdflib.namespace import XSD @@ -116,8 +114,8 @@ def test_roundtrip(): try: cg = ConjunctiveGraph().parse(f, format="nt") # print(cg.serialize(format="n3")) - except: - print(f"Skipping: could not NT parse") + except Exception: + print("Skipping: could not NT parse") skipped += 1 skip = True if not skip: diff --git a/test/test_parsers/test_swap_n3.py b/test/test_parsers/test_swap_n3.py index cebb55ad9..e173b8452 100644 --- a/test/test_parsers/test_swap_n3.py +++ b/test/test_parsers/test_swap_n3.py @@ -1,10 +1,8 @@ import os +from test.data import TEST_DATA_DIR import pytest -maketrans = str.maketrans -from test.data import TEST_DATA_DIR - import rdflib """ @@ -116,7 +114,7 @@ def get_cases(): tfiles += files for tfile in set(tfiles): gname = tfile.split("/swap-n3/swap/test/")[1][:-3].translate( - maketrans("-/", "__") + str.maketrans("-/", "__") ) e = Envelope(gname, tfile) if gname in skiptests: diff --git a/test/test_serializers/test_serializer_hext.py b/test/test_serializers/test_serializer_hext.py index cae703966..2a75cc895 100644 --- a/test/test_serializers/test_serializer_hext.py +++ b/test/test_serializers/test_serializer_hext.py @@ -1,8 +1,5 @@ -import sys -from pathlib import Path - -sys.path.append(str(Path(__file__).parent.parent.absolute())) import json +from pathlib import Path from rdflib import ConjunctiveGraph, Dataset, Graph diff --git a/test/test_sparql/test_datetime_processing.py b/test/test_sparql/test_datetime_processing.py index 8cec5cca5..9fb0901a8 100644 --- 
a/test/test_sparql/test_datetime_processing.py +++ b/test/test_sparql/test_datetime_processing.py @@ -86,7 +86,7 @@ def test_dateTime_duration_subs(): f = io.StringIO(data) graph.parse(f, format="n3") - ## 1st Test Case + # 1st Test Case result1 = graph.query( """ @@ -117,7 +117,7 @@ def test_dateTime_duration_subs(): eq_(list(result1)[0][0], expected[0]) eq_(list(result1)[1][0], expected[1]) - ## 2nd Test Case + # 2nd Test Case result2 = graph.query( """ @@ -165,7 +165,7 @@ def test_dateTime_duration_add(): f = io.StringIO(data) graph.parse(f, format="n3") - ## 1st Test case + # 1st Test case result1 = graph.query( """ @@ -198,7 +198,7 @@ def test_dateTime_duration_add(): eq_(list(result1)[0][0], expected[0]) eq_(list(result1)[1][0], expected[1]) - ## 2nd Test case + # 2nd Test case result2 = graph.query( """ diff --git a/test/test_store/test_store_sparqlupdatestore.py b/test/test_store/test_store_sparqlupdatestore.py index c29a6ac6c..c55b3ac62 100644 --- a/test/test_store/test_store_sparqlupdatestore.py +++ b/test/test_store/test_store_sparqlupdatestore.py @@ -28,7 +28,7 @@ try: assert len(urlopen(HOST).read()) > 0 -except: +except Exception: pytest.skip(f"{HOST} is unavailable.", allow_module_level=True) diff --git a/test/test_turtle_quoting.py b/test/test_turtle_quoting.py index bdafd0713..14d82bca2 100644 --- a/test/test_turtle_quoting.py +++ b/test/test_turtle_quoting.py @@ -5,6 +5,7 @@ import itertools import logging +import re from typing import Callable, Dict, Iterable, List, Tuple import pytest @@ -28,8 +29,6 @@ "\\": "\\", } -import re - def make_unquote_correctness_pairs() -> List[Tuple[str, str]]: """ diff --git a/test/test_typing.py b/test/test_typing.py index 7bce69840..1b9113025 100644 --- a/test/test_typing.py +++ b/test/test_typing.py @@ -129,7 +129,7 @@ def test_rdflib_query_exercise() -> None: assert python_two == 2 python_true: bool = literal_true.toPython() - assert python_true == True + assert python_true is True python_iri: str = 
kb_https_uriref.toPython() assert python_iri == "https://example.org/kb/y" diff --git a/test/test_util.py b/test/test_util.py index c842bc928..37d1db291 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -336,9 +336,9 @@ def test_util_from_n3_not_escapes(self, string: str) -> None: @pytest.mark.parametrize( "string", [ - (f"j\\366rn"), - (f"\\"), - (f"\\0"), + ("j\\366rn"), + ("\\"), + ("\\0"), ], ) def test_util_from_n3_not_escapes_xf(self, string: str) -> None: diff --git a/test/test_w3c_spec/test_n3_w3c.py b/test/test_w3c_spec/test_n3_w3c.py index 436e07901..61b851a70 100644 --- a/test/test_w3c_spec/test_n3_w3c.py +++ b/test/test_w3c_spec/test_n3_w3c.py @@ -55,7 +55,7 @@ def n3(test: RDFTest): res.serialize(), ) - except: + except Exception: if test.syntax: raise From 0eee4ff288aa5d0977648830c28231c5b6fde439 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 18:43:44 +0200 Subject: [PATCH 057/114] build(deps-dev): bump pytest from 7.3.0 to 7.3.1 (#2359) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.3.0 to 7.3.1. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.0...7.3.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4a693ab95..130dc12f3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -980,14 +980,14 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.3.0" +version = "7.3.1" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.0-py3-none-any.whl", hash = "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201"}, - {file = "pytest-7.3.0.tar.gz", hash = "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d"}, + {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, + {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, ] [package.dependencies] From 91f70007c70c66d7519f740b200b80f146e40f3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 18:44:05 +0200 Subject: [PATCH 058/114] build(deps-dev): bump flakeheaven from 3.2.1 to 3.3.0 (#2358) Bumps [flakeheaven](https://github.com/flakeheaven/flakeheaven) from 3.2.1 to 3.3.0. - [Release notes](https://github.com/flakeheaven/flakeheaven/releases) - [Changelog](https://github.com/flakeheaven/flakeheaven/blob/main/CHANGELOG.md) - [Commits](https://github.com/flakeheaven/flakeheaven/compare/3.2.1...3.3.0) --- updated-dependencies: - dependency-name: flakeheaven dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 130dc12f3..8d5291e5f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -339,14 +339,14 @@ pyflakes = ">=2.4.0,<2.5.0" [[package]] name = "flakeheaven" -version = "3.2.1" +version = "3.3.0" description = "FlakeHeaven is a [Flake8](https://gitlab.com/pycqa/flake8) wrapper to make it cool." category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "flakeheaven-3.2.1-py3-none-any.whl", hash = "sha256:fdae542414a8cd327dbbc969bb18d5972379570f6562af21b4a83f67bdd6b87c"}, - {file = "flakeheaven-3.2.1.tar.gz", hash = "sha256:f2d54aedd98b817e94c8c0fcc0da1230b43dbf911ce38aa412d00eb5db6fb71d"}, + {file = "flakeheaven-3.3.0-py3-none-any.whl", hash = "sha256:ae246197a178845b30b63fc03023f7ba925cc84cc96314ec19807dafcd6b39a3"}, + {file = "flakeheaven-3.3.0.tar.gz", hash = "sha256:eb07860e028ff8dd56cce742c4766624a37a4ce397fd34300254ab623d13047b"}, ] [package.dependencies] From b6fe3b19abbb20ba6f4bb0a59dfb104f179cf54b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 18:44:24 +0200 Subject: [PATCH 059/114] build(deps): bump library/python in /docker/unstable (#2360) Bumps library/python from `5a67c38` to `286f2f1`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index f73094bf4..aff1d8767 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:5a67c38a7c28ad09d08f4e153280023a2df77189b55af7804d7ceb96fee6a68f +FROM docker.io/library/python:3.11.3-slim@sha256:286f2f1d6f2f730a44108656afb04b131504b610a6cb2f3413918e98dabba67e # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From a13b0abb88b0ca7fd3833758e1a91cfcf1cd891b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 18:44:35 +0200 Subject: [PATCH 060/114] build(deps-dev): bump sphinx-autodoc-typehints from 1.22 to 1.23.0 (#2361) Bumps [sphinx-autodoc-typehints](https://github.com/tox-dev/sphinx-autodoc-typehints) from 1.22 to 1.23.0. - [Release notes](https://github.com/tox-dev/sphinx-autodoc-typehints/releases) - [Changelog](https://github.com/tox-dev/sphinx-autodoc-typehints/blob/main/CHANGELOG.md) - [Commits](https://github.com/tox-dev/sphinx-autodoc-typehints/compare/1.22...1.23.0) --- updated-dependencies: - dependency-name: sphinx-autodoc-typehints dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8d5291e5f..9b64f1548 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1184,22 +1184,22 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx-autodoc-typehints" -version = "1.22" +version = "1.23.0" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "sphinx_autodoc_typehints-1.22-py3-none-any.whl", hash = "sha256:ef4a8b9d52de66065aa7d3adfabf5a436feb8a2eff07c2ddc31625d8807f2b69"}, - {file = "sphinx_autodoc_typehints-1.22.tar.gz", hash = "sha256:71fca2d5eee9b034204e4c686ab20b4d8f5eb9409396216bcae6c87c38e18ea6"}, + {file = "sphinx_autodoc_typehints-1.23.0-py3-none-any.whl", hash = "sha256:ac099057e66b09e51b698058ba7dd76e57e1fe696cd91b54e121d3dad188f91d"}, + {file = "sphinx_autodoc_typehints-1.23.0.tar.gz", hash = "sha256:5d44e2996633cdada499b6d27a496ddf9dbc95dd1f0f09f7b37940249e61f6e9"}, ] [package.dependencies] sphinx = ">=5.3" [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.21)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.5)", "diff-cover (>=7.3)", "nptyping (>=2.4.1)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.4)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23.4)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "nptyping (>=2.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.5)"] type-comment = ["typed-ast (>=1.5.4)"] [[package]] From a074e3c2712e4674e784e9e8cbb39659343793fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> 
Date: Wed, 17 May 2023 18:44:49 +0200 Subject: [PATCH 061/114] build(deps): bump library/python in /docker/latest (#2362) Bumps library/python from `5a67c38` to `286f2f1`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index 5d82c7a58..d7a75f572 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:5a67c38a7c28ad09d08f4e153280023a2df77189b55af7804d7ceb96fee6a68f +FROM docker.io/library/python:3.11.3-slim@sha256:286f2f1d6f2f730a44108656afb04b131504b610a6cb2f3413918e98dabba67e COPY docker/latest/requirements.txt /var/tmp/build/ From 2c8d1e13b812a28078b0b31f58386057f54bedb4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 18:45:10 +0200 Subject: [PATCH 062/114] build(deps): bump importlib-metadata from 6.3.0 to 6.6.0 (#2370) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.3.0 to 6.6.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/CHANGES.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.3.0...v6.6.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9b64f1548..4f479c6cc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -408,14 +408,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.3.0" +version = "6.6.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.3.0-py3-none-any.whl", hash = "sha256:8f8bd2af397cf33bd344d35cfe7f489219b7d14fc79a3f854b75b8417e9226b0"}, - {file = "importlib_metadata-6.3.0.tar.gz", hash = "sha256:23c2bcae4762dfb0bbe072d358faec24957901d75b6c4ab11172c0c982532402"}, + {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, + {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, ] [package.dependencies] From e0b3152a799e7bb04770fcddf010b22c0b05379e Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Wed, 17 May 2023 18:51:55 +0200 Subject: [PATCH 063/114] fix: HTTP 308 Permanent Redirect status code handling (#2389) Change the handling of HTTP status code 308 to behave more like `urllib.request.HTTPRedirectHandler`, most critically, the new 308 handling will create a new `urllib.request.Request` object with the new URL, which will prevent state from being carried over from the original request. One case where this is important is when the domain name changes, for example, when the original URL is `http://www.w3.org/ns/adms.ttl` and the redirect URL is `https://uri.semic.eu/w3c/ns/adms.ttl`. 
With the previous behaviour, the redirect would contain a `Host` header with the value `www.w3.org` instead of `uri.semic.eu` because the `Host` header is placed in `Request.unredirected_hdrs` and takes precedence over the `Host` header in `Request.headers`. Other changes: - Only handle HTTP status code 308 on Python versions before 3.11 as Python 3.11 will handle 308 by default [[ref](https://docs.python.org/3.11/whatsnew/changelog.html#id128)]. - Move code which uses `http://www.w3.org/ns/adms.ttl` and `http://www.w3.org/ns/adms.rdf` out of `test_guess_format_for_parse` into a separate parameterized test, which instead uses the embedded http server. This allows the test to fully control the `Content-Type` header in the response instead of relying on the value that the server is sending. This is needed because the server is sending `Content-Type: text/plain` for the `adms.ttl` file, which is not a valid RDF format, and the test is expecting `Content-Type: text/turtle`. Fixes: - . --- rdflib/_networking.py | 117 +++++++++ rdflib/parser.py | 19 +- test/conftest.py | 34 ++- test/data.py | 18 ++ test/data/defined_namespaces/adms.rdf | 277 +++++++++++++++++++++ test/data/defined_namespaces/adms.ttl | 175 +++++++++++++ test/data/defined_namespaces/rdfs.rdf | 130 ++++++++++ test/data/fetcher.py | 15 ++ test/test_graph/test_graph.py | 61 ++++- test/test_graph/test_graph_redirect.py | 45 ++++ test/test_misc/test_input_source.py | 17 +- test/test_misc/test_networking_redirect.py | 217 ++++++++++++++++ test/utils/exceptions.py | 29 +++ test/utils/http.py | 9 + test/utils/httpfileserver.py | 10 +- 15 files changed, 1121 insertions(+), 52 deletions(-) create mode 100644 rdflib/_networking.py create mode 100644 test/data/defined_namespaces/adms.rdf create mode 100644 test/data/defined_namespaces/adms.ttl create mode 100644 test/data/defined_namespaces/rdfs.rdf create mode 100644 test/test_graph/test_graph_redirect.py create mode 100644 test/test_misc/test_networking_redirect.py 
create mode 100644 test/utils/exceptions.py diff --git a/rdflib/_networking.py b/rdflib/_networking.py new file mode 100644 index 000000000..311096a89 --- /dev/null +++ b/rdflib/_networking.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +import string +import sys +from typing import Dict +from urllib.error import HTTPError +from urllib.parse import quote as urlquote +from urllib.parse import urljoin, urlsplit +from urllib.request import HTTPRedirectHandler, Request, urlopen +from urllib.response import addinfourl + + +def _make_redirect_request(request: Request, http_error: HTTPError) -> Request: + """ + Create a new request object for a redirected request. + + The logic is based on `urllib.request.HTTPRedirectHandler` from `this commit _`. + + :param request: The original request that resulted in the redirect. + :param http_error: The response to the original request that indicates a + redirect should occur and contains the new location. + :return: A new request object to the location indicated by the response. + :raises HTTPError: the supplied ``http_error`` if the redirect request + cannot be created. + :raises ValueError: If the response code is `None`. + :raises ValueError: If the response does not contain a ``Location`` header + or the ``Location`` header is not a string. + :raises HTTPError: If the scheme of the new location is not ``http``, + ``https``, or ``ftp``. + :raises HTTPError: If there are too many redirects or a redirect loop. + """ + new_url = http_error.headers.get("Location") + if new_url is None: + raise http_error + if not isinstance(new_url, str): + raise ValueError(f"Location header {new_url!r} is not a string") + + new_url_parts = urlsplit(new_url) + + # For security reasons don't allow redirection to anything other than http, + # https or ftp. 
+ if new_url_parts.scheme not in ("http", "https", "ftp", ""): + raise HTTPError( + new_url, + http_error.code, + f"{http_error.reason} - Redirection to url {new_url!r} is not allowed", + http_error.headers, + http_error.fp, + ) + + # http.client.parse_headers() decodes as ISO-8859-1. Recover the original + # bytes and percent-encode non-ASCII bytes, and any special characters such + # as the space. + new_url = urlquote(new_url, encoding="iso-8859-1", safe=string.punctuation) + new_url = urljoin(request.full_url, new_url) + + # XXX Probably want to forget about the state of the current + # request, although that might interact poorly with other + # handlers that also use handler-specific request attributes + content_headers = ("content-length", "content-type") + newheaders = { + k: v for k, v in request.headers.items() if k.lower() not in content_headers + } + new_request = Request( + new_url, + headers=newheaders, + origin_req_host=request.origin_req_host, + unverifiable=True, + ) + + visited: Dict[str, int] + if hasattr(request, "redirect_dict"): + visited = request.redirect_dict + if ( + visited.get(new_url, 0) >= HTTPRedirectHandler.max_repeats + or len(visited) >= HTTPRedirectHandler.max_redirections + ): + raise HTTPError( + request.full_url, + http_error.code, + HTTPRedirectHandler.inf_msg + http_error.reason, + http_error.headers, + http_error.fp, + ) + else: + visited = {} + setattr(request, "redirect_dict", visited) + + setattr(new_request, "redirect_dict", visited) + visited[new_url] = visited.get(new_url, 0) + 1 + return new_request + + +def _urlopen(request: Request) -> addinfourl: + """ + This is a shim for `urlopen` that handles HTTP redirects with status code + 308 (Permanent Redirect). + + This function should be removed once all supported versions of Python + handles the 308 HTTP status code. + + :param request: The request to open. + :return: The response to the request. 
+ """ + try: + return urlopen(request) + except HTTPError as error: + if error.code == 308 and sys.version_info < (3, 11): + # HTTP response code 308 (Permanent Redirect) is not supported by python + # versions older than 3.11. See and + # for more details. + # This custom error handling should be removed once all supported + # versions of Python handles 308. + new_request = _make_redirect_request(request, error) + return _urlopen(new_request) + else: + raise diff --git a/rdflib/parser.py b/rdflib/parser.py index 6cf6f1da6..a35c1d825 100644 --- a/rdflib/parser.py +++ b/rdflib/parser.py @@ -27,13 +27,13 @@ Tuple, Union, ) -from urllib.error import HTTPError from urllib.parse import urljoin -from urllib.request import Request, url2pathname, urlopen +from urllib.request import Request, url2pathname from xml.sax import xmlreader import rdflib.util from rdflib import __version__ +from rdflib._networking import _urlopen from rdflib.namespace import Namespace from rdflib.term import URIRef @@ -267,21 +267,6 @@ def __init__(self, system_id: Optional[str] = None, format: Optional[str] = None req = Request(system_id, None, myheaders) # type: ignore[arg-type] - def _urlopen(req: Request) -> Any: - try: - return urlopen(req) - except HTTPError as ex: - # 308 (Permanent Redirect) is not supported by current python version(s) - # See https://bugs.python.org/issue40321 - # This custom error handling should be removed once all - # supported versions of python support 308. 
- if ex.code == 308: - # type error: Incompatible types in assignment (expression has type "Optional[Any]", variable has type "str") - req.full_url = ex.headers.get("Location") # type: ignore[assignment] - return _urlopen(req) - else: - raise - response: addinfourl = _urlopen(req) self.url = response.geturl() # in case redirections took place self.links = self.get_links(response) diff --git a/test/conftest.py b/test/conftest.py index 2f61c9fe3..38f4dabc1 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -44,22 +44,44 @@ def rdfs_graph() -> Graph: return Graph().parse(TEST_DATA_DIR / "defined_namespaces/rdfs.ttl", format="turtle") +_ServedBaseHTTPServerMocks = Tuple[ServedBaseHTTPServerMock, ServedBaseHTTPServerMock] + + @pytest.fixture(scope="session") -def _session_function_httpmock() -> Generator[ServedBaseHTTPServerMock, None, None]: +def _session_function_httpmocks() -> Generator[_ServedBaseHTTPServerMocks, None, None]: """ This fixture is session scoped, but it is reset for each function in :func:`function_httpmock`. This should not be used directly. """ - with ServedBaseHTTPServerMock() as httpmock: - yield httpmock + with ServedBaseHTTPServerMock() as httpmock_a, ServedBaseHTTPServerMock() as httpmock_b: + yield httpmock_a, httpmock_b @pytest.fixture(scope="function") def function_httpmock( - _session_function_httpmock: ServedBaseHTTPServerMock, + _session_function_httpmocks: _ServedBaseHTTPServerMocks, ) -> Generator[ServedBaseHTTPServerMock, None, None]: - _session_function_httpmock.reset() - yield _session_function_httpmock + """ + HTTP server mock that is reset for each test function. + """ + (mock, _) = _session_function_httpmocks + mock.reset() + yield mock + + +@pytest.fixture(scope="function") +def function_httpmocks( + _session_function_httpmocks: _ServedBaseHTTPServerMocks, +) -> Generator[Tuple[ServedBaseHTTPServerMock, ServedBaseHTTPServerMock], None, None]: + """ + Alternative HTTP server mock that is reset for each test function. 
+ + This exists in case a tests needs to work with two different HTTP servers. + """ + (mock_a, mock_b) = _session_function_httpmocks + mock_a.reset() + mock_b.reset() + yield mock_a, mock_b @pytest.fixture(scope="session", autouse=True) diff --git a/test/data.py b/test/data.py index f1271aaed..779c522ae 100644 --- a/test/data.py +++ b/test/data.py @@ -1,6 +1,7 @@ from pathlib import Path from rdflib import URIRef +from rdflib.graph import Graph TEST_DIR = Path(__file__).parent TEST_DATA_DIR = TEST_DIR / "data" @@ -19,3 +20,20 @@ context0 = URIRef("urn:example:context-0") context1 = URIRef("urn:example:context-1") context2 = URIRef("urn:example:context-2") + + +simple_triple_graph = Graph().add( + ( + URIRef("http://example.org/subject"), + URIRef("http://example.org/predicate"), + URIRef("http://example.org/object"), + ) +) +""" +A simple graph with a single triple. This is equivalent to the following RDF files: + +* ``test/data/variants/simple_triple.nq`` +* ``test/data/variants/simple_triple.nt`` +* ``test/data/variants/simple_triple.ttl`` +* ``test/data/variants/simple_triple.xml`` +""" diff --git a/test/data/defined_namespaces/adms.rdf b/test/data/defined_namespaces/adms.rdf new file mode 100644 index 000000000..cb56922af --- /dev/null +++ b/test/data/defined_namespaces/adms.rdf @@ -0,0 +1,277 @@ + + + + 2023-04-05 + + + + + Semantic Interoperability Community (SEMIC) + + + adms + adms + + + Bert + Van Nuffelen + + + + TenForce + + + + + + + Natasa + Sofou + + + + + Pavlina + Fragkou + + + SEMIC EU + + + + + + + Makx + Dekkers + + + + + Pavlina + Fragkou + + + SEMIC EU + + + + + + + An abstract entity that reflects the intellectual content of the asset and represents those characteristics of the asset that are independent of its physical embodiment. 
This abstract entity combines the FRBR entities work (a distinct intellectual or artistic creation) and expression (the intellectual or artistic realization of a work) + + Asset + + + A particular physical embodiment of an Asset, which is an example of the FRBR entity manifestation (the physical embodiment of an expression of a work). + + Asset Distribution + + + A system or service that provides facilities for storage and maintenance of descriptions of Assets and Asset Distributions, and functionality that allows users to search and access these descriptions. An Asset Repository will typically contain descriptions of several Assets and related Asset Distributions. + + Asset repository + + + This is based on the UN/CEFACT Identifier class. + + Identifier + + + Links a resource to an adms:Identifier class. + + + identifier + + + + An Asset that is contained in the Asset being described, e.g. when there are several vocabularies defined in a single document. + + + included asset + + + + The interoperability level for which the Asset is relevant. + + + interoperability level + + + + A link to the current or latest version of the Asset. + + + last + + + + + A link to the next version of the Asset. + + + next + + + + + A link to the previous version of the Asset. + + + prev + + + + + More information about the format in which an Asset Distribution is released. This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique). + + + representation technique + + + + Links to a sample of an Asset (which is itself an Asset). + + + sample + + + + The name of the agency that issued the identifier. + + + schema agency + + + + The status of the Asset in the context of a particular workflow process. + + + status + + + + A schema according to which the Asset Repository can provide data about its content, e.g. ADMS. + + + supported schema + + + + Links Assets that are translations of each other. 
+ + + translation + + + + A description of changes between this version and the previous version of the Asset. + + + version notes + + + diff --git a/test/data/defined_namespaces/adms.ttl b/test/data/defined_namespaces/adms.ttl new file mode 100644 index 000000000..865611010 --- /dev/null +++ b/test/data/defined_namespaces/adms.ttl @@ -0,0 +1,175 @@ +@prefix rdf: . + + + "2023-04-05" ; + ; + [ + ; + "Semantic Interoperability Community (SEMIC)" + ] ; + a ; + "adms"@en, "adms"@nl ; + [ + a ; + "Bert" ; + "Van Nuffelen" ; + ; + [ + "TenForce" + ] + ], [ + a ; + "Natasa" ; + "Sofou" + ], [ + a ; + "Pavlina" ; + "Fragkou" ; + [ + "SEMIC EU" + ] + ], [ + a ; + "Makx" ; + "Dekkers" + ] ; + [ + a ; + "Pavlina" ; + "Fragkou" ; + [ + "SEMIC EU" + ] + ] . + + + a ; + "An abstract entity that reflects the intellectual content of the asset and represents those characteristics of the asset that are independent of its physical embodiment. This abstract entity combines the FRBR entities work (a distinct intellectual or artistic creation) and expression (the intellectual or artistic realization of a work)"@en ; + ; + "Asset"@en . + + + a ; + "A particular physical embodiment of an Asset, which is an example of the FRBR entity manifestation (the physical embodiment of an expression of a work)."@en ; + ; + "Asset Distribution"@en . + + + a ; + "A system or service that provides facilities for storage and maintenance of descriptions of Assets and Asset Distributions, and functionality that allows users to search and access these descriptions. An Asset Repository will typically contain descriptions of several Assets and related Asset Distributions."@en ; + ; + "Asset repository"@en . + + + a ; + "This is based on the UN/CEFACT Identifier class."@en ; + ; + "Identifier"@en . + + + a ; + "Links a resource to an adms:Identifier class."@en ; + ; + ; + "identifier"@en ; + . + + + a ; + "An Asset that is contained in the Asset being described, e.g. 
when there are several vocabularies defined in a single document."@en ; + ; + ; + "included asset"@en ; + . + + + a ; + "The interoperability level for which the Asset is relevant."@en ; + ; + ; + "interoperability level"@en ; + . + + + a ; + "A link to the current or latest version of the Asset."@en ; + ; + ; + "last"@en ; + ; + . + + + a ; + "A link to the next version of the Asset."@en ; + ; + ; + "next"@en ; + ; + . + + + a ; + "A link to the previous version of the Asset."@en ; + ; + ; + "prev"@en ; + ; + . + + + a ; + "More information about the format in which an Asset Distribution is released. This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)."@en ; + ; + ; + "representation technique"@en ; + . + + + a ; + "Links to a sample of an Asset (which is itself an Asset)."@en ; + ; + ; + "sample"@en ; + . + + + a ; + "The name of the agency that issued the identifier."@en ; + ; + ; + "schema agency"@en ; + . + + + a ; + "The status of the Asset in the context of a particular workflow process."@en ; + ; + ; + "status"@en ; + . + + + a ; + "A schema according to which the Asset Repository can provide data about its content, e.g. ADMS."@en ; + ; + ; + "supported schema"@en ; + . + + + a ; + "Links Assets that are translations of each other."@en ; + ; + ; + "translation"@en ; + . + + + a ; + "A description of changes between this version and the previous version of the Asset."@en ; + ; + ; + "version notes"@en ; + . + diff --git a/test/data/defined_namespaces/rdfs.rdf b/test/data/defined_namespaces/rdfs.rdf new file mode 100644 index 000000000..bf17bab06 --- /dev/null +++ b/test/data/defined_namespaces/rdfs.rdf @@ -0,0 +1,130 @@ + + + + + + + Resource + The class resource, everything. + + + + + Class + The class of classes. + + + + + + subClassOf + The subject is a subclass of a class. + + + + + + + subPropertyOf + The subject is a subproperty of a property. 
+ + + + + + + comment + A description of the subject resource. + + + + + + + label + A human-readable name for the subject. + + + + + + + domain + A domain of the subject property. + + + + + + + range + A range of the subject property. + + + + + + + seeAlso + Further information about the subject resource. + + + + + + + + isDefinedBy + The defininition of the subject resource. + + + + + + + Literal + The class of literal values, eg. textual strings and integers. + + + + + + Container + + The class of RDF containers. + + + + + ContainerMembershipProperty + The class of container membership properties, rdf:_1, rdf:_2, ..., + all of which are sub-properties of 'member'. + + + + + + member + A member of the subject resource. + + + + + + + Datatype + The class of RDF datatypes. + + + + + + + + diff --git a/test/data/fetcher.py b/test/data/fetcher.py index 7c9e4ff0c..1ea8e337c 100755 --- a/test/data/fetcher.py +++ b/test/data/fetcher.py @@ -248,6 +248,21 @@ def _member_io( ), local_path=(DATA_PATH / "defined_namespaces/rdfs.ttl"), ), + FileResource( + remote=Request( + "http://www.w3.org/2000/01/rdf-schema#", + headers={"Accept": "application/rdf+xml"}, + ), + local_path=(DATA_PATH / "defined_namespaces/rdfs.rdf"), + ), + FileResource( + remote=Request("http://www.w3.org/ns/adms.rdf"), + local_path=(DATA_PATH / "defined_namespaces/adms.rdf"), + ), + FileResource( + remote=Request("http://www.w3.org/ns/adms.ttl"), + local_path=(DATA_PATH / "defined_namespaces/adms.ttl"), + ), FileResource( remote=Request("https://www.w3.org/ns/rdftest.ttl"), local_path=(DATA_PATH / "defined_namespaces/rdftest.ttl"), diff --git a/test/test_graph/test_graph.py b/test/test_graph/test_graph.py index b133c2b54..289d577ab 100644 --- a/test/test_graph/test_graph.py +++ b/test/test_graph/test_graph.py @@ -1,11 +1,13 @@ # -*- coding: utf-8 -*- import logging import os +from contextlib import ExitStack from pathlib import Path from test.data import TEST_DATA_DIR, bob, cheese, hates, likes, michel, 
pizza, tarek from test.utils import GraphHelper, get_unique_plugin_names +from test.utils.exceptions import ExceptionChecker from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource -from typing import Callable, Optional, Set +from typing import Callable, Optional, Set, Tuple, Union from urllib.error import HTTPError, URLError import pytest @@ -342,14 +344,6 @@ def test_guess_format_for_parse( # only getting HTML with pytest.raises(PluginException): graph.parse(location=file_info.request_url) - - try: - graph.parse(location="http://www.w3.org/ns/adms.ttl") - graph.parse(location="http://www.w3.org/ns/adms.rdf") - except (URLError, HTTPError): - # this endpoint is currently not available, ignore this test. - pass - try: # persistent Australian Government online RDF resource without a file-like ending graph.parse(location="https://linked.data.gov.au/def/agrif?_format=text/turtle") @@ -358,6 +352,55 @@ def test_guess_format_for_parse( pass +@pytest.mark.parametrize( + ("file", "content_type", "expected_result"), + ( + (TEST_DATA_DIR / "defined_namespaces/adms.rdf", "application/rdf+xml", 132), + (TEST_DATA_DIR / "defined_namespaces/adms.ttl", "text/turtle", 132), + (TEST_DATA_DIR / "defined_namespaces/adms.ttl", None, 132), + ( + TEST_DATA_DIR / "defined_namespaces/adms.rdf", + None, + ExceptionChecker( + ParserError, + r"Could not guess RDF format .* from file extension so tried Turtle", + ), + ), + ), +) +def test_guess_format_for_parse_http( + make_graph: GraphFactory, + http_file_server: HTTPFileServer, + file: Path, + content_type: Optional[str], + expected_result: Union[int, ExceptionChecker], +) -> None: + graph = make_graph() + headers: Tuple[Tuple[str, str], ...] 
= tuple() + if content_type is not None: + headers = (("Content-Type", content_type),) + + file_info = http_file_server.add_file_with_caching( + ProtoFileResource(headers, file), + suffix=f"/{file.name}", + ) + catcher: Optional[pytest.ExceptionInfo[Exception]] = None + + assert 0 == len(graph) + with ExitStack() as exit_stack: + if isinstance(expected_result, ExceptionChecker): + catcher = exit_stack.enter_context(pytest.raises(expected_result.type)) + graph.parse(location=file_info.request_url) + + if catcher is not None: + # assert catcher.value is not None + assert isinstance(expected_result, ExceptionChecker) + logging.debug("graph = %s", list(graph.triples((None, None, None)))) + else: + assert isinstance(expected_result, int) + assert expected_result == len(graph) + + def test_parse_file_uri(make_graph: GraphFactory): EG = Namespace("http://example.org/#") g = make_graph() diff --git a/test/test_graph/test_graph_redirect.py b/test/test_graph/test_graph_redirect.py new file mode 100644 index 000000000..c61adbc59 --- /dev/null +++ b/test/test_graph/test_graph_redirect.py @@ -0,0 +1,45 @@ +from test.data import TEST_DATA_DIR, simple_triple_graph +from test.utils import GraphHelper +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock +from typing import Tuple +from urllib.parse import urlparse + +from rdflib.graph import Graph + + +def test_graph_redirect_new_host( + function_httpmocks: Tuple[ServedBaseHTTPServerMock, ServedBaseHTTPServerMock] +) -> None: + """ + Redirect to new host results in a request with the right Host header + parameter. 
+ """ + + mock_a, mock_b = function_httpmocks + + mock_a.responses[MethodName.GET].append( + MockHTTPResponse( + 308, + "Permanent Redirect", + b"", + {"Location": [f"{mock_b.url}/b/data.ttl"]}, + ) + ) + + mock_b.responses[MethodName.GET].append( + MockHTTPResponse( + 200, + "OK", + (TEST_DATA_DIR / "variants" / "simple_triple.ttl").read_bytes(), + {"Content-Type": ["text/turtle"]}, + ) + ) + + graph = Graph() + graph.parse(location=f"{mock_a.url}/a/data.ttl") + GraphHelper.assert_sets_equals(graph, simple_triple_graph) + for mock in function_httpmocks: + assert 1 == len(mock.requests[MethodName.GET]) + for request in mock.requests[MethodName.GET]: + assert request.headers["Host"] == urlparse(mock.url).netloc diff --git a/test/test_misc/test_input_source.py b/test/test_misc/test_input_source.py index f3da062bc..90e6e238a 100644 --- a/test/test_misc/test_input_source.py +++ b/test/test_misc/test_input_source.py @@ -11,6 +11,7 @@ # from itertools import product from pathlib import Path from test.utils import GraphHelper +from test.utils.exceptions import ExceptionChecker from test.utils.httpfileserver import ( HTTPFileInfo, HTTPFileServer, @@ -27,7 +28,6 @@ Generic, Iterable, Optional, - Pattern, TextIO, Tuple, Type, @@ -251,21 +251,6 @@ def call_create_input_source( yield input_source -@dataclass -class ExceptionChecker: - type: Type[Exception] - pattern: Optional[Pattern[str]] = None - - def check(self, exception: Exception) -> None: - try: - assert isinstance(exception, self.type) - if self.pattern is not None: - assert self.pattern.match(f"{exception}") - except Exception: - logging.error("problem checking exception", exc_info=exception) - raise - - AnyT = TypeVar("AnyT") diff --git a/test/test_misc/test_networking_redirect.py b/test/test_misc/test_networking_redirect.py new file mode 100644 index 000000000..acde10d71 --- /dev/null +++ b/test/test_misc/test_networking_redirect.py @@ -0,0 +1,217 @@ +from contextlib import ExitStack +from copy import deepcopy 
+from test.utils.exceptions import ExceptionChecker +from test.utils.http import headers_as_message as headers_as_message +from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union +from urllib.error import HTTPError +from urllib.request import HTTPRedirectHandler, Request + +import pytest +from _pytest.mark.structures import ParameterSet + +from rdflib._networking import _make_redirect_request + +AnyT = TypeVar("AnyT") + + +def with_attrs(object: AnyT, **kwargs: Any) -> AnyT: + for key, value in kwargs.items(): + setattr(object, key, value) + return object + + +class RaisesIdentity: + pass + + +def generate_make_redirect_request_cases() -> Iterable[ParameterSet]: + yield pytest.param( + Request("http://example.com/data.ttl"), + HTTPError( + "", + 308, + "Permanent Redirect", + headers_as_message({}), + None, + ), + RaisesIdentity, + {}, + id="Exception passes through if no Location header is present", + ) + yield pytest.param( + Request("http://example.com/data.ttl"), + HTTPError( + "", + 308, + "Permanent Redirect", + headers_as_message({"Location": [100]}), # type: ignore[arg-type] + None, + ), + ExceptionChecker(ValueError, "Location header 100 is not a string"), + {}, + id="Location must be a string", + ) + yield pytest.param( + Request("http://example.com/data.ttl"), + HTTPError( + "", + 308, + "Permanent Redirect", + headers_as_message({"Location": ["example:data.ttl"]}), + None, + ), + ExceptionChecker( + HTTPError, + "HTTP Error 308: Permanent Redirect - Redirection to url 'example:data.ttl' is not allowed", + {"code": 308}, + ), + {}, + id="Error passes through with a slight alterations if the Location header is not a supported URL", + ) + + url_prefix = "http://example.com" + for request_url_suffix, redirect_location, new_url_suffix in [ + ("/data.ttl", "", "/data.ttl"), + ("", "", ""), + ("/data.ttl", "a", "/a"), + ("", "a", "/a"), + ("/a/b/c/", ".", "/a/b/c/"), + ("/a/b/c", ".", "/a/b/"), + ("/a/b/c/", "..", "/a/b/"), + ("/a/b/c", "..", 
"/a/"), + ("/a/b/c/", "/", "/"), + ("/a/b/c/", "/x/", "/x/"), + ("/a/b/c/", "/x/y", "/x/y"), + ("/a/b/c/", f"{url_prefix}", ""), + ("/a/b/c/", f"{url_prefix}/", "/"), + ("/a/b/c/", f"{url_prefix}/a/../b", "/a/../b"), + ("/", f"{url_prefix}/ /data.ttl", "/%20%20%20/data.ttl"), + ]: + request_url = f"http://example.com{request_url_suffix}" + new_url = f"http://example.com{new_url_suffix}" + yield pytest.param( + Request(request_url), + HTTPError( + "", + 308, + "Permanent Redirect", + headers_as_message({"Location": [redirect_location]}), + None, + ), + Request(new_url, unverifiable=True), + {new_url: 1}, + id=f"Redirect from {request_url!r} to {redirect_location!r} is correctly handled", + ) + + yield pytest.param( + Request( + "http://example.com/data.ttl", + b"foo", + headers={ + "Content-Type": "text/plain", + "Content-Length": "3", + "Accept": "text/turtle", + }, + ), + HTTPError( + "", + 308, + "Permanent Redirect", + headers_as_message({"Location": ["http://example.org/data.ttl"]}), + None, + ), + Request( + "http://example.org/data.ttl", + headers={"Accept": "text/turtle"}, + origin_req_host="example.com", + unverifiable=True, + ), + {"http://example.org/data.ttl": 1}, + id="Headers transfer correctly", + ) + + yield pytest.param( + with_attrs( + Request( + "http://example.com/data1.ttl", + ), + redirect_dict=dict( + (f"http://example.com/redirect/{index}", 1) + for index in range(HTTPRedirectHandler.max_redirections) + ), + ), + HTTPError( + "", + 308, + "Permanent Redirect", + headers_as_message({"Location": ["http://example.org/data2.ttl"]}), + None, + ), + ExceptionChecker( + HTTPError, + f"HTTP Error 308: {HTTPRedirectHandler.inf_msg}Permanent Redirect", + ), + {}, + id="Max redirects is respected", + ) + + yield pytest.param( + with_attrs( + Request( + "http://example.com/data1.ttl", + ), + redirect_dict={ + "http://example.org/data2.ttl": HTTPRedirectHandler.max_repeats + }, + ), + HTTPError( + "", + 308, + "Permanent Redirect", + 
headers_as_message({"Location": ["http://example.org/data2.ttl"]}), + None, + ), + ExceptionChecker( + HTTPError, + f"HTTP Error 308: {HTTPRedirectHandler.inf_msg}Permanent Redirect", + ), + {}, + id="Max repeats is respected", + ) + + +@pytest.mark.parametrize( + ("http_request", "http_error", "expected_result", "expected_redirect_dict"), + generate_make_redirect_request_cases(), +) +def test_make_redirect_request( + http_request: Request, + http_error: HTTPError, + expected_result: Union[Type[RaisesIdentity], ExceptionChecker, Request], + expected_redirect_dict: Dict[str, int], +) -> None: + """ + `_make_redirect_request` correctly handles redirects. + """ + catcher: Optional[pytest.ExceptionInfo[Exception]] = None + result: Optional[Request] = None + with ExitStack() as stack: + if isinstance(expected_result, ExceptionChecker): + catcher = stack.enter_context(pytest.raises(expected_result.type)) + elif expected_result is RaisesIdentity: + catcher = stack.enter_context(pytest.raises(HTTPError)) + result = _make_redirect_request(http_request, http_error) + + if isinstance(expected_result, ExceptionChecker): + assert catcher is not None + expected_result.check(catcher.value) + elif isinstance(expected_result, type): + assert catcher is not None + assert http_error is catcher.value + else: + assert expected_redirect_dict == getattr(result, "redirect_dict", None) + assert expected_redirect_dict == getattr(http_request, "redirect_dict", None) + check = deepcopy(expected_result) + check.unverifiable = True + check = with_attrs(check, redirect_dict=expected_redirect_dict) + assert vars(check) == vars(result) diff --git a/test/utils/exceptions.py b/test/utils/exceptions.py new file mode 100644 index 000000000..a814f9b40 --- /dev/null +++ b/test/utils/exceptions.py @@ -0,0 +1,29 @@ +import logging +import re +from dataclasses import dataclass +from typing import Any, Dict, Optional, Pattern, Type, Union + + +@dataclass(frozen=True) +class ExceptionChecker: + type: 
Type[Exception] + pattern: Optional[Union[Pattern[str], str]] = None + attributes: Optional[Dict[str, Any]] = None + + def check(self, exception: Exception) -> None: + logging.debug("checking exception %s/%r", type(exception), exception) + pattern = self.pattern + if pattern is not None and not isinstance(pattern, re.Pattern): + pattern = re.compile(pattern) + try: + assert isinstance(exception, self.type) + if pattern is not None: + assert pattern.match(f"{exception}") + if self.attributes is not None: + for key, value in self.attributes.items(): + logging.debug("checking exception attribute %s=%r", key, value) + assert hasattr(exception, key) + assert getattr(exception, key) == value + except Exception: + logging.error("problem checking exception", exc_info=exception) + raise diff --git a/test/utils/http.py b/test/utils/http.py index fa13a2ed9..e40d2a8c8 100644 --- a/test/utils/http.py +++ b/test/utils/http.py @@ -108,3 +108,12 @@ def ctx_http_server(server: HTTPServerT) -> Iterator[HTTPServerT]: server.shutdown() server.socket.close() server_thread.join() + + +def headers_as_message(headers: HeadersT) -> email.message.Message: + message = email.message.Message() + for header, value in header_items(headers): + # This will append the value to any existing values for the header + # instead of replacing it. + message[header] = value + return message diff --git a/test/utils/httpfileserver.py b/test/utils/httpfileserver.py index 1989070a9..49c92e807 100644 --- a/test/utils/httpfileserver.py +++ b/test/utils/httpfileserver.py @@ -74,7 +74,7 @@ class HTTPFileInfo: :param effective_url: The URL that the file will be served from after redirects. :param redirects: A sequence of redirects that will be given to the client - if it uses the ``request_url``. This sequence will terimate in the + if it uses the ``request_url``. This sequence will terminate in the ``effective_url``. 
""" @@ -128,15 +128,17 @@ def add_file_with_caching( self, proto_file: ProtoFileResource, proto_redirects: Optional[Sequence[ProtoRedirectResource]] = None, + suffix: str = "", ) -> HTTPFileInfo: - return self.add_file(proto_file, proto_redirects) + return self.add_file(proto_file, proto_redirects, suffix) def add_file( self, proto_file: ProtoFileResource, proto_redirects: Optional[Sequence[ProtoRedirectResource]] = None, + suffix: str = "", ) -> HTTPFileInfo: - url_path = f"/file/{uuid4().hex}" + url_path = f"/file/{uuid4().hex}{suffix}" url = urljoin(self.url, url_path) file_resource = FileResource( url_path=url_path, @@ -151,7 +153,7 @@ def add_file( redirects: List[RedirectResource] = [] for proto_redirect in reversed(proto_redirects): - redirect_url_path = f"/redirect/{uuid4().hex}" + redirect_url_path = f"/redirect/{uuid4().hex}{suffix}" if proto_redirect.location_type == LocationType.URL: location = url elif proto_redirect.location_type == LocationType.ABSOLUTE_PATH: From 6a4ae70032495ca42f23163218d134f23f1b9173 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 19:25:32 +0200 Subject: [PATCH 064/114] build(deps-dev): bump coverage from 7.2.3 to 7.2.5 (#2376) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.2.3 to 7.2.5. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.2.3...7.2.5) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 104 ++++++++++++++++++++++++++-------------------------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4f479c6cc..8a8ffd501 100644 --- a/poetry.lock +++ b/poetry.lock @@ -216,63 +216,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.3" +version = "7.2.5" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, - {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, - {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = 
"sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, - {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, - {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, - {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, - {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, - {file = 
"coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, - {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, - {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, - {file = 
"coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, - {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, - {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, - {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, - {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, - {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, - {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, + {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, + {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, + {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, + {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, + {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash 
= "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, + {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, + {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, + {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, + {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, + 
{file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, + {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, + {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, + {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, + {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, ] [package.dependencies] From f0081a42ee3a6dba692f84dfca8e46662f71927d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 19:25:45 +0200 Subject: [PATCH 065/114] build(deps-dev): bump setuptools from 67.6.1 to 67.7.2 (#2377) Bumps [setuptools](https://github.com/pypa/setuptools) from 67.6.1 to 67.7.2. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v67.6.1...v67.7.2) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8a8ffd501..3d95f6744 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1107,14 +1107,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.6.1" +version = "67.7.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, - {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, + {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, + {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, ] [package.extras] From eb9eab01d336b7881f4f8be3707305c2634f8f03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 19:26:04 +0200 Subject: [PATCH 066/114] build(deps-dev): bump mypy from 1.2.0 to 1.3.0 (#2386) Bumps [mypy](https://github.com/python/mypy) from 1.2.0 to 1.3.0. - [Commits](https://github.com/python/mypy/compare/v1.2.0...v1.3.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 54 ++++++++++++++++++++++++++--------------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3d95f6744..b949cb1ba 100644 --- a/poetry.lock +++ b/poetry.lock @@ -730,38 +730,38 @@ files = [ [[package]] name = "mypy" -version = "1.2.0" +version = "1.3.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"}, - {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"}, - {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"}, - {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"}, - {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"}, - {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"}, - {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"}, - {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"}, - {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"}, - {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"}, - {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"}, - {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"}, - {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"}, - {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"}, - {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"}, - {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"}, - {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"}, - {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"}, - {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"}, - {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"}, - {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"}, - {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"}, - {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"}, - {file = 
"mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"}, - {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"}, - {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"}, + {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, + {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, + {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, + {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, + {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, + {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, + {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, + {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, + {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, + {file = 
"mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, + {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, + {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, + {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, + {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, + {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, + {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, + {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, + {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, + {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, + {file = 
"mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, ] [package.dependencies] From 860a669706f6e991e24229696c503f17ee051711 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 19:31:44 +0200 Subject: [PATCH 067/114] build(deps-dev): bump types-setuptools from 67.6.0.7 to 67.7.0.2 (#2387) Bumps [types-setuptools](https://github.com/python/typeshed) from 67.6.0.7 to 67.7.0.2. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index b949cb1ba..d5a4d623b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1373,14 +1373,14 @@ files = [ [[package]] name = "types-setuptools" -version = "67.6.0.7" +version = "67.7.0.2" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.6.0.7.tar.gz", hash = "sha256:f46b11773b1aeddbd2ef32fd6a6091ef33aa9b32daa124f6ce63f616de59ae51"}, - {file = "types_setuptools-67.6.0.7-py3-none-any.whl", hash = "sha256:ea2873dc8dd9e8421929dc50617ac7c2054c9a873942c5b5b606e2effef5db12"}, + {file = "types-setuptools-67.7.0.2.tar.gz", hash = "sha256:155789e85e79d5682b0d341919d4beb6140408ae52bac922af25b54e36ab25c0"}, + {file = "types_setuptools-67.7.0.2-py3-none-any.whl", hash = "sha256:bd30f6dbe9b83f0a7e6e3eab6d2df748aa4f55700d54e9f077d3aa30cc019445"}, ] [[package]] From e103078be6c75b2f9a1dc515abffcdf6a674407d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 May 2023 19:39:18 +0200 Subject: [PATCH 068/114] build(deps): 
bump berkeleydb from 18.1.5 to 18.1.6 (#2388) Bumps [berkeleydb](https://www.jcea.es/programacion/pybsddb.htm) from 18.1.5 to 18.1.6. --- updated-dependencies: - dependency-name: berkeleydb dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index d5a4d623b..cec58e05f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -29,13 +29,13 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "berkeleydb" -version = "18.1.5" +version = "18.1.6" description = "Python bindings for Oracle Berkeley DB" category = "main" optional = true python-versions = "*" files = [ - {file = "berkeleydb-18.1.5.tar.gz", hash = "sha256:d8c2efbca9472f8848b13cce4f4904a5ea1e1540ee576e626a1491817832a50a"}, + {file = "berkeleydb-18.1.6.tar.gz", hash = "sha256:6d412dd1a5b702aeeda3cbfa10d3399b16a804d016de087234f8579fca613ec9"}, ] [[package]] From cd0b442671726efce594ee4502b9a3c9eafc50d0 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 19 May 2023 12:17:29 +0200 Subject: [PATCH 069/114] fix: bugs with `rdflib.extras.infixowl` (#2390) Fix the following issues in `rdflib.extras.infixowl`: - getting and setting of max cardinality only considered identifiers and not other RDF terms. - The return value of `manchesterSyntax` was wrong for some cases. - The way that `BooleanClass` was generating its string representation (i.e. `BooleanClass.__repr__`) was wrong for some cases. Other changes: - Added an example for using infixowl to create an ontology. - Updated infixowl tests. - Updated infixowl documentation. This code is based on code from: - Changes are primarily authored by . 
--------- Co-authored-by: Graham Higgins --- examples/infixowl_ontology_creation.py | 280 ++++++++++++++++++ rdflib/extras/infixowl.py | 150 ++++++---- .../test_infixowl/test_booleanclass.py | 8 +- .../test_infixowl/test_restriction.py | 143 +++++++-- 4 files changed, 488 insertions(+), 93 deletions(-) create mode 100644 examples/infixowl_ontology_creation.py diff --git a/examples/infixowl_ontology_creation.py b/examples/infixowl_ontology_creation.py new file mode 100644 index 000000000..8efeb69ca --- /dev/null +++ b/examples/infixowl_ontology_creation.py @@ -0,0 +1,280 @@ +from rdflib import Graph, Literal, Namespace, URIRef +from rdflib.extras.infixowl import Class, Ontology, Property, min, only, some + +CPR = Namespace("http://purl.org/cpr/0.75#") +INF = Namespace("http://www.loa-cnr.it/ontologies/InformationObjects.owl#") +EDNS = Namespace("http://www.loa-cnr.it/ontologies/ExtendedDnS.owl#") +DOLCE = Namespace("http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#") +REL = Namespace("http://www.geneontology.org/owl#") +GALEN = Namespace("http://www.co-ode.org/ontologies/galen#") +TIME = Namespace("http://www.w3.org/2006/time#") +CYC = Namespace("http://sw.cyc.com/2006/07/27/cyc/") + + +def infixowl_example(): + g = Graph() + g.bind("cpr", CPR, override=False) + g.bind("ro", REL, override=False) + g.bind("inf", INF, override=False) + g.bind("edns", EDNS, override=False) + g.bind("dol", DOLCE, override=False) + g.bind("time", TIME, override=False) + g.bind("galen", GALEN, override=False) + + Class.factoryGraph = g + Property.factoryGraph = g + Ontology.factoryGraph = g + + cprOntology = Ontology(URIRef("http://purl.org/cpr/owl")) # noqa: N806 + cprOntology.imports = [ + URIRef("http://obo.sourceforge.net/relationship/relationship.owl"), + URIRef(DOLCE), + URIRef(EDNS), + URIRef("http://www.w3.org/2006/time#"), + ] + cprOntology.comment = [ + Literal( + """This OWL ontology was generated by Fuxi 0.85b.dev-r107 + (with newly added Infix OWL syntax library). 
It imports the + OBO relationship ontology, DOLCE, and OWL time. It formally + defines a focused, core set of archetypes [Jung, C.] + replicated in various patient record terminology. This core is + defined in RDF and follows the normalization principles + of "rigorous formal ontologies" [Rector, A.].""" + ) + ] + cprOntology.setVersion(Literal("0.75")) + + # Relations + # represented-by + representationOf = Property( # noqa: N806 + CPR["representation-of"], + inverseOf=Property(CPR["represented-by"]), + comment=[ + Literal( + """Patient records stand in the cpr:representation-of relation + with patients""" + ) + ], + ) + representedBy = Property( # noqa: F841, N806 + CPR["represented-by"], inverseOf=representationOf + ) + # description-of + descrOf = Property( # noqa: N806 + CPR["description-of"], + comment=[ + Literal( + """Clinical descriptions stand in the cpr:description-of + relation with various clinical phenomenon""" + ) + ], + domain=[Class(CPR["clinical-description"])], + ) + # cpr:interpreted-by + interpretedBy = Property( # noqa: F841, N806 + CPR["interpreted-by"], + comment=[ + Literal( + """Signs and symptoms are interpreted by rational physical + objects (people)""" + ) + ], + domain=[Class(CPR["medical-sign"]) | Class(CPR["symptom"])], + range=[Class(CPR.person)], + ) + # cpr:realized-by + realizedBy = Property( # noqa: N806 + CPR["realized-by"], + comment=[ + Literal( + """The epistemological relation in which screening acts and + the problems they realize stand to each other""" + ) + ], + inverseOf=Property(CPR["realizes"]), + domain=[Class(CPR["medical-problem"])], + range=[Class(CPR["screening-act"])], + ) + # cpr:realizes + realizes = Property(CPR["realizes"], inverseOf=realizedBy) # noqa: F841 + + # Classes + # cpr:person + person = Class(CPR.person) + person.comment = [ + Literal( + """A class which directly corresponds with the “Person” class in + both GALEN and Cyc""" + ) + ] + person.subClassOf = [Class(EDNS["rational-physical-object"])] + 
person.equivalentClass = [Class(GALEN.Person), Class(CYC.Person)] + + # cpr:patient + patient = Class(CPR.patient) + patient.comment = [ + Literal( + """A class which directly corresponds with the “Patient” + and “MedicalPatient” classes in GALEN / Cyc""" + ) + ] + # patient.equivalentClass = [Class(GALEN.Patient),Class(CYC.MedicalPatient)] + patient.subClassOf = [CPR["represented-by"] @ some @ Class(CPR["patient-record"])] + person += patient + + # cpr:clinician + clinician = Class(CPR.person) + clinician.comment = [ + Literal( + """A person who plays the clinician role (typically Nurse, + Physician / Doctor, etc.)""" + ) + ] + person += clinician + + # bytes + bytes = Class(CPR.bytes) + bytes.comment = [ + Literal( + """The collection of physical objects which constitute a stream of + bytes in memory, disk, etc.""" + ) + ] + bytes.subClassOf = [DOLCE["non-agentive-physical-object"]] + + # cpr:patient-record + patientRecord = Class(CPR["patient-record"]) # noqa: N806 + patientRecord.comment = [ + Literal( + """a class (a representational artifact [REFTERM]) depicting + relevant clinical information about a specific patient and is + primarily comprised of one or more + cpr:clinical-descriptions.""" + ) + ] + patientRecord.seeAlso = [URIRef("")] + patientRecord.subClassOf = [ + bytes, + # Class(CYC.InformationBearingThing), + CPR["representation-of"] @ only @ patient, + REL.OBO_REL_has_proper_part @ some @ Class(CPR["clinical-description"]), + ] + + # cpr:medical-problem + problem = Class( + CPR["medical-problem"], + subClassOf=[ + Class(DOLCE.quality), + realizedBy @ only @ Class(CPR["screening-act"]), + ], + ) + problem.comment = [ + Literal( + """.. problems that clearly require the intervention of a health + care professional. 
These include acute problems requiring + hospitalization and chronic problems requiring long-term + management.""" + ) + ] + + # cpr:clinical-description + clinDescr = Class(CPR["clinical-description"]) # noqa: N806 + clinDescr.disjointWith = [CPR["patient-record"]] + clinDescr.comment = [ + Literal( + """A class which corresponds (at least syntactically) with the HL7 + RIM Act Class, insofar as its members consist of clinical + recordings (representational artifacts) of natural phenomena + of clinical significance""" + ) + ] + clinDescr.subClassOf = [ + bytes, + # Class(CYC.InformationBearingThing), + DOLCE["has-quality"] @ some @ Class(TIME.TemporalEntity), + descrOf @ min @ Literal(1), + ] + + # cpr:medical-sign + sign = Class( + CPR["medical-sign"], + subClassOf=[ + problem, + Property(CPR["interpreted-by"]) @ only @ clinician, + Property(CPR["interpreted-by"]) @ some @ clinician, + ], + disjointWith=[CPR.symptom], + ) + sign.comment = [ + Literal( + """A cpr:medical-problem which are specifically interpreted by a + clinician. 
As such, this class is informally defined as an + objective indication of a quality typically detected by a + physician during a physical examination of a patient.""" + ) + ] + + symptom = Class( + CPR["symptom"], + subClassOf=[ + problem, + Property(CPR["interpreted-by"]) @ only @ patient, + Property(CPR["interpreted-by"]) @ some @ patient, + ], + disjointWith=[sign], + ) + symptom.comment = [ + Literal( + """(Medicine) any sensation or change in bodily function that is + experienced by a patient and is associated with a particular + disease.""" + ) + ] + + # clinical-act heriarchy + clinicalAct = Class( # noqa: N806 + CPR["clinical-act"], subClassOf=[Class(EDNS.activity)] + ) + + therapy = Class(CPR["therapeutic-act"], subClassOf=[clinicalAct]) + therapy += Class(CPR["physical-therapy"], disjointWith=[CPR["medical-therapy"]]) + therapy += Class( + CPR["psychological-therapy"], + disjointWith=[CPR["medical-therapy"], CPR["physical-therapy"]], + ) + + medicalTherapy = Class( # noqa: N806 + CPR["medical-therapy"], + disjointWith=[CPR["physical-therapy"], CPR["psychological-therapy"]], + ) + therapy += medicalTherapy + medicalTherapy += Class(CPR["substance-administration"]) + + diagnosticAct = Class(CPR["diagnostic-act"], subClassOf=[clinicalAct]) # noqa: N806 + diagnosticAct.disjointWith = [CPR["therapeutic-act"]] + + screeningAct = Class(CPR["screening-act"]) # noqa: N806 + screeningAct += Class(CPR["laboratory-test"]) + + diagnosticAct += screeningAct + + screeningAct += Class( + CPR["medical-history-screening-act"], + disjointWith=[CPR["clinical-examination"], CPR["laboratory-test"]], + ) + + screeningAct += Class( + CPR["clinical-examination"], + disjointWith=[CPR["laboratory-test"], CPR["medical-history-screening-act"]], + ) + + device = Class( # noqa: F841 + CPR["medical-device"], subClassOf=[Class(GALEN.Device)] + ) + + print(g.serialize(format="turtle")) + + +if __name__ == "__main__": + infixowl_example() diff --git a/rdflib/extras/infixowl.py 
b/rdflib/extras/infixowl.py index 9c75345bb..dadc6324e 100644 --- a/rdflib/extras/infixowl.py +++ b/rdflib/extras/infixowl.py @@ -2,6 +2,19 @@ __doc__ = """RDFLib Python binding for OWL Abstract Syntax +OWL Constructor DL Syntax Manchester OWL Syntax Example +==================================================================================== +intersectionOf C ∩ D C AND D Human AND Male +unionOf C ∪ D C OR D Man OR Woman +complementOf ¬ C NOT C NOT Male +oneOf {a} ∪ {b}... {a b ...} {England Italy Spain} +someValuesFrom ∃ R C R SOME C hasColleague SOME Professor +allValuesFrom ∀ R C R ONLY C hasColleague ONLY Professor +minCardinality ≥ N R R MIN 3 hasColleague MIN 3 +maxCardinality ≤ N R R MAX 3 hasColleague MAX 3 +cardinality = N R R EXACTLY 3 hasColleague EXACTLY 3 +hasValue ∃ R {a} R VALUE a hasColleague VALUE Matthew + see: http://www.w3.org/TR/owl-semantics/syntax.html http://owl-workshop.man.ac.uk/acceptedLong/submission_9.pdf @@ -12,12 +25,9 @@ Uses Manchester Syntax for __repr__ ->>> exNs = Namespace('http://example.com/') ->>> namespace_manager = NamespaceManager(Graph()) ->>> namespace_manager.bind('ex', exNs, override=False) ->>> namespace_manager.bind('owl', OWL, override=False) +>>> exNs = Namespace("http://example.com/") >>> g = Graph() ->>> g.namespace_manager = namespace_manager +>>> g.bind("ex", exNs, override=False) Now we have an empty graph, we can construct OWL classes in it using the Python classes defined in this module @@ -39,8 +49,6 @@ This can also be used against already populated graphs: >>> owlGraph = Graph().parse(str(OWL)) ->>> namespace_manager.bind('owl', OWL, override=False) ->>> owlGraph.namespace_manager = namespace_manager >>> list(Class(OWL.Class, graph=owlGraph).subClassOf) [Class: rdfs:Class ] @@ -97,13 +105,13 @@ Restrictions can also be created using Manchester OWL syntax in 'colloquial' Python ->>> exNs.hasParent << some >> Class(exNs.Physician, graph=g) +>>> exNs.hasParent @ some @ Class(exNs.Physician, graph=g) ( 
ex:hasParent SOME ex:Physician ) ->>> Property(exNs.hasParent, graph=g) << max >> Literal(1) +>>> Property(exNs.hasParent, graph=g) @ max @ Literal(1) ( ex:hasParent MAX 1 ) ->>> print(g.serialize(format='pretty-xml')) #doctest: +SKIP +>>> print(g.serialize(format='pretty-xml')) # doctest: +SKIP """ @@ -170,9 +178,7 @@ # definition of an Infix operator class # this recipe also works in jython -# calling sequence for the infix is either: -# x << op >> y -# or: +# calling sequence for the infix is: # x @ op @ y @@ -332,7 +338,8 @@ def castToQName(x): # noqa: N802 except Exception: if isinstance(thing, BNode): return thing.n3() - return "<" + thing + ">" + # Expect the unexpected + return thing.identifier if not isinstance(thing, str) else thing label = first(Class(thing, graph=store).label) if label: return label @@ -359,7 +366,8 @@ def _remover(inst): class Individual: """ - A typed individual + A typed individual, the base class of the InfixOWL classes. + """ factoryGraph = Graph() # noqa: N815 @@ -383,16 +391,45 @@ def __init__(self, identifier=None, graph=None): pass # pragma: no cover def clearInDegree(self): # noqa: N802 + """ + Remove references to this individual as an object in the + backing store. + """ self.graph.remove((None, None, self.identifier)) def clearOutDegree(self): # noqa: N802 + """ + Remove all statements to this individual as a subject in the + backing store. Note that this only removes the statements + themselves, not the blank node closure so there is a chance + that this will cause orphaned blank nodes to remain in the + graph. + """ self.graph.remove((self.identifier, None, None)) def delete(self): + """ + Delete the individual from the graph, clearing the in and + out degrees. + """ self.clearInDegree() self.clearOutDegree() def replace(self, other): + """ + Replace the individual in the graph with the given other, + causing all triples that refer to it to be changed and then + delete the individual. 
+ + >>> g = Graph() + >>> b = Individual(OWL.Restriction, g) + >>> b.type = RDFS.Resource + >>> len(list(b.type)) + 1 + >>> del b.type + >>> len(list(b.type)) + 0 + """ for s, p, _o in self.graph.triples((None, None, self.identifier)): self.graph.add((s, p, classOrIdentifier(other))) self.delete() @@ -829,26 +866,23 @@ def DeepClassClear(class_to_prune): # noqa: N802 Recursively clear the given class, continuing where any related class is an anonymous class - >>> EX = Namespace('http://example.com/') - >>> namespace_manager = NamespaceManager(Graph()) - >>> namespace_manager.bind('ex', EX, override=False) - >>> namespace_manager.bind('owl', OWL, override=False) + >>> EX = Namespace("http://example.com/") >>> g = Graph() - >>> g.namespace_manager = namespace_manager + >>> g.bind("ex", EX, override=False) >>> Individual.factoryGraph = g >>> classB = Class(EX.B) >>> classC = Class(EX.C) >>> classD = Class(EX.D) >>> classE = Class(EX.E) >>> classF = Class(EX.F) - >>> anonClass = EX.someProp << some >> classD + >>> anonClass = EX.someProp @ some @ classD >>> classF += anonClass >>> list(anonClass.subClassOf) [Class: ex:F ] >>> classA = classE | classF | anonClass >>> classB += classA >>> classA.equivalentClass = [Class()] - >>> classB.subClassOf = [EX.someProp << some >> classC] + >>> classB.subClassOf = [EX.someProp @ some @ classC] >>> classA ( ex:E OR ex:F OR ( ex:someProp SOME ex:D ) ) >>> DeepClassClear(classA) @@ -1113,20 +1147,16 @@ def __and__(self, other): Construct an anonymous class description consisting of the intersection of this class and 'other' and return it - >>> exNs = Namespace('http://example.com/') - >>> namespace_manager = NamespaceManager(Graph()) - >>> namespace_manager.bind('ex', exNs, override=False) - >>> namespace_manager.bind('owl', OWL, override=False) - >>> g = Graph() - >>> g.namespace_manager = namespace_manager - Chaining 3 intersections + >>> exNs = Namespace("http://example.com/") + >>> g = Graph() + >>> g.bind("ex", exNs, 
override=False) >>> female = Class(exNs.Female, graph=g) >>> human = Class(exNs.Human, graph=g) >>> youngPerson = Class(exNs.YoungPerson, graph=g) >>> youngWoman = female & human & youngPerson - >>> youngWoman #doctest: +SKIP + >>> youngWoman # doctest: +SKIP ex:YoungPerson THAT ( ex:Female AND ex:Human ) >>> isinstance(youngWoman, BooleanClass) True @@ -1230,11 +1260,8 @@ def _get_parents(self): >>> from rdflib.util import first >>> exNs = Namespace('http://example.com/') - >>> namespace_manager = NamespaceManager(Graph()) - >>> namespace_manager.bind('ex', exNs, override=False) - >>> namespace_manager.bind('owl', OWL, override=False) >>> g = Graph() - >>> g.namespace_manager = namespace_manager + >>> g.bind("ex", exNs, override=False) >>> Individual.factoryGraph = g >>> brother = Class(exNs.Brother) >>> sister = Class(exNs.Sister) @@ -1462,25 +1489,21 @@ class EnumeratedClass(OWLRDFListProxy, Class): axiom ::= 'EnumeratedClass(' classID ['Deprecated'] { annotation } { individualID } ')' - - >>> exNs = Namespace('http://example.com/') - >>> namespace_manager = NamespaceManager(Graph()) - >>> namespace_manager.bind('ex', exNs, override=False) - >>> namespace_manager.bind('owl', OWL, override=False) + >>> exNs = Namespace("http://example.com/") >>> g = Graph() - >>> g.namespace_manager = namespace_manager + >>> g.bind("ex", exNs, override=False) >>> Individual.factoryGraph = g >>> ogbujiBros = EnumeratedClass(exNs.ogbujicBros, ... members=[exNs.chime, ... exNs.uche, ... exNs.ejike]) - >>> ogbujiBros #doctest: +SKIP + >>> ogbujiBros # doctest: +SKIP { ex:chime ex:uche ex:ejike } >>> col = Collection(g, first( ... g.objects(predicate=OWL.oneOf, subject=ogbujiBros.identifier))) >>> sorted([g.qname(item) for item in col]) ['ex:chime', 'ex:ejike', 'ex:uche'] - >>> print(g.serialize(format='n3')) #doctest: +SKIP + >>> print(g.serialize(format='n3')) # doctest: +SKIP @prefix ex: . @prefix owl: . @prefix rdf: . 
@@ -1531,16 +1554,14 @@ class BooleanClassExtentHelper: >>> testGraph = Graph() >>> Individual.factoryGraph = testGraph >>> EX = Namespace("http://example.com/") - >>> namespace_manager = NamespaceManager(Graph()) - >>> namespace_manager.bind('ex', EX, override=False) - >>> testGraph.namespace_manager = namespace_manager + >>> testGraph.bind("ex", EX, override=False) >>> fire = Class(EX.Fire) >>> water = Class(EX.Water) >>> testClass = BooleanClass(members=[fire, water]) >>> testClass2 = BooleanClass( ... operator=OWL.unionOf, members=[fire, water]) >>> for c in BooleanClass.getIntersections(): - ... print(c) #doctest: +SKIP + ... print(c) # doctest: +SKIP ( ex:Fire AND ex:Water ) >>> for c in BooleanClass.getUnions(): ... print(c) #doctest: +SKIP @@ -1560,7 +1581,10 @@ def _getExtent(): # noqa: N802 class Callable: def __init__(self, anycallable): - self.__call__ = anycallable + self._callfn = anycallable + + def __call__(self, *args, **kwargs): + return self._callfn(*args, **kwargs) class BooleanClass(OWLRDFListProxy, Class): @@ -1602,9 +1626,7 @@ def __init__( rdf_list = list(self.graph.objects(predicate=operator, subject=self.identifier)) assert ( not members or not rdf_list - ), "This is a previous boolean class description!" + repr( - Collection(self.graph, rdf_list[0]).n3() - ) + ), "This is a previous boolean class description." 
OWLRDFListProxy.__init__(self, rdf_list, members) def copy(self): @@ -1637,13 +1659,10 @@ def changeOperator(self, newOperator): # noqa: N802, N803 Converts a unionOf / intersectionOf class expression into one that instead uses the given operator - >>> testGraph = Graph() >>> Individual.factoryGraph = testGraph >>> EX = Namespace("http://example.com/") - >>> namespace_manager = NamespaceManager(Graph()) - >>> namespace_manager.bind('ex', EX, override=False) - >>> testGraph.namespace_manager = namespace_manager + >>> testGraph.bind("ex", EX, override=False) >>> fire = Class(EX.Fire) >>> water = Class(EX.Water) >>> testClass = BooleanClass(members=[fire,water]) @@ -1655,7 +1674,7 @@ def changeOperator(self, newOperator): # noqa: N802, N803 >>> try: ... testClass.changeOperator(OWL.unionOf) ... except Exception as e: - ... print(e) #doctest: +SKIP + ... print(e) # doctest: +SKIP The new operator is already being used! """ @@ -1668,7 +1687,11 @@ def __repr__(self): """ Returns the Manchester Syntax equivalent for this class """ - return manchesterSyntax(self._rdfList.uri, self.graph, boolean=self._operator) + return manchesterSyntax( + self._rdfList.uri if isinstance(self._rdfList, Collection) else BNode(), + self.graph, + boolean=self._operator, + ) def __or__(self, other): """ @@ -1704,6 +1727,7 @@ class Restriction(Class): OWL.allValuesFrom, OWL.someValuesFrom, OWL.hasValue, + OWL.cardinality, OWL.maxCardinality, OWL.minCardinality, ] @@ -1774,16 +1798,14 @@ def serialize(self, graph): >>> g1 = Graph() >>> g2 = Graph() >>> EX = Namespace("http://example.com/") - >>> namespace_manager = NamespaceManager(g1) - >>> namespace_manager.bind('ex', EX, override=False) - >>> namespace_manager = NamespaceManager(g2) - >>> namespace_manager.bind('ex', EX, override=False) + >>> g1.bind("ex", EX, override=False) + >>> g2.bind("ex", EX, override=False) >>> Individual.factoryGraph = g1 >>> prop = Property(EX.someProp, baseType=OWL.DatatypeProperty) >>> restr1 = (Property( ... 
EX.someProp, - ... baseType=OWL.DatatypeProperty)) << some >> (Class(EX.Foo)) - >>> restr1 #doctest: +SKIP + ... baseType=OWL.DatatypeProperty)) @ some @ (Class(EX.Foo)) + >>> restr1 # doctest: +SKIP ( ex:someProp SOME ex:Foo ) >>> restr1.serialize(g2) >>> Individual.factoryGraph = g2 @@ -1917,7 +1939,7 @@ def _get_cardinality(self): def _set_cardinality(self, other): if not other: return - triple = (self.identifier, OWL.cardinality, classOrIdentifier(other)) + triple = (self.identifier, OWL.cardinality, classOrTerm(other)) if triple in self.graph: return else: @@ -1939,7 +1961,7 @@ def _get_maxcardinality(self): def _set_maxcardinality(self, other): if not other: return - triple = (self.identifier, OWL.maxCardinality, classOrIdentifier(other)) + triple = (self.identifier, OWL.maxCardinality, classOrTerm(other)) if triple in self.graph: return else: diff --git a/test/test_extras/test_infixowl/test_booleanclass.py b/test/test_extras/test_infixowl/test_booleanclass.py index 86f7a223e..62153ce06 100644 --- a/test/test_extras/test_infixowl/test_booleanclass.py +++ b/test/test_extras/test_infixowl/test_booleanclass.py @@ -17,7 +17,7 @@ def graph(): del g -@pytest.mark.xfail(reason="assert len(props) == 1, repr(props), so AssertionError: []") +@pytest.mark.xfail(reason="AssertionError, len(props) != 1", raises=AssertionError) def test_booleanclass_operator_as_none(graph): fire = Class(EXNS.Fire) water = Class(EXNS.Water) @@ -63,16 +63,10 @@ def test_booleanclass_with_or_operator(graph): assert str(c) == "( ex:Fire OR ex:Water )" -@pytest.mark.xfail( - reason="BooleanClass.getIntersections() - TypeError: 'Callable' object is not callable" -) def test_getintersections(graph): _ = BooleanClass.getIntersections() -@pytest.mark.xfail( - reason="BooleanClass.getUnions() - TypeError: 'Callable' object is not callable" -) def test_getunions(graph): _ = BooleanClass.getUnions() diff --git a/test/test_extras/test_infixowl/test_restriction.py 
b/test/test_extras/test_infixowl/test_restriction.py index c57cacb2c..94ffc36f5 100644 --- a/test/test_extras/test_infixowl/test_restriction.py +++ b/test/test_extras/test_infixowl/test_restriction.py @@ -1,6 +1,6 @@ import pytest -from rdflib import OWL, XSD, BNode, Graph, Literal, Namespace, URIRef +from rdflib import OWL, RDF, XSD, BNode, Graph, Literal, Namespace, URIRef from rdflib.extras.infixowl import Class, Individual, Property, Restriction, some EXNS = Namespace("http://example.org/vocab/") @@ -21,11 +21,7 @@ def graph(): def test_restriction_str_and_hash(graph): - r1 = ( - (Property(EXNS.someProp, baseType=OWL.DatatypeProperty)) - @ some - @ (Class(EXNS.Foo)) - ) + r1 = Property(EXNS.someProp, baseType=OWL.DatatypeProperty) @ some @ Class(EXNS.Foo) assert str(r1) == "( ex:someProp SOME ex:Foo )" @@ -236,34 +232,40 @@ def test_restriction_cardinality_value(graph): assert str(r.cardinality) == "Some Class " -@pytest.mark.xfail(reason="_set_cardinality fails to handle Literal") def test_restriction_cardinality_set_value(graph): r = Restriction( onProperty=EXNS.hasChild, graph=graph, - cardinality=OWL.cardinality, + cardinality=Literal("0", datatype=XSD.nonNegativeInteger), + identifier=URIRef(EXNS.r1), ) + assert str(r) == "( ex:hasChild EQUALS 0 )" + assert graph.serialize(format="ttl") == ( "@prefix ex: .\n" "@prefix owl: .\n" + "@prefix xsd: .\n" "\n" - "[] a owl:Restriction ;\n" - " owl:cardinality owl:cardinality ;\n" + "ex:r1 a owl:Restriction ;\n" + ' owl:cardinality "0"^^xsd:nonNegativeInteger ;\n' " owl:onProperty ex:hasChild .\n" "\n" ) - assert r.cardinality is not None - - assert str(r) == "( ex:hasChild EQUALS http://www.w3.org/2002/07/owl#cardinality )" - - assert str(r.cardinality) == "Class: owl:cardinality " + r.cardinality = Literal("1", datatype=XSD.nonNegativeInteger) - r.cardinality = Literal("0", datatype=XSD.nonNegativeInteger) + assert str(r) == "( ex:hasChild EQUALS 1 )" - assert ( - str(r) == '( ex:hasChild EQUALS owl:cardinality 
"0"^^xsd:nonNegativeInteger )' + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "@prefix xsd: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + ' owl:cardinality "1"^^xsd:nonNegativeInteger ;\n' + " owl:onProperty ex:hasChild .\n" + "\n" ) @@ -271,21 +273,114 @@ def test_restriction_maxcardinality(graph): r = Restriction( onProperty=EXNS.hasChild, graph=graph, - maxCardinality=OWL.maxCardinality, + maxCardinality=Literal("0", datatype=XSD.nonNegativeInteger), + identifier=URIRef(EXNS.r1), ) - assert str(r.maxCardinality) == "Class: owl:maxCardinality " + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "@prefix xsd: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + ' owl:maxCardinality "0"^^xsd:nonNegativeInteger ;\n' + " owl:onProperty ex:hasChild .\n" + "\n" + ) + + # FIXME: Don't do this, it changes the value!! + assert str(r.maxCardinality) == "Some Class " + + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "@prefix xsd: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + ' owl:maxCardinality "0"^^xsd:nonNegativeInteger ;\n' + " owl:onProperty ex:hasChild .\n" + "\n" + "[] a owl:Class .\n" + "\n" + ) r.maxCardinality = OWL.maxCardinality + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + " owl:maxCardinality owl:maxCardinality ;\n" + " owl:onProperty ex:hasChild .\n" + "\n" + "[] a owl:Class .\n" + "\n" + ) + + # Ignored r.maxCardinality = None - r.maxCardinality = EXNS.foo + assert graph.serialize(format="ttl") != "" + + superfluous_assertion_subject = list(graph.subjects(RDF.type, OWL.Class))[0] + + assert isinstance(superfluous_assertion_subject, BNode) + + graph.remove((superfluous_assertion_subject, RDF.type, OWL.Class)) + + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + " owl:maxCardinality owl:maxCardinality 
;\n" + " owl:onProperty ex:hasChild .\n" + "\n" + ) + + r.maxCardinality = EXNS.maxkids + + assert str(r) == "( ex:hasChild MAX http://example.org/vocab/maxkids )" + + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + " owl:maxCardinality ex:maxkids ;\n" + " owl:onProperty ex:hasChild .\n" + "\n" + ) del r.maxCardinality + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + " owl:onProperty ex:hasChild .\n" + "\n" + ) + assert r.maxCardinality is None + r.maxCardinality = Literal("2", datatype=XSD.nonNegativeInteger) + + assert str(r) == "( ex:hasChild MAX 2 )" + + assert graph.serialize(format="ttl") == ( + "@prefix ex: .\n" + "@prefix owl: .\n" + "@prefix xsd: .\n" + "\n" + "ex:r1 a owl:Restriction ;\n" + ' owl:maxCardinality "2"^^xsd:nonNegativeInteger ;\n' + " owl:onProperty ex:hasChild .\n" + "\n" + ) + def test_restriction_mincardinality(graph): r = Restriction( @@ -300,12 +395,16 @@ def test_restriction_mincardinality(graph): r.minCardinality = None - r.minCardinality = EXNS.foo + r.minCardinality = EXNS.minkids + + assert str(r) == "( ex:hasChild MIN http://example.org/vocab/minkids )" del r.minCardinality assert r.minCardinality is None + r.minCardinality = Literal("0", datatype=XSD.nonNegativeInteger) + def test_restriction_kind(graph): r = Restriction( From 63b082c3e6d2a51dda0fe70b4da905890123e97c Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 19 May 2023 12:18:12 +0200 Subject: [PATCH 070/114] docs: recommend making an issue before making an enhancement (#2391) Suggest that contributors first make an issue to get in principle agreement for pull requests before making the pull request. Enhancements can be controversial, and we may reject the enhancement sometimes, even if the code is good, as it may just not be deemed important enough to increase the maintenance burden of RDFLib. 
Other changes: - Updated the checklist in the pull request template to be more accurate to current practice. - Improved grammar and writing in the pull request template, contribution guide and developers guide. --- .github/PULL_REQUEST_TEMPLATE.md | 38 ++++++++++++++++++-------------- docs/CONTRIBUTING.md | 12 +++++++--- docs/developers.rst | 15 ++++++++----- 3 files changed, 41 insertions(+), 24 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index f426017b4..9bd11d95c 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,6 +1,6 @@ # Summary of changes + - [ ] Created an issue to discuss the change and get in-principle agreement. + - [ ] Considered adding an example in `./examples`. +- If the change has a potential impact on users of this project: + + - [ ] Added or updated tests that fail without the change. - [ ] Updated relevant documentation to avoid inaccuracies. - [ ] Considered adding additional documentation. - - [ ] Considered adding an example in `./examples` for new features. - - [ ] Considered updating our changelog (`CHANGELOG.md`). - [ ] Considered granting [push permissions to the PR branch](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork), so maintainers can fix minor issues and keep your PR up to date. diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index f900dcb49..dfbb00e1d 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -1,8 +1,8 @@ # RDFLib Contributing Guide Thank you for considering contributing to RDFLib. This project has no formal -funding or full-time maintainers and relies entirely on independent contributors -to keep it alive and relevant. +funding or full-time maintainers, and relies entirely on independent +contributors to keep it alive and relevant. 
## Ways to contribute @@ -47,6 +47,12 @@ Some ways in which you can contribute to RDFLib are: Contributions that involve changes to the RDFLib repository have to be made with pull requests and should follow the [RDFLib developers guide](./developers.rst). +For changes that add features or affect the public API of RDFLib, it is +recommended to first open an issue to discuss the change before starting to work +on it. That way you can get feedback on the design of the feature before +spending time on it. + ## Code of Conduct -All contributions to the project should be consistent with the [code of conduct](./CODE_OF_CONDUCT.md) adopted by RDFLib. +All contributions to the project should be consistent with the [code of +conduct](./CODE_OF_CONDUCT.md) adopted by RDFLib. diff --git a/docs/developers.rst b/docs/developers.rst index 9d74df2f2..43e88385b 100644 --- a/docs/developers.rst +++ b/docs/developers.rst @@ -23,6 +23,11 @@ Pull Requests Guidelines Contributions to RDFLib are made through pull requests (PRs). +For changes that add features or affect the public API of RDFLib, it +is recommended to first open an issue to discuss the change before starting to +work on it. That way you can get feedback on the design of the feature before +spending time on it. + In general, maintainers will only merge PRs if the following conditions are met: @@ -47,11 +52,11 @@ met: workflow pass. In addition to these conditions, PRs that are easier to review and approve will -be processed quicker. The primary factors that determine this is the scope and -size of a PR. If there are few changes and the scope is limited then there is +be processed quicker. The primary factors that determine this are the scope and +size of a PR. If there are few changes and the scope is limited, then there is less that a reviewer has to understand and less that they can disagree with. It -is thus important to try and split up your changes into multiple independent -PRs if possible. No PR is too small. 
+is thus important to try to split up your changes into multiple independent PRs +if possible. No PR is too small. For PRs that introduce breaking changes, it is even more critical that they are limited in size and scope, as they will likely have to be kept up to date with @@ -59,7 +64,7 @@ the ``main`` branch of this project for some time before they are merged. It is also critical that your PR is understandable both in what it does and why it does it, and how the change will impact the users of this project, for this -reason it is essential that your PR's description explains the nature of the +reason, it is essential that your PR's description explains the nature of the PR, what the PR intends to do, why this is desirable, and how this will affect the users of this project. From 75178cfaeac1b6708c79d594e3a52bed4d41318c Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 19 May 2023 12:32:54 +0200 Subject: [PATCH 071/114] chore: Update `.gitignore` for change in flakeheaven (#2392) flakeheaven was changed to place its cache directory in the working directory, this change adds the flakeheaven cache directory to the `.gitignore` file. 
--- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 5c2017045..d42dc26fd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.flakeheaven_cache/ RDFLib.sublime-project /docs/_build/ RDFLib.sublime-workspace From fec7f0a71de98cc47cbacbab9a36e10bde575f1a Mon Sep 17 00:00:00 2001 From: Elliot Ford Date: Fri, 19 May 2023 11:34:01 +0100 Subject: [PATCH 072/114] docs: fix typo SPATQL -> SPARQL (#2371) --- docs/intro_to_sparql.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/intro_to_sparql.rst b/docs/intro_to_sparql.rst index 4539264f7..9de055413 100644 --- a/docs/intro_to_sparql.rst +++ b/docs/intro_to_sparql.rst @@ -95,7 +95,7 @@ example: # x: # y: - # Add in a new triple using SPATQL UPDATE + # Add in a new triple using SPARQL UPDATE g.update("""INSERT DATA { a }""") # Select all the things (s) that are of type (rdf:type) c: From ddcc4eb622a000cf991f9c530d55d62115484fca Mon Sep 17 00:00:00 2001 From: Alex Nelson Date: Fri, 19 May 2023 08:59:11 -0400 Subject: [PATCH 073/114] docs: remove unicode string form in rdflib/term.py (#2384) The use of Unicode literals is an artefact of Python 2 and is incorrect in Python 3. Doctests for docstrings using Unicode literals only pass because [ALLOW_UNICODE](https://docs.pytest.org/en/7.1.x/how-to/doctest.html#using-doctest-options) is set, but this option should be disabled as RDFLib does not support Python 2 any more. This partially resolves . 
Signed-off-by: Alex Nelson --- rdflib/term.py | 82 +++++++++++++++++++++++++------------------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/rdflib/term.py b/rdflib/term.py index bdfbec111..ff357d4de 100644 --- a/rdflib/term.py +++ b/rdflib/term.py @@ -574,7 +574,7 @@ class Literal(Identifier): >>> lit2006 < Literal('2007-01-01',datatype=XSD.date) True >>> Literal(datetime.utcnow()).datatype - rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#dateTime') + rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#dateTime') >>> Literal(1) > Literal(2) # by value False >>> Literal(1) > Literal(2.0) # by value @@ -696,11 +696,11 @@ def normalize(self) -> "Literal": of this literal >>> from rdflib import XSD >>> Literal("01", datatype=XSD.integer, normalize=False).normalize() - rdflib.term.Literal(u'1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) Illegal lexical forms for the datatype given are simply passed on >>> Literal("a", datatype=XSD.integer, normalize=False) - rdflib.term.Literal(u'a', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('a', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) """ @@ -754,9 +754,9 @@ def __add__(self, val: Any) -> "Literal": """ >>> from rdflib.namespace import XSD >>> Literal(1) + 1 - rdflib.term.Literal(u'2', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('2', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> Literal("1") + "1" - rdflib.term.Literal(u'11') + rdflib.term.Literal('11') # Handling dateTime/date/time based operations in Literals >>> a = Literal('2006-01-01T20:50:00', datatype=XSD.dateTime) @@ -970,17 +970,17 @@ def __bool__(self) -> bool: def __neg__(self) -> "Literal": """ >>> (- Literal(1)) - rdflib.term.Literal(u'-1', 
datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('-1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> (- Literal(10.5)) - rdflib.term.Literal(u'-10.5', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#double')) + rdflib.term.Literal('-10.5', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#double')) >>> from rdflib.namespace import XSD >>> (- Literal("1", datatype=XSD.integer)) - rdflib.term.Literal(u'-1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('-1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> (- Literal("1")) Traceback (most recent call last): File "", line 1, in - TypeError: Not a number; rdflib.term.Literal(u'1') + TypeError: Not a number; rdflib.term.Literal('1') >>> """ @@ -992,17 +992,17 @@ def __neg__(self) -> "Literal": def __pos__(self) -> "Literal": """ >>> (+ Literal(1)) - rdflib.term.Literal(u'1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> (+ Literal(-1)) - rdflib.term.Literal(u'-1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('-1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> from rdflib.namespace import XSD >>> (+ Literal("-1", datatype=XSD.integer)) - rdflib.term.Literal(u'-1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('-1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> (+ Literal("1")) Traceback (most recent call last): File "", line 1, in - TypeError: Not a number; rdflib.term.Literal(u'1') + TypeError: Not a number; rdflib.term.Literal('1') """ if isinstance(self.value, (int, long_type, float)): return Literal(self.value.__pos__()) @@ -1012,16 +1012,16 @@ def 
__pos__(self) -> "Literal": def __abs__(self) -> "Literal": """ >>> abs(Literal(-1)) - rdflib.term.Literal(u'1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> from rdflib.namespace import XSD >>> abs( Literal("-1", datatype=XSD.integer)) - rdflib.term.Literal(u'1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> abs(Literal("1")) Traceback (most recent call last): File "", line 1, in - TypeError: Not a number; rdflib.term.Literal(u'1') + TypeError: Not a number; rdflib.term.Literal('1') """ if isinstance(self.value, (int, long_type, float)): return Literal(self.value.__abs__()) @@ -1031,18 +1031,18 @@ def __abs__(self) -> "Literal": def __invert__(self) -> "Literal": """ >>> ~(Literal(-1)) - rdflib.term.Literal(u'0', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('0', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) >>> from rdflib.namespace import XSD >>> ~( Literal("-1", datatype=XSD.integer)) - rdflib.term.Literal(u'0', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) + rdflib.term.Literal('0', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) Not working: >>> ~(Literal("1")) Traceback (most recent call last): File "", line 1, in - TypeError: Not a number; rdflib.term.Literal(u'1') + TypeError: Not a number; rdflib.term.Literal('1') """ if isinstance(self.value, (int, long_type, float)): # type error: Unsupported operand type for ~ ("float") @@ -1423,51 +1423,51 @@ def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: Examples:: >>> Literal("foo").n3() - u'"foo"' + '"foo"' Strings with newlines or triple-quotes:: >>> Literal("foo\nbar").n3() - u'"""foo\nbar"""' + 
'"""foo\nbar"""' >>> Literal("''\'").n3() - u'"\'\'\'"' + '"\'\'\'"' >>> Literal('"""').n3() - u'"\\"\\"\\""' + '"\\"\\"\\""' Language:: >>> Literal("hello", lang="en").n3() - u'"hello"@en' + '"hello"@en' Datatypes:: >>> Literal(1).n3() - u'"1"^^' + '"1"^^' >>> Literal(1.0).n3() - u'"1.0"^^' + '"1.0"^^' >>> Literal(True).n3() - u'"true"^^' + '"true"^^' Datatype and language isn't allowed (datatype takes precedence):: >>> Literal(1, lang="en").n3() - u'"1"^^' + '"1"^^' Custom datatype:: >>> footype = URIRef("http://example.org/ns#foo") >>> Literal("1", datatype=footype).n3() - u'"1"^^' + '"1"^^' Passing a namespace-manager will use it to abbreviate datatype URIs: >>> from rdflib import Graph >>> Literal(1).n3(Graph().namespace_manager) - u'"1"^^xsd:integer' + '"1"^^xsd:integer' ''' if namespace_manager: return self._literal_n3(qname_callback=namespace_manager.normalizeUri) @@ -1484,43 +1484,43 @@ def _literal_n3( >>> from rdflib.namespace import XSD >>> Literal(1)._literal_n3(use_plain=True) - u'1' + '1' >>> Literal(1.0)._literal_n3(use_plain=True) - u'1e+00' + '1e+00' >>> Literal(1.0, datatype=XSD.decimal)._literal_n3(use_plain=True) - u'1.0' + '1.0' >>> Literal(1.0, datatype=XSD.float)._literal_n3(use_plain=True) - u'"1.0"^^' + '"1.0"^^' >>> Literal("foo", datatype=XSD.string)._literal_n3( ... use_plain=True) - u'"foo"^^' + '"foo"^^' >>> Literal(True)._literal_n3(use_plain=True) - u'true' + 'true' >>> Literal(False)._literal_n3(use_plain=True) - u'false' + 'false' >>> Literal(1.91)._literal_n3(use_plain=True) - u'1.91e+00' + '1.91e+00' Only limited precision available for floats: >>> Literal(0.123456789)._literal_n3(use_plain=True) - u'1.234568e-01' + '1.234568e-01' >>> Literal('0.123456789', ... datatype=XSD.decimal)._literal_n3(use_plain=True) - u'0.123456789' + '0.123456789' Using callback for datatype QNames:: >>> Literal(1)._literal_n3( ... 
qname_callback=lambda uri: "xsd:integer") - u'"1"^^xsd:integer' + '"1"^^xsd:integer' """ if use_plain and self.datatype in _PLAIN_LITERAL_TYPES: From f200722bfecb15d108d3f6c752eee5b34aacc058 Mon Sep 17 00:00:00 2001 From: "Jeffrey C. Lerman" Date: Sun, 21 May 2023 02:35:47 -0700 Subject: [PATCH 074/114] feat: add `curie` method to `NamespaceManager` (#2365) Added a `curie` method to `NamespaceManager`, which can be used to generate a CURIE from a URI. Other changes: - Fixed `NamespaceManager.expand_curie` to work with CURIES that have blank prefixes (e.g. `:something`), which are valid according to [CURIE Syntax 1.0](https://www.w3.org/TR/2010/NOTE-curie-20101216/). - Added a test to confirm . Fixes . --------- Co-authored-by: Iwan Aucamp --- rdflib/namespace/__init__.py | 31 ++++- test/test_namespace/test_namespacemanager.py | 113 +++++++++++++++++++ test/utils/exceptions.py | 44 ++++++-- 3 files changed, 179 insertions(+), 9 deletions(-) diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index 8455e2b63..3e591fcf7 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -490,6 +490,35 @@ def qname(self, uri: str) -> str: else: return ":".join((prefix, name)) + def curie(self, uri: str, generate: bool = True) -> str: + """ + From a URI, generate a valid CURIE. + + Result is guaranteed to contain a colon separating the prefix from the + name, even if the prefix is an empty string. + + .. warning:: + + When ``generate`` is `True` (which is the default) and there is no + matching namespace for the URI in the namespace manager then a new + namespace will be added with prefix ``ns{index}``. + + Thus, when ``generate`` is `True`, this function is not a pure + function because of this side-effect. + + This default behaviour is chosen so that this function operates + similarly to `NamespaceManager.qname`. + + :param uri: URI to generate CURIE for. 
+ :param generate: Whether to add a prefix for the namespace if one doesn't + already exist. Default: `True`. + :return: CURIE for the URI. + :raises KeyError: If generate is `False` and the namespace doesn't already have + a prefix. + """ + prefix, namespace, name = self.compute_qname(uri, generate=generate) + return ":".join((prefix, name)) + def qname_strict(self, uri: str) -> str: prefix, namespace, name = self.compute_qname_strict(uri) if prefix == "": @@ -643,7 +672,7 @@ def expand_curie(self, curie: str) -> URIRef: if not type(curie) is str: raise TypeError(f"Argument must be a string, not {type(curie).__name__}.") parts = curie.split(":", 1) - if len(parts) != 2 or len(parts[0]) < 1: + if len(parts) != 2: raise ValueError( "Malformed curie argument, format should be e.g. “foaf:name”." ) diff --git a/test/test_namespace/test_namespacemanager.py b/test/test_namespace/test_namespacemanager.py index 20cb9594f..a35f3ac63 100644 --- a/test/test_namespace/test_namespacemanager.py +++ b/test/test_namespace/test_namespacemanager.py @@ -5,6 +5,7 @@ import sys from contextlib import ExitStack from pathlib import Path +from test.utils.exceptions import ExceptionChecker from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Set, Tuple, Type, Union import pytest @@ -484,3 +485,115 @@ def check() -> None: check() # Run a second time to check caching check() + + +def make_test_nsm() -> NamespaceManager: + namespaces = [ + ("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"), + ("", "http://example.org/"), + ( + # Because of this + # will have no effect on the namespace manager. 
+ "eg", + "http://example.org/", + ), + ] + graph = Graph(bind_namespaces="none") + for prefix, namespace in namespaces: + graph.bind(prefix, namespace, override=False) + + return graph.namespace_manager + + +@pytest.fixture(scope="session") +def test_nsm_session() -> NamespaceManager: + return make_test_nsm() + + +@pytest.fixture(scope="function") +def test_nsm_function() -> NamespaceManager: + return make_test_nsm() + + +@pytest.mark.parametrize( + ["curie", "expected_result"], + [ + ("rdf:type", "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + (":foo", "http://example.org/foo"), + ("too_small", ExceptionChecker(ValueError, "Malformed curie argument")), + ( + "egdo:bar", + ExceptionChecker(ValueError, 'Prefix "egdo" not bound to any namespace'), + ), + pytest.param( + "eg:foo", + "http://example.org/foo", + marks=pytest.mark.xfail( + raises=ValueError, + reason="This is failing because of https://github.com/RDFLib/rdflib/issues/2077", + ), + ), + ], +) +def test_expand_curie( + test_nsm_session: NamespaceManager, + curie: str, + expected_result: Union[ExceptionChecker, str], +) -> None: + nsm = test_nsm_session + with ExitStack() as xstack: + if isinstance(expected_result, ExceptionChecker): + xstack.enter_context(expected_result) + result = nsm.expand_curie(curie) + + if not isinstance(expected_result, ExceptionChecker): + assert URIRef(expected_result) == result + + +@pytest.mark.parametrize( + ["uri", "generate", "expected_result"], + [ + ("http://www.w3.org/1999/02/22-rdf-syntax-ns#type", None, "rdf:type"), + ("http://example.org/foo", None, ":foo"), + ("http://example.com/a#chair", None, "ns1:chair"), + ("http://example.com/a#chair", True, "ns1:chair"), + ( + "http://example.com/a#chair", + False, + ExceptionChecker( + KeyError, "No known prefix for http://example.com/a# and generate=False" + ), + ), + ("http://example.com/b#chair", None, "ns1:chair"), + ("http://example.com/c", None, "ns1:c"), + ("", None, ExceptionChecker(ValueError, "Can't split 
''")), + ( + "http://example.com/", + None, + ExceptionChecker(ValueError, "Can't split 'http://example.com/'"), + ), + ], +) +def test_generate_curie( + test_nsm_function: NamespaceManager, + uri: str, + generate: Optional[bool], + expected_result: Union[ExceptionChecker, str], +) -> None: + """ + .. note:: + + This is using the function scoped nsm fixture because curie has side + effects and will modify the namespace manager. + """ + nsm = test_nsm_function + with ExitStack() as xstack: + if isinstance(expected_result, ExceptionChecker): + xstack.enter_context(expected_result) + if generate is None: + result = nsm.curie(uri) + else: + result = nsm.curie(uri, generate=generate) + + if not isinstance(expected_result, ExceptionChecker): + assert expected_result == result diff --git a/test/utils/exceptions.py b/test/utils/exceptions.py index a814f9b40..94cfd9c29 100644 --- a/test/utils/exceptions.py +++ b/test/utils/exceptions.py @@ -1,15 +1,32 @@ +from __future__ import annotations + import logging import re from dataclasses import dataclass -from typing import Any, Dict, Optional, Pattern, Type, Union +from types import TracebackType +from typing import Any, ContextManager, Dict, Optional, Pattern, Type, Union + +import pytest +from pytest import ExceptionInfo -@dataclass(frozen=True) -class ExceptionChecker: +@dataclass +class ExceptionChecker(ContextManager[ExceptionInfo[Exception]]): type: Type[Exception] pattern: Optional[Union[Pattern[str], str]] = None attributes: Optional[Dict[str, Any]] = None + def __post_init__(self) -> None: + self._catcher = pytest.raises(self.type, match=self.pattern) + self._exception_info: Optional[ExceptionInfo[Exception]] = None + + def _check_attributes(self, exception: Exception) -> None: + if self.attributes is not None: + for key, value in self.attributes.items(): + logging.debug("checking exception attribute %s=%r", key, value) + assert hasattr(exception, key) + assert getattr(exception, key) == value + def check(self, 
exception: Exception) -> None: logging.debug("checking exception %s/%r", type(exception), exception) pattern = self.pattern @@ -19,11 +36,22 @@ def check(self, exception: Exception) -> None: assert isinstance(exception, self.type) if pattern is not None: assert pattern.match(f"{exception}") - if self.attributes is not None: - for key, value in self.attributes.items(): - logging.debug("checking exception attribute %s=%r", key, value) - assert hasattr(exception, key) - assert getattr(exception, key) == value + self._check_attributes(exception) except Exception: logging.error("problem checking exception", exc_info=exception) raise + + def __enter__(self) -> ExceptionInfo[Exception]: + self._exception_info = self._catcher.__enter__() + return self._exception_info + + def __exit__( + self, + __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> bool: + result = self._catcher.__exit__(__exc_type, __exc_value, __traceback) + if self._exception_info is not None: + self._check_attributes(self._exception_info.value) + return result From b0c7b279f88d17969592627c2418b3d952133183 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 20:08:29 +0200 Subject: [PATCH 075/114] build(deps): bump library/python in /docker/latest (#2396) Bumps library/python from `286f2f1` to `551c952`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index d7a75f572..2fe5f1064 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:286f2f1d6f2f730a44108656afb04b131504b610a6cb2f3413918e98dabba67e +FROM docker.io/library/python:3.11.3-slim@sha256:551c9529e77896518ac5693d7e98ee5e12051d625de450ac2a68da1eae15ec87 COPY docker/latest/requirements.txt /var/tmp/build/ From aeefd2ea6a715064761d56d9abecb3efcbbee0e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 20:08:51 +0200 Subject: [PATCH 076/114] build(deps-dev): bump setuptools from 67.7.2 to 67.8.0 (#2397) Bumps [setuptools](https://github.com/pypa/setuptools) from 67.7.2 to 67.8.0. - [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/CHANGES.rst) - [Commits](https://github.com/pypa/setuptools/compare/v67.7.2...v67.8.0) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index cec58e05f..fc9f76d73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1107,19 +1107,19 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.7.2" +version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, - {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, + {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, + {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs 
(>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] From cb9ae11d31e1e66b443641e1086301fed349499a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 20:09:10 +0200 Subject: [PATCH 077/114] build(deps): bump library/python in /docker/unstable (#2398) Bumps library/python from `286f2f1` to `551c952`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index aff1d8767..87afdf96f 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:286f2f1d6f2f730a44108656afb04b131504b610a6cb2f3413918e98dabba67e +FROM docker.io/library/python:3.11.3-slim@sha256:551c9529e77896518ac5693d7e98ee5e12051d625de450ac2a68da1eae15ec87 # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From 6cdca825fa477b2af1dcac50b0aa508cf41d4244 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 20:09:42 +0200 Subject: [PATCH 078/114] build(deps-dev): bump types-setuptools from 67.7.0.2 to 67.8.0.0 (#2401) 
Bumps [types-setuptools](https://github.com/python/typeshed) from 67.7.0.2 to 67.8.0.0. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index fc9f76d73..eaa2fd503 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1373,14 +1373,14 @@ files = [ [[package]] name = "types-setuptools" -version = "67.7.0.2" +version = "67.8.0.0" description = "Typing stubs for setuptools" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.7.0.2.tar.gz", hash = "sha256:155789e85e79d5682b0d341919d4beb6140408ae52bac922af25b54e36ab25c0"}, - {file = "types_setuptools-67.7.0.2-py3-none-any.whl", hash = "sha256:bd30f6dbe9b83f0a7e6e3eab6d2df748aa4f55700d54e9f077d3aa30cc019445"}, + {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"}, + {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"}, ] [[package]] From 7860cd472f697553dea6fe47d86f3d601497e2f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 20:33:36 +0200 Subject: [PATCH 079/114] build(deps): bump poetry from 1.4.2 to 1.5.0 in /devtools (#2399) Bumps [poetry](https://github.com/python-poetry/poetry) from 1.4.2 to 1.5.0. 
- [Release notes](https://github.com/python-poetry/poetry/releases) - [Changelog](https://github.com/python-poetry/poetry/blob/master/CHANGELOG.md) - [Commits](https://github.com/python-poetry/poetry/compare/1.4.2...1.5.0) --- updated-dependencies: - dependency-name: poetry dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- devtools/requirements-poetry.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/devtools/requirements-poetry.in b/devtools/requirements-poetry.in index b01ce00a4..2c6833724 100644 --- a/devtools/requirements-poetry.in +++ b/devtools/requirements-poetry.in @@ -1,3 +1,3 @@ # Fixing this here as readthedocs can't use the compiled requirements-poetry.txt # due to conflicts. -poetry==1.4.2 +poetry==1.5.0 From ad56044cf3033f8d2a3c625d322f3d091d7c9322 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 22 May 2023 23:12:54 +0200 Subject: [PATCH 080/114] [pre-commit.ci] pre-commit autoupdate (#2403) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/python-poetry/poetry: 1.4.2 → 1.5.0](https://github.com/python-poetry/poetry/compare/1.4.2...1.5.0) --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 04a9693f0..5f0c147ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: require_serial: true args: ["."] - repo: https://github.com/python-poetry/poetry - rev: 1.4.2 + rev: 1.5.0 hooks: - id: poetry-check - id: poetry-lock From cad367e6cb47c3f2ea0f0951649e82ae8dd82eea Mon Sep 17 00:00:00 2001 
From: Iwan Aucamp Date: Fri, 2 Jun 2023 20:54:13 +0200 Subject: [PATCH 081/114] docs: add guidelines for breaking changes (#2402) Add guidelines on how breaking changes should be approached. The guidelines take a very pragmatic approach with known downsides, but this seems like the best compromise given the current situation. For prior discussion on this point see: - https://github.com/RDFLib/rdflib/discussions/2395 - https://github.com/RDFLib/rdflib/pull/2108 - https://github.com/RDFLib/rdflib/discussions/1841 --- docs/conf.py | 1 + docs/developers.rst | 83 ++++++++++++++++++++++++++++++++++++++++++++- docs/index.rst | 10 ++++++ 3 files changed, 93 insertions(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 1e2b7ef46..9836b9748 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -266,6 +266,7 @@ def find_version(filename): # This is here to prevent: # "WARNING: more than one target found for cross-reference" "ref.python", + "autosectionlabel.*", ] sphinx_version = tuple(int(part) for part in sphinx.__version__.split(".")) diff --git a/docs/developers.rst b/docs/developers.rst index 43e88385b..759e017ea 100644 --- a/docs/developers.rst +++ b/docs/developers.rst @@ -16,7 +16,7 @@ developing RDFLib code. * You must supply tests for new code. * RDFLib uses `Poetry `_ for dependency management and packaging. -If you add a new cool feature, consider also adding an example in ``./examples`` +If you add a new cool feature, consider also adding an example in ``./examples``. Pull Requests Guidelines ------------------------ @@ -71,6 +71,87 @@ the users of this project. Please note that while we would like all PRs to follow the guidelines given here, we will not reject a PR just because it does not. +Maintenance Guidelines +---------------------- + +This section contains guidelines for maintaining RDFLib. RDFLib maintainers +should try to follow these. These guidelines also serve as an indication to +RDFLib users what they can expect. 
+ +Breaking changes +~~~~~~~~~~~~~~~~ + +Breaking changes to RDFLib's public API should be made incrementally, with small +pull requests to the main branch that change as few things as possible. + +Breaking changes should be discussed first in an issue before work is started, +as it is possible that the change is not necessary or that there is a better way +to achieve the same goal, in which case the work on the PR would have been +wasted. This will however not be strictly enforced, and no PR will be rejected +solely on the basis that it was not discussed upfront. + +RDFLib follows `semantic versioning `_ and `trunk-based development +`_, so if any breaking changes were +introduced into the main branch since the last release, then the next release +will be a major release with an incremented major version. + +Releases of RDFLib will not as a rule be conditioned on specific features, so +there may be new major releases that contain very few breaking changes, and +there could be no minor or patch releases between two major releases. + +.. _breaking_changes_rationale: + +Rationale +^^^^^^^^^ + +RDFLib has been around for more than a decade, and in this time both Python and +RDF have evolved, and RDFLib's API also has to evolve to keep up with these +changes and to make it easier for users to use. This will inevitably require +breaking changes. + +There are more or less two ways to introduce breaking changes to RDFLib's public +API: + +- Revolutionary: Create a new API from scratch and reimplement it, and when + ready, release a new version of RDFLib with the new API. +- Evolutionary: Incrementally improve the existing API with small changes and + release any breaking changes that were made at regular intervals. + +While the revolutionary approach seems appealing, it is also risky and +time-consuming. 
+ +The evolutionary approach puts a lot of strain on the users of RDFLib as they +have to adapt to breaking changes more often, but the shortcomings of the RDFLib +public API also put a lot of strain on the users of RDFLib. On the other hand, a +major advantage of the evolutionary approach is that it is simple and achievable +from a maintenance and contributor perspective. + +Deprecating functionality +~~~~~~~~~~~~~~~~~~~~~~~~~ + +To whatever extent possible, classes, functions, variables, or parameters that +will be removed should be marked for deprecation in documentation, and if +possible, should be changed to raise deprecation warnings if used. + +There is however no hard requirement that something may only be removed after a +deprecation notice has been added, or only after a release was made with a +deprecation notice. + +Consequently, functionality may be removed without it ever being marked as +deprecated. + +.. _deprecation_rationale: + +Rationale +^^^^^^^^^ + +Current resource limitations and the backlog of issues make it impractical to +first release or incorporate deprecation notices before making quality of life +changes. + +RDFLib uses semantic versioning and provides type hints, and these are the +primary mechanisms for signalling breaking changes to our users. + .. _tests: Tests diff --git a/docs/index.rst b/docs/index.rst index e36962ea0..e3e2ca003 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -90,6 +90,16 @@ API reference: .. * :ref:`genindex` .. * :ref:`modindex` +Versioning +---------- +RDFLib follows `Semantic Versioning 2.0.0 `_, which can be summarized as follows: + + Given a version number ``MAJOR.MINOR.PATCH``, increment the: + + #. ``MAJOR`` version when you make incompatible API changes + #. ``MINOR`` version when you add functionality in a backwards-compatible + manner + #. 
``PATCH`` version when you make backwards-compatible bug fixes For developers -------------- From d615cd5ad3bc1b69fa19524a7957b21d69f403d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Jun 2023 20:55:38 +0200 Subject: [PATCH 082/114] build(deps-dev): bump coverage from 7.2.5 to 7.2.6 (#2413) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.2.5 to 7.2.6. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.2.5...7.2.6) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 104 ++++++++++++++++++++++++++-------------------------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index eaa2fd503..5fd063882 100644 --- a/poetry.lock +++ b/poetry.lock @@ -216,63 +216,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.5" +version = "7.2.6" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = 
"coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, + {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, + {file = 
"coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, + {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, + {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, + {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, + {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, + {file = 
"coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, + {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, + {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, + {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, + {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, 
+ {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, + {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, + {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, + {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, + {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, + {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, + {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, + {file = 
"coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, + {file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, + {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, + {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, + {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, ] [package.dependencies] From 663054e9d031e76feeca9a8a68508e03bd573c67 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Jun 2023 20:55:46 +0200 Subject: [PATCH 083/114] build(deps-dev): bump typing-extensions from 4.5.0 to 4.6.2 (#2415) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.5.0 to 4.6.2. 
- [Changelog](https://github.com/python/typing_extensions/blob/main/CHANGELOG.md) - [Commits](https://github.com/python/typing_extensions/compare/4.5.0...4.6.2) --- updated-dependencies: - dependency-name: typing-extensions dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5fd063882..964c2d0f8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1385,14 +1385,14 @@ files = [ [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.6.2" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, + {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, ] [[package]] From 6c119e29a878967bba4687f8984d5b0dab088bab Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Jun 2023 20:55:57 +0200 Subject: [PATCH 084/114] build(deps-dev): bump pytest-cov from 4.0.0 to 4.1.0 (#2414) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.0.0 to 4.1.0. 
- [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 964c2d0f8..310359890 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1004,14 +1004,14 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] From 15a7c54ea0c56dd1ae9b6ac669ea3221a63a4e08 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Jun 2023 20:57:05 +0200 Subject: [PATCH 085/114] build(deps): bump library/python in /docker/latest (#2416) Bumps library/python from `551c952` to `eaee5f7`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index 2fe5f1064..1539914e9 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:551c9529e77896518ac5693d7e98ee5e12051d625de450ac2a68da1eae15ec87 +FROM docker.io/library/python:3.11.3-slim@sha256:eaee5f73efa9ae962d2077756292bc4878c04fcbc13dc168bb00cc365f35647e COPY docker/latest/requirements.txt /var/tmp/build/ From b5ce69071acac684d4ec6d0d63b3466cd8be74c4 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Fri, 2 Jun 2023 22:19:00 +0200 Subject: [PATCH 086/114] test: add variants for blank prefixes and base (#2421) Test that the base directive and blank prefixes are handled correctly. --- .../blank_and_base_prefix-asserts.json | 4 +++ test/data/variants/blank_and_base_prefix.nt | 6 +++++ test/data/variants/blank_and_base_prefix.ttl | 17 ++++++++++++ test/data/variants/blank_and_base_prefix.xml | 26 +++++++++++++++++++ 4 files changed, 53 insertions(+) create mode 100644 test/data/variants/blank_and_base_prefix-asserts.json create mode 100644 test/data/variants/blank_and_base_prefix.nt create mode 100644 test/data/variants/blank_and_base_prefix.ttl create mode 100644 test/data/variants/blank_and_base_prefix.xml diff --git a/test/data/variants/blank_and_base_prefix-asserts.json b/test/data/variants/blank_and_base_prefix-asserts.json new file mode 100644 index 000000000..83ae1a8af --- /dev/null +++ b/test/data/variants/blank_and_base_prefix-asserts.json @@ -0,0 +1,4 @@ +{ + "quad_count": 6, + "exact_match": true +} diff --git a/test/data/variants/blank_and_base_prefix.nt b/test/data/variants/blank_and_base_prefix.nt new file mode 100644 index 000000000..67ff3564d --- /dev/null +++ b/test/data/variants/blank_and_base_prefix.nt @@ -0,0 
+1,6 @@ + . + "subject0"@en . + . + . + "subject0"@en . + . diff --git a/test/data/variants/blank_and_base_prefix.ttl b/test/data/variants/blank_and_base_prefix.ttl new file mode 100644 index 000000000..27a7ceb9c --- /dev/null +++ b/test/data/variants/blank_and_base_prefix.ttl @@ -0,0 +1,17 @@ +@base . +@prefix : . +@prefix rdfs: . +@prefix owl: . + +# A Turtle document using both a blank and a base prefix together with well +# known prefixes. + +:subject0 a owl:Class; + rdfs:label "subject0"@en; + :predicate00 :object00; + . + +<#subject1> a owl:Class; + rdfs:label "subject0"@en; + <#predicate10> <#object10>; + . diff --git a/test/data/variants/blank_and_base_prefix.xml b/test/data/variants/blank_and_base_prefix.xml new file mode 100644 index 000000000..afd91a76e --- /dev/null +++ b/test/data/variants/blank_and_base_prefix.xml @@ -0,0 +1,26 @@ + + + + + + + subject0 + + + + + + subject0 + + + + From d37cb5688835b70ed49d51b6b1db727c43306e07 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Jun 2023 19:22:26 +0200 Subject: [PATCH 087/114] build(deps-dev): bump typing-extensions from 4.6.2 to 4.6.3 (#2424) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.6.2 to 4.6.3. - [Changelog](https://github.com/python/typing_extensions/blob/main/CHANGELOG.md) - [Commits](https://github.com/python/typing_extensions/compare/4.6.2...4.6.3) --- updated-dependencies: - dependency-name: typing-extensions dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 310359890..fb8591f46 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1385,14 +1385,14 @@ files = [ [[package]] name = "typing-extensions" -version = "4.6.2" +version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, - {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, + {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, + {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, ] [[package]] From 01c7d602a94fca7b4a654a2cda9bbd69d61636c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Jun 2023 19:22:43 +0200 Subject: [PATCH 088/114] build(deps-dev): bump coverage from 7.2.6 to 7.2.7 (#2423) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.2.6 to 7.2.7. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.2.6...7.2.7) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 113 ++++++++++++++++++++++++++++------------------------ 1 file changed, 61 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index fb8591f46..eb4240821 100644 --- a/poetry.lock +++ b/poetry.lock @@ -216,63 +216,72 @@ files = [ [[package]] name = "coverage" -version = "7.2.6" +version = "7.2.7" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, - {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, - {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = 
"sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, - {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, - {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, - {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, - {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, - {file = 
"coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, - {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, - {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, - {file = 
"coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, - {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, - {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, - {file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, - {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, - {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, - {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = 
"coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] From ace4acd09a2c5bf353cef24e6400bda8a904e88b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Jun 2023 19:23:00 +0200 Subject: [PATCH 089/114] build(deps-dev): bump poetry from 1.5.0 to 1.5.1 (#2422) Bumps [poetry](https://github.com/python-poetry/poetry) from 1.5.0 to 1.5.1. - [Release notes](https://github.com/python-poetry/poetry/releases) - [Changelog](https://github.com/python-poetry/poetry/blob/master/CHANGELOG.md) - [Commits](https://github.com/python-poetry/poetry/compare/1.5.0...1.5.1) --- updated-dependencies: - dependency-name: poetry dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- devtools/requirements-poetry.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/devtools/requirements-poetry.in b/devtools/requirements-poetry.in index 2c6833724..d179dac98 100644 --- a/devtools/requirements-poetry.in +++ b/devtools/requirements-poetry.in @@ -1,3 +1,3 @@ # Fixing this here as readthedocs can't use the compiled requirements-poetry.txt # due to conflicts. -poetry==1.5.0 +poetry==1.5.1 From 4b96e9d18cf42b64237d232fcb88cab5b316e877 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Thu, 8 Jun 2023 21:40:27 +0200 Subject: [PATCH 090/114] BREAKING CHANGE: Don't use `publicID` as the name for the default graph. (#2406) When parsing data into a `ConjunctiveGraph` or `Dataset`, the triples in the default graphs in the sources were loaded into a graph named `publicID`. 
This behaviour has been changed, and now the triples from the default graph in source RDF documents will be loaded into `ConjunctiveGraph.default_context` or `Dataset.default_context`. The `publicID` parameter to `ConjunctiveGraph.parse` and `Dataset.parse` constructors will now only be used as the base URI for relative URI resolution. - Fixes https://github.com/RDFLib/rdflib/issues/2404 - Fixes https://github.com/RDFLib/rdflib/issues/2375 - Fixes https://github.com/RDFLib/rdflib/issues/436 - Fixes https://github.com/RDFLib/rdflib/issues/1804 --- README.md | 2 +- docs/conf.py | 1 + docs/index.rst | 1 + docs/upgrade6to7.rst | 45 ++++++ pyproject.toml | 2 +- rdflib/graph.py | 71 +++++--- rdflib/plugins/sparql/sparql.py | 13 +- test/data/variants/more_quads-asserts.json | 2 +- test/data/variants/more_quads.jsonld | 107 ++++++------ test/data/variants/more_quads.nq | 8 +- test/data/variants/more_quads.trig | 5 + test/data/variants/simple_triple.n3 | 1 + test/data/variants/simple_triple.trig | 2 + .../test_conjunctive_graph.py | 2 +- .../test_dataset_default_graph.py | 152 ++++++++++++++++++ test/test_graph/test_variants.py | 13 +- test/test_issues/test_issue535.py | 2 +- test/test_store/test_store_berkeleydb.py | 37 ++++- test/test_trig.py | 9 -- test/test_w3c_spec/test_sparql10_w3c.py | 7 +- test/test_w3c_spec/test_sparql11_w3c.py | 7 +- test/test_w3c_spec/test_sparql_rdflib.py | 7 +- test/utils/__init__.py | 4 + test/utils/iri.py | 30 ++++ test/utils/sparql_checker.py | 55 ++----- test/utils/test/test_testutils.py | 30 ++-- 26 files changed, 448 insertions(+), 167 deletions(-) create mode 100644 docs/upgrade6to7.rst create mode 100644 test/data/variants/simple_triple.n3 create mode 100644 test/data/variants/simple_triple.trig create mode 100644 test/test_dataset/test_dataset_default_graph.py diff --git a/README.md b/README.md index 6157ef8ef..b2a92d16a 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ Help with maintenance of all of the RDFLib family of 
packages is always welcome ## Versions & Releases -* `6.4.0a0` current `main` branch +* `7.0.0a0` current `main` branch * `6.x.y` current release and support Python 3.7+ only. Many improvements over 5.0.0 * see [Releases](https://github.com/RDFLib/rdflib/releases) * `5.x.y` supports Python 2.7 and 3.4+ and is [mostly backwards compatible with 4.2.2](https://rdflib.readthedocs.io/en/stable/upgrade4to5.html). diff --git a/docs/conf.py b/docs/conf.py index 9836b9748..add49fdfe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -55,6 +55,7 @@ # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html autodoc_default_options = {"special-members": True} +autodoc_inherit_docstrings = True # https://github.com/tox-dev/sphinx-autodoc-typehints always_document_param_types = True diff --git a/docs/index.rst b/docs/index.rst index e3e2ca003..9d130501e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -66,6 +66,7 @@ If you are familiar with RDF and are looking for details on how RDFLib handles i namespaces_and_bindings persistence merging + upgrade6to7 upgrade5to6 upgrade4to5 security_considerations diff --git a/docs/upgrade6to7.rst b/docs/upgrade6to7.rst new file mode 100644 index 000000000..c8847adbf --- /dev/null +++ b/docs/upgrade6to7.rst @@ -0,0 +1,45 @@ +.. _upgrade4to5: Upgrading from RDFLib version 6 to 7 + +============================================ +Upgrading 6 to 7 +============================================ + +New behaviour for ``publicID`` in ``parse`` methods. +---------------------------------------------------- + +Before version 7, the ``publicID`` argument to the +:meth:`~rdflib.graph.ConjunctiveGraph.parse` and +:meth:`~rdflib.graph.Dataset.parse` methods was used as the name for the default +graph, and triples from the default graph in a source were loaded into the graph +named ``publicID``. + +In version 7, the ``publicID`` argument is only used as the base URI for relative +URI resolution as defined in `IETF RFC 3986 +`_. 
+ +To accommodate this change, ensure that use of the ``publicID`` argument is +consistent with the new behaviour. + +If you want to load triples from a format that does not support named graphs +into a named graph, use the following code: + +.. code-block:: python + + from rdflib import ConjunctiveGraph + + cg = ConjunctiveGraph() + cg.get_context("example:graph_name").parse("http://example.com/source.ttl", format="turtle") + +If you want to move triples from the default graph into a named graph, use the +following code: + +.. code-block:: python + + from rdflib import ConjunctiveGraph + + cg = ConjunctiveGraph() + cg.parse("http://example.com/source.trig", format="trig") + destination_graph = cg.get_context("example:graph_name") + for triple in cg.default_context.triples((None, None, None)): + destination_graph.add(triple) + cg.default_context.remove(triple) diff --git a/pyproject.toml b/pyproject.toml index 5dee7655b..4d37e57c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "6.4.0a0" +version = "7.0.0a0" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] diff --git a/rdflib/graph.py b/rdflib/graph.py index 6e2e50aff..4d8645b2f 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -1400,26 +1400,26 @@ def parse( :doc:`Security Considerations ` documentation. - :Parameters: - - - ``source``: An InputSource, file-like object, or string. In the case - of a string the string is the location of the source. - - ``location``: A string indicating the relative or absolute URL of - the source. Graph's absolutize method is used if a relative location + :param source: An `InputSource`, file-like object, `Path` like object, + or string. In the case of a string the string is the location of the + source. + :param location: A string indicating the relative or absolute URL of the + source. 
`Graph`'s absolutize method is used if a relative location is specified. - - ``file``: A file-like object. - - ``data``: A string containing the data to be parsed. - - ``format``: Used if format can not be determined from source, e.g. + :param file: A file-like object. + :param data: A string containing the data to be parsed. + :param format: Used if format can not be determined from source, e.g. file extension or Media Type. Defaults to text/turtle. Format support can be extended with plugins, but "xml", "n3" (use for turtle), "nt" & "trix" are built in. - - ``publicID``: the logical URI to use as the document base. If None + :param publicID: the logical URI to use as the document base. If None specified the document location is used (at least in the case where - there is a document location). - - :Returns: - - - self, the graph instance. + there is a document location). This is used as the base URI when + resolving relative URIs in the source document, as defined in `IETF + RFC 3986 + `_, + given the source document does not define a base URI. + :return: ``self``, i.e. the :class:`~rdflib.graph.Graph` instance. Examples: @@ -2206,15 +2206,18 @@ def parse( **args: Any, ) -> "Graph": """ - Parse source adding the resulting triples to its own context - (sub graph of this graph). + Parse source adding the resulting triples to its own context (sub graph + of this graph). See :meth:`rdflib.graph.Graph.parse` for documentation on arguments. + If the source is in a format that does not support named graphs it's triples + will be added to the default graph (i.e. `Dataset.default_context`). + :Returns: - The graph into which the source was parsed. In the case of n3 - it returns the root context. + The graph into which the source was parsed. In the case of n3 it returns + the root context. .. caution:: @@ -2228,6 +2231,14 @@ def parse( For information on available security measures, see the RDFLib :doc:`Security Considerations ` documentation. 
+ + *Changed in 7.0*: The ``publicID`` argument is no longer used as the + identifier (i.e. name) of the default graph as was the case before + version 7.0. In the case of sources that do not support named graphs, + the ``publicID`` parameter will also not be used as the name for the + graph that the data is loaded into, and instead the triples from sources + that do not support named graphs will be loaded into the default graph + (i.e. `ConjunctionGraph.default_context`). """ source = create_input_source( @@ -2246,12 +2257,8 @@ def parse( # create_input_source will ensure that publicId is not None, though it # would be good if this guarantee was made more explicit i.e. by type # hint on InputSource (TODO/FIXME). - g_id: str = publicID and publicID or source.getPublicId() - if not isinstance(g_id, Node): - g_id = URIRef(g_id) - context = Graph(store=self.store, identifier=g_id) - context.remove((None, None, None)) # hmm ? + context = self.default_context context.parse(source, publicID=publicID, format=format, **args) # TODO: FIXME: This should not return context, but self. return context @@ -2459,6 +2466,14 @@ def parse( **args: Any, ) -> "Graph": """ + Parse an RDF source adding the resulting triples to the Graph. + + See :meth:`rdflib.graph.Graph.parse` for documentation on arguments. + + The source is specified using one of source, location, file or data. + + If the source is in a format that does not support named graphs it's triples + will be added to the default graph (i.e. `Dataset.default_context`). .. caution:: @@ -2472,6 +2487,14 @@ def parse( For information on available security measures, see the RDFLib :doc:`Security Considerations ` documentation. + + *Changed in 7.0*: The ``publicID`` argument is no longer used as the + identifier (i.e. name) of the default graph as was the case before + version 7.0. 
In the case of sources that do not support named graphs, + the ``publicID`` parameter will also not be used as the name for the + graph that the data is loaded into, and instead the triples from sources + that do not support named graphs will be loaded into the default graph + (i.e. `ConjunctionGraph.default_context`). """ c = ConjunctiveGraph.parse( diff --git a/rdflib/plugins/sparql/sparql.py b/rdflib/plugins/sparql/sparql.py index 64230a645..7bfe28284 100644 --- a/rdflib/plugins/sparql/sparql.py +++ b/rdflib/plugins/sparql/sparql.py @@ -312,6 +312,17 @@ def dataset(self) -> ConjunctiveGraph: return self._dataset def load(self, source: URIRef, default: bool = False, **kwargs: Any) -> None: + """ + Load data from the source into the query context's. + + :param source: The source to load from. + :param default: If `True`, triples from the source will be added to the + default graph, otherwise it will be loaded into a graph with + ``source`` URI as its name. + :param kwargs: Keyword arguments to pass to + :meth:`rdflib.graph.Graph.parse`. 
+ """ + def _load(graph, source): try: return graph.parse(source, format="turtle", **kwargs) @@ -342,7 +353,7 @@ def _load(graph, source): if default: _load(self.graph, source) else: - _load(self.dataset, source) + _load(self.dataset.get_context(source), source) def __getitem__(self, key: Union[str, Path]) -> Optional[Union[str, Path]]: # in SPARQL BNodes are just labels diff --git a/test/data/variants/more_quads-asserts.json b/test/data/variants/more_quads-asserts.json index 83ae1a8af..bd0224160 100644 --- a/test/data/variants/more_quads-asserts.json +++ b/test/data/variants/more_quads-asserts.json @@ -1,4 +1,4 @@ { - "quad_count": 6, + "quad_count": 8, "exact_match": true } diff --git a/test/data/variants/more_quads.jsonld b/test/data/variants/more_quads.jsonld index 08d6c9360..305497123 100644 --- a/test/data/variants/more_quads.jsonld +++ b/test/data/variants/more_quads.jsonld @@ -1,56 +1,65 @@ { - "@graph": [ - { - "@graph": [ + "@graph": [ { - "@id": "example:s20", - "example:p20": { - "@id": "example:o20" - } + "@id": "example:s10", + "example:p10": { + "@id": "example:o10" + } }, { - "@id": "example:s21", - "example:p21": { - "@id": "example:o21" - } + "@id": "example:s01", + "example:p01": { + "@id": "example:o01" + } + }, + { + "@id": "example:s00", + "example:p00": { + "@id": "example:o02" + } + }, + { + "@id": "example:s11", + "example:p11": { + "@id": "example:o11" + } + }, + { + "@id": "example:g3", + "@graph": [ + { + "@id": "example:s31", + "example:p31": { + "@id": "example:o31" + } + }, + { + "@id": "example:s30", + "example:p30": { + "@id": "example:o30" + } + } + ] + }, + { + "@id": "example:g2", + "@graph": [ + { + "@id": "example:s21", + "example:p21": { + "@id": "example:o21" + } + }, + { + "@id": "example:s20", + "example:p20": { + "@id": "example:o20" + } + } + ] } - ], - "@id": "example:g2" - }, - { - "@id": "example:s00", - "p00": "example:o02" - }, - { - "@id": "example:s01", - "p01": "example:o01" - }, - { - "@id": "example:s10", - 
"p10": "example:o10" - }, - { - "@id": "example:s11", - "p11": "example:o11" + ], + "@context": { + "example": "http://example.org/" } - ], - "@context": { - "p10": { - "@id": "http://example.org/p10", - "@type": "@id" - }, - "p01": { - "@id": "http://example.org/p01", - "@type": "@id" - }, - "p00": { - "@id": "http://example.org/p00", - "@type": "@id" - }, - "p11": { - "@id": "http://example.org/p11", - "@type": "@id" - }, - "example": "http://example.org/" - } } diff --git a/test/data/variants/more_quads.nq b/test/data/variants/more_quads.nq index 64b6ccf33..49ed7b49e 100644 --- a/test/data/variants/more_quads.nq +++ b/test/data/variants/more_quads.nq @@ -1,6 +1,8 @@ - . - . . + . + . . - . . + . + . + . diff --git a/test/data/variants/more_quads.trig b/test/data/variants/more_quads.trig index ddbf7020e..13d534d68 100644 --- a/test/data/variants/more_quads.trig +++ b/test/data/variants/more_quads.trig @@ -13,3 +13,8 @@ example:g2 { example:s20 example:p20 example:o20 . example:s21 example:p21 example:o21 . } + +example:g3 { + example:s30 example:p30 example:o30 . + example:s31 example:p31 example:o31 . +} diff --git a/test/data/variants/simple_triple.n3 b/test/data/variants/simple_triple.n3 new file mode 100644 index 000000000..0529c7857 --- /dev/null +++ b/test/data/variants/simple_triple.n3 @@ -0,0 +1 @@ + . diff --git a/test/data/variants/simple_triple.trig b/test/data/variants/simple_triple.trig new file mode 100644 index 000000000..e5ec98502 --- /dev/null +++ b/test/data/variants/simple_triple.trig @@ -0,0 +1,2 @@ + + . diff --git a/test/test_conjunctivegraph/test_conjunctive_graph.py b/test/test_conjunctivegraph/test_conjunctive_graph.py index 54393ac34..bbaedcdee 100644 --- a/test/test_conjunctivegraph/test_conjunctive_graph.py +++ b/test/test_conjunctivegraph/test_conjunctive_graph.py @@ -22,7 +22,7 @@ def test_bnode_publicid(): b = BNode() data = " ." 
print("Parsing %r into %r" % (data, b)) - g.parse(data=data, format="turtle", publicID=b) + g.get_context(b).parse(data=data, format="turtle", publicID=b) triples = list(g.get_context(b).triples((None, None, None))) if not triples: diff --git a/test/test_dataset/test_dataset_default_graph.py b/test/test_dataset/test_dataset_default_graph.py new file mode 100644 index 000000000..fb219770c --- /dev/null +++ b/test/test_dataset/test_dataset_default_graph.py @@ -0,0 +1,152 @@ +import itertools +import logging +from test.data import TEST_DATA_DIR +from typing import Iterable, Type, Union + +import pytest +from _pytest.mark.structures import ParameterSet + +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, ConjunctiveGraph, Dataset +from rdflib.term import BNode, URIRef + + +def make_load_default_and_named() -> Iterable[ParameterSet]: + for container_type, file_extension in itertools.product( + (Dataset, ConjunctiveGraph), ("trig", "nq", "jsonld") + ): + yield pytest.param( + container_type, + file_extension, + id=f"{container_type.__name__}-{file_extension}", + ) + + +EXTENSION_FORMATS = { + "trig": "trig", + "nq": "nquads", + "jsonld": "json-ld", + "nt": "ntriples", + "ttl": "turtle", + "hext": "hext", + "n3": "n3", +} + + +@pytest.mark.parametrize( + ["container_type", "file_extension"], make_load_default_and_named() +) +def test_load_default_and_named( + container_type: Union[Type[Dataset], Type[ConjunctiveGraph]], file_extension: str +) -> None: + logging.debug("container_type = %s", container_type) + container = container_type() + + if container_type is Dataset: + # An empty dataset has 1 default graph and no named graphs, so 1 graph in + # total. 
+ assert 1 == sum(1 for _ in container.contexts()) + assert DATASET_DEFAULT_GRAPH_ID == next( + (context.identifier for context in container.contexts()), None + ) + assert container.default_context == next(container.contexts(), None) + else: + assert isinstance(container.default_context.identifier, BNode) + + # Load an RDF document with triples in three graphs into the container. + format = EXTENSION_FORMATS[file_extension] + source = TEST_DATA_DIR / "variants" / f"more_quads.{file_extension}" + container.parse(source=source, format=format) + + context_identifiers = set(context.identifier for context in container.contexts()) + + logging.info("context_identifiers = %s", context_identifiers) + logging.info( + "container.default_context.triples(...) = %s", + set(container.default_context.triples((None, None, None))), + ) + + all_contexts = set(container.contexts()) + logging.info( + "all_contexts = %s", set(context.identifier for context in all_contexts) + ) + + non_default_contexts = set(container.contexts()) - {container.default_context} + # There should now be two graphs in the container that are not the default graph. + logging.info( + "non_default_graphs = %s", + set(context.identifier for context in non_default_contexts), + ) + assert 2 == len(non_default_contexts) + + # The identifiers of the the non-default graphs should be the ones from the document. + assert { + URIRef("http://example.org/g2"), + URIRef("http://example.org/g3"), + } == set(context.identifier for context in non_default_contexts) + + # The default graph should have 4 triples. 
+ assert 4 == len(container.default_context) + + +def make_load_default_only_cases() -> Iterable[ParameterSet]: + for container_type, file_extension in itertools.product( + (Dataset, ConjunctiveGraph), ("trig", "ttl", "nq", "nt", "jsonld", "hext", "n3") + ): + yield pytest.param( + container_type, + file_extension, + id=f"{container_type.__name__}-{file_extension}", + ) + + +@pytest.mark.parametrize( + ["container_type", "file_extension"], make_load_default_only_cases() +) +def test_load_default_only( + container_type: Union[Type[Dataset], Type[ConjunctiveGraph]], file_extension: str +) -> None: + logging.debug("container_type = %s", container_type) + container = container_type() + + if container_type is Dataset: + # An empty dataset has 1 default graph and no named graphs, so 1 graph in + # total. + assert 1 == sum(1 for _ in container.contexts()) + assert DATASET_DEFAULT_GRAPH_ID == next( + (context.identifier for context in container.contexts()), None + ) + assert container.default_context == next(container.contexts(), None) + else: + assert isinstance(container.default_context.identifier, BNode) + + # Load an RDF document with only triples in the default graph into the container. + format = EXTENSION_FORMATS[file_extension] + source = TEST_DATA_DIR / "variants" / f"simple_triple.{file_extension}" + container.parse(source=source, format=format) + + context_identifiers = set(context.identifier for context in container.contexts()) + + logging.info("context_identifiers = %s", context_identifiers) + logging.info( + "container.default_context.triples(...) = %s", + set(container.default_context.triples((None, None, None))), + ) + + all_contexts = set(container.contexts()) + logging.info( + "all_contexts = %s", set(context.identifier for context in all_contexts) + ) + + non_default_contexts = set(container.contexts()) - {container.default_context} + # There should now be no graphs in the container that are not the default graph. 
+ logging.info( + "non_default_graphs = %s", + set(context.identifier for context in non_default_contexts), + ) + assert 0 == len(non_default_contexts) + + # The identifiers of the the non-default graphs should be an empty set. + assert set() == set(context.identifier for context in non_default_contexts) + + # The default graph should have 3 triples. + assert 1 == len(container.default_context) diff --git a/test/test_graph/test_variants.py b/test/test_graph/test_variants.py index 3cf931c44..09b2a156d 100644 --- a/test/test_graph/test_variants.py +++ b/test/test_graph/test_variants.py @@ -27,7 +27,7 @@ import rdflib.compare import rdflib.util -from rdflib.graph import ConjunctiveGraph +from rdflib.graph import Dataset from rdflib.namespace import XSD from rdflib.term import URIRef from rdflib.util import guess_format @@ -52,9 +52,7 @@ class GraphAsserts: exact_match: bool = False has_subject_iris: Optional[List[str]] = None - def check( - self, first_graph: Optional[ConjunctiveGraph], graph: ConjunctiveGraph - ) -> None: + def check(self, first_graph: Optional[Dataset], graph: Dataset) -> None: """ if `first_graph` is `None` then this is the first check before any other graphs have been processed. 
@@ -223,7 +221,7 @@ def test_variants(graph_variant: GraphVariants) -> None: logging.debug("graph_variant = %s", graph_variant) public_id = URIRef(f"example:{graph_variant.key}") assert len(graph_variant.variants) > 0 - first_graph: Optional[ConjunctiveGraph] = None + first_graph: Optional[Dataset] = None first_path: Optional[Path] = None logging.debug("graph_variant.asserts = %s", graph_variant.asserts) @@ -231,7 +229,7 @@ def test_variants(graph_variant: GraphVariants) -> None: logging.debug("variant_path = %s", variant_path) format = guess_format(variant_path.name, fmap=SUFFIX_FORMAT_MAP) assert format is not None, f"could not determine format for {variant_path.name}" - graph = ConjunctiveGraph() + graph = Dataset() graph.parse(variant_path, format=format, publicID=public_id) # Stripping data types as different parsers (e.g. hext) have different # opinions of when a bare string is of datatype XSD.string or not. @@ -243,8 +241,9 @@ def test_variants(graph_variant: GraphVariants) -> None: first_path = variant_path else: assert first_path is not None - GraphHelper.assert_isomorphic( + GraphHelper.assert_cgraph_isomorphic( first_graph, graph, + False, f"checking {variant_path.relative_to(VARIANTS_DIR)} against {first_path.relative_to(VARIANTS_DIR)}", ) diff --git a/test/test_issues/test_issue535.py b/test/test_issues/test_issue535.py index de38404d7..dbb7113ae 100644 --- a/test/test_issues/test_issue535.py +++ b/test/test_issues/test_issue535.py @@ -16,4 +16,4 @@ def test_nquads_default_graph(): assert len(ds) == 3, len(g) assert len(list(ds.contexts())) == 2, len(list(ds.contexts())) - assert len(ds.get_context(publicID)) == 2, len(ds.get_context(publicID)) + assert len(ds.default_context) == 2, len(ds.get_context(publicID)) diff --git a/test/test_store/test_store_berkeleydb.py b/test/test_store/test_store_berkeleydb.py index 0223fbad0..a0edecc54 100644 --- a/test/test_store/test_store_berkeleydb.py +++ b/test/test_store/test_store_berkeleydb.py @@ -1,18 +1,23 @@ 
+import logging import tempfile +from typing import Iterable, Optional, Tuple import pytest from rdflib import ConjunctiveGraph, URIRef from rdflib.plugins.stores.berkeleydb import has_bsddb +from rdflib.query import ResultRow from rdflib.store import VALID_STORE +logger = logging.getLogger(__name__) + pytestmark = pytest.mark.skipif( not has_bsddb, reason="skipping berkeleydb tests, modile not available" ) @pytest.fixture -def get_graph(): +def get_graph() -> Iterable[Tuple[str, ConjunctiveGraph]]: path = tempfile.NamedTemporaryFile().name g = ConjunctiveGraph("BerkeleyDB") rt = g.open(path, create=True) @@ -35,7 +40,7 @@ def get_graph(): g.destroy(path) -def test_write(get_graph): +def test_write(get_graph: Tuple[str, ConjunctiveGraph]): path, g = get_graph assert ( len(g) == 3 @@ -60,7 +65,7 @@ def test_write(get_graph): ), "There must still be four triples in the graph after the third data chunk parse" -def test_read(get_graph): +def test_read(get_graph: Tuple[str, ConjunctiveGraph]): path, g = get_graph sx = None for s in g.subjects( @@ -71,7 +76,7 @@ def test_read(get_graph): assert sx == URIRef("https://example.org/d") -def test_sparql_query(get_graph): +def test_sparql_query(get_graph: Tuple[str, ConjunctiveGraph]): path, g = get_graph q = """ PREFIX : @@ -83,11 +88,12 @@ def test_sparql_query(get_graph): c = 0 for row in g.query(q): + assert isinstance(row, ResultRow) c = int(row.c) assert c == 2, "SPARQL COUNT must return 2" -def test_sparql_insert(get_graph): +def test_sparql_insert(get_graph: Tuple[str, ConjunctiveGraph]): path, g = get_graph q = """ PREFIX : @@ -100,8 +106,15 @@ def test_sparql_insert(get_graph): assert len(g) == 4, "After extra triple insert, length must be 4" -def test_multigraph(get_graph): +def test_multigraph(get_graph: Tuple[str, ConjunctiveGraph]): path, g = get_graph + + if logger.isEnabledFor(logging.DEBUG): + logging.debug( + "graph before = \n%s", + g.serialize(format="trig"), + ) + q = """ PREFIX : @@ -116,6 +129,12 @@ def 
test_multigraph(get_graph): g.update(q) + if logger.isEnabledFor(logging.DEBUG): + logging.debug( + "graph after = \n%s", + g.serialize(format="trig"), + ) + q = """ SELECT (COUNT(?g) AS ?c) WHERE { @@ -129,11 +148,13 @@ def test_multigraph(get_graph): """ c = 0 for row in g.query(q): + assert isinstance(row, ResultRow) c = int(row.c) - assert c == 3, "SPARQL COUNT must return 3 (default, :m & :n)" + assert c == 2, "SPARQL COUNT must return 2 (default, :m & :n)" -def test_open_shut(get_graph): +def test_open_shut(get_graph: Tuple[str, ConjunctiveGraph]): + g: Optional[ConjunctiveGraph] path, g = get_graph assert len(g) == 3, "Initially we must have 3 triples from setUp" g.close() diff --git a/test/test_trig.py b/test/test_trig.py index 49572e445..de5c2108f 100644 --- a/test/test_trig.py +++ b/test/test_trig.py @@ -1,7 +1,5 @@ import re -import pytest - import rdflib TRIPLE = ( @@ -125,13 +123,6 @@ def test_graph_parsing(): assert len(list(g.contexts())) == 2 -@pytest.mark.xfail( - raises=AssertionError, - reason=""" - This is failing because conjuncitve graph assigns things in the default graph to - a graph with a bnode as name. On every parse iteration a new BNode is generated - resulting in the default graph content appearing multipile times in the output.""", -) def test_round_trips(): data = """ . diff --git a/test/test_w3c_spec/test_sparql10_w3c.py b/test/test_w3c_spec/test_sparql10_w3c.py index 73d06d89b..70df2d066 100644 --- a/test/test_w3c_spec/test_sparql10_w3c.py +++ b/test/test_w3c_spec/test_sparql10_w3c.py @@ -1,6 +1,7 @@ """ Runs the SPARQL 1.0 test suite from. 
""" +from contextlib import ExitStack from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources @@ -118,5 +119,7 @@ def configure_rdflib() -> Generator[None, None, None]: report_prefix="rdflib_w3c_sparql10", ), ) -def test_entry_sparql10(monkeypatch: MonkeyPatch, manifest_entry: SPARQLEntry) -> None: - check_entry(monkeypatch, manifest_entry) +def test_entry_sparql10( + monkeypatch: MonkeyPatch, exit_stack: ExitStack, manifest_entry: SPARQLEntry +) -> None: + check_entry(monkeypatch, exit_stack, manifest_entry) diff --git a/test/test_w3c_spec/test_sparql11_w3c.py b/test/test_w3c_spec/test_sparql11_w3c.py index 6bfcb31f1..2afcf910a 100644 --- a/test/test_w3c_spec/test_sparql11_w3c.py +++ b/test/test_w3c_spec/test_sparql11_w3c.py @@ -1,6 +1,7 @@ """ Runs the SPARQL 1.1 test suite from. """ +from contextlib import ExitStack from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources @@ -259,5 +260,7 @@ def configure_rdflib() -> Generator[None, None, None]: report_prefix="rdflib_w3c_sparql11", ), ) -def test_entry_sparql11(monkeypatch: MonkeyPatch, manifest_entry: SPARQLEntry) -> None: - check_entry(monkeypatch, manifest_entry) +def test_entry_sparql11( + monkeypatch: MonkeyPatch, exit_stack: ExitStack, manifest_entry: SPARQLEntry +) -> None: + check_entry(monkeypatch, exit_stack, manifest_entry) diff --git a/test/test_w3c_spec/test_sparql_rdflib.py b/test/test_w3c_spec/test_sparql_rdflib.py index 2a278461a..73809109a 100644 --- a/test/test_w3c_spec/test_sparql_rdflib.py +++ b/test/test_w3c_spec/test_sparql_rdflib.py @@ -1,6 +1,7 @@ """ Runs the RDFLib SPARQL test suite. 
""" +from contextlib import ExitStack from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources @@ -61,5 +62,7 @@ def configure_rdflib() -> Generator[None, None, None]: report_prefix="rdflib_sparql", ), ) -def test_entry_rdflib(monkeypatch: MonkeyPatch, manifest_entry: SPARQLEntry) -> None: - check_entry(monkeypatch, manifest_entry) +def test_entry_rdflib( + monkeypatch: MonkeyPatch, exit_stack: ExitStack, manifest_entry: SPARQLEntry +) -> None: + check_entry(monkeypatch, exit_stack, manifest_entry) diff --git a/test/utils/__init__.py b/test/utils/__init__.py index a5c40e3f8..dc27251a3 100644 --- a/test/utils/__init__.py +++ b/test/utils/__init__.py @@ -349,6 +349,10 @@ def get_contexts(cgraph: ConjunctiveGraph) -> Dict[URIRef, Graph]: else: raise AssertionError("BNode labelled graphs not supported") elif isinstance(context.identifier, URIRef): + if len(context) == 0: + # If a context has no triples it does not exist in a + # meaningful way. + continue result[context.identifier] = context else: raise AssertionError( diff --git a/test/utils/iri.py b/test/utils/iri.py index 24f114b2c..ad7419d59 100644 --- a/test/utils/iri.py +++ b/test/utils/iri.py @@ -2,12 +2,17 @@ Various utilities for working with IRIs and URIs. 
""" +import email.utils +import http.client import logging +import mimetypes from dataclasses import dataclass from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath from test.utils import ensure_suffix from typing import Callable, Optional, Set, Tuple, Type, TypeVar, Union from urllib.parse import quote, unquote, urljoin, urlparse, urlsplit, urlunsplit +from urllib.request import BaseHandler, OpenerDirector, Request +from urllib.response import addinfourl from nturl2path import url2pathname as nt_url2pathname @@ -148,3 +153,28 @@ def from_mappings( value = URIMapping.from_tuple(value) result.add(value) return cls(result) + + def opener(self) -> OpenerDirector: + opener = OpenerDirector() + + opener.add_handler(URIMapperHTTPHandler(self)) + + return opener + + +class URIMapperHTTPHandler(BaseHandler): + def __init__(self, mapper: URIMapper): + self.mapper = mapper + + def http_open(self, req: Request) -> addinfourl: + url = req.get_full_url() + local_uri, local_path = self.mapper.to_local(url) + stats = local_path.stat() + size = stats.st_size + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(f"{local_path}")[0] + headers = email.message_from_string( + "Content-type: %s\nContent-length: %d\nLast-modified: %s\n" + % (mtype or "text/plain", size, modified) + ) + return addinfourl(local_path.open("rb"), headers, url, http.client.OK) diff --git a/test/utils/sparql_checker.py b/test/utils/sparql_checker.py index 836c040fd..477c9d3c8 100644 --- a/test/utils/sparql_checker.py +++ b/test/utils/sparql_checker.py @@ -6,24 +6,13 @@ from contextlib import ExitStack, contextmanager from dataclasses import dataclass, field from io import BytesIO, StringIO -from pathlib import Path from test.utils import BNodeHandling, GraphHelper from test.utils.dawg_manifest import Manifest, ManifestEntry from test.utils.iri import URIMapper from test.utils.namespace import MF, QT, UT from test.utils.result import ResultType, 
assert_bindings_collections_equal -from typing import ( - Any, - Callable, - Dict, - Generator, - Optional, - Set, - Tuple, - Type, - Union, - cast, -) +from test.utils.urlopen import context_urlopener +from typing import Dict, Generator, Optional, Set, Tuple, Type, Union, cast from urllib.parse import urljoin import pytest @@ -36,7 +25,6 @@ from rdflib.plugins.sparql.algebra import translateQuery, translateUpdate from rdflib.plugins.sparql.parser import parseQuery, parseUpdate from rdflib.plugins.sparql.results.rdfresults import RDFResultParser -from rdflib.plugins.sparql.sparql import QueryContext from rdflib.query import Result from rdflib.term import BNode, IdentifiedNode, Identifier, Literal, Node, URIRef from rdflib.util import guess_format @@ -131,7 +119,7 @@ def load_into(self, manifest: Manifest, dataset: Dataset) -> None: logging.debug( "public_id = %s - graph = %s\n%s", public_id, graph_path, graph_text ) - dataset.parse( + dataset.get_context(public_id).parse( # type error: Argument 1 to "guess_format" has incompatible type "Path"; expected "str" data=graph_text, publicID=public_id, @@ -351,33 +339,11 @@ def check_update(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: rdflib_sparql_module.SPARQL_LOAD_GRAPHS = True -def patched_query_context_load(uri_mapper: URIMapper) -> Callable[..., Any]: - def _patched_load( - self: QueryContext, source: URIRef, default: bool = False, **kwargs - ) -> None: - public_id = None - use_source: Union[URIRef, Path] = source - # type error: Argument 1 to "guess_format" has incompatible type "Union[URIRef, Path]"; expected "str" - format = guess_format(use_source) # type: ignore[arg-type] - if f"{source}".startswith(("https://", "http://")): - use_source = uri_mapper.to_local_path(source) - public_id = source - if default: - assert self.graph is not None - self.graph.parse(use_source, format=format, publicID=public_id) - else: - self.dataset.parse(use_source, format=format, publicID=public_id) - - return _patched_load 
- - -def check_query(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: +def check_query(exit_stack: ExitStack, entry: SPARQLEntry) -> None: assert entry.query is not None assert isinstance(entry.result, URIRef) - monkeypatch.setattr( - QueryContext, "load", patched_query_context_load(entry.uri_mapper) - ) + exit_stack.enter_context(context_urlopener(entry.uri_mapper.opener())) query_text = entry.query_text() dataset = entry.action_dataset() @@ -400,6 +366,11 @@ def check_query(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: assert expected_result.type == result.type if result.type == ResultType.SELECT: + if logger.isEnabledFor(logging.DEBUG): + logging.debug( + "expected_result.bindings = \n%s", + pprint.pformat(expected_result.bindings, indent=2, width=80), + ) if logger.isEnabledFor(logging.DEBUG): logging.debug( "entry.result_cardinality = %s, result.bindings = \n%s", @@ -441,7 +412,9 @@ def check_query(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: } -def check_entry(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: +def check_entry( + monkeypatch: MonkeyPatch, exit_stack: ExitStack, entry: SPARQLEntry +) -> None: if logger.isEnabledFor(logging.DEBUG): logging.debug( "entry = \n%s", @@ -452,5 +425,5 @@ def check_entry(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: if entry.type_info.query_type is QueryType.UPDATE: return check_update(monkeypatch, entry) elif entry.type_info.query_type is QueryType.QUERY: - return check_query(monkeypatch, entry) + return check_query(exit_stack, entry) raise ValueError(f"unsupported test {entry.type}") diff --git a/test/utils/test/test_testutils.py b/test/utils/test/test_testutils.py index a624c4456..44a0292ec 100644 --- a/test/utils/test/test_testutils.py +++ b/test/utils/test/test_testutils.py @@ -288,21 +288,21 @@ def test_assert_sets_equal(test_case: SetsEqualTestCase): rhs_graph: Graph = Graph().parse(data=test_case.rhs, format=test_case.rhs_format) public_id = URIRef("example:graph") - 
lhs_cgraph: ConjunctiveGraph = ConjunctiveGraph() - lhs_cgraph.parse( + lhs_dataset: Dataset = Dataset() + lhs_dataset.parse( data=test_case.lhs, format=test_case.lhs_format, publicID=public_id ) - rhs_cgraph: ConjunctiveGraph = ConjunctiveGraph() - rhs_cgraph.parse( + rhs_dataset: Dataset = Dataset() + rhs_dataset.parse( data=test_case.rhs, format=test_case.rhs_format, publicID=public_id ) - assert isinstance(lhs_cgraph, ConjunctiveGraph) - assert isinstance(rhs_cgraph, ConjunctiveGraph) + assert isinstance(lhs_dataset, Dataset) + assert isinstance(rhs_dataset, Dataset) graph: Graph - cgraph: ConjunctiveGraph - for graph, cgraph in ((lhs_graph, lhs_cgraph), (rhs_graph, rhs_cgraph)): + cgraph: Dataset + for graph, cgraph in ((lhs_graph, lhs_dataset), (rhs_graph, rhs_dataset)): GraphHelper.assert_sets_equals(graph, graph, BNodeHandling.COLLAPSE) GraphHelper.assert_sets_equals(cgraph, cgraph, BNodeHandling.COLLAPSE) GraphHelper.assert_triple_sets_equals(graph, graph, BNodeHandling.COLLAPSE) @@ -316,7 +316,7 @@ def test_assert_sets_equal(test_case: SetsEqualTestCase): ) with pytest.raises(AssertionError): GraphHelper.assert_sets_equals( - lhs_cgraph, rhs_cgraph, test_case.bnode_handling + lhs_dataset, rhs_dataset, test_case.bnode_handling ) with pytest.raises(AssertionError): GraphHelper.assert_triple_sets_equals( @@ -324,23 +324,25 @@ def test_assert_sets_equal(test_case: SetsEqualTestCase): ) with pytest.raises(AssertionError): GraphHelper.assert_triple_sets_equals( - lhs_cgraph, rhs_cgraph, test_case.bnode_handling + lhs_dataset, rhs_dataset, test_case.bnode_handling ) with pytest.raises(AssertionError): GraphHelper.assert_quad_sets_equals( - lhs_cgraph, rhs_cgraph, test_case.bnode_handling + lhs_dataset, rhs_dataset, test_case.bnode_handling ) else: GraphHelper.assert_sets_equals(lhs_graph, rhs_graph, test_case.bnode_handling) - GraphHelper.assert_sets_equals(lhs_cgraph, rhs_cgraph, test_case.bnode_handling) + GraphHelper.assert_sets_equals( + lhs_dataset, 
rhs_dataset, test_case.bnode_handling + ) GraphHelper.assert_triple_sets_equals( lhs_graph, rhs_graph, test_case.bnode_handling ) GraphHelper.assert_triple_sets_equals( - lhs_cgraph, rhs_cgraph, test_case.bnode_handling + lhs_dataset, rhs_dataset, test_case.bnode_handling ) GraphHelper.assert_quad_sets_equals( - lhs_cgraph, rhs_cgraph, test_case.bnode_handling + lhs_dataset, rhs_dataset, test_case.bnode_handling ) From a7e9d1fb06eb6f2f0640c97f666d624325f8fe6d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 19:42:28 +0200 Subject: [PATCH 091/114] build(deps): bump library/python in /docker/unstable (#2441) Bumps library/python from 3.11.3-slim to 3.11.4-slim. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index 87afdf96f..569bb9e17 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:551c9529e77896518ac5693d7e98ee5e12051d625de450ac2a68da1eae15ec87 +FROM docker.io/library/python:3.11.4-slim@sha256:1966141ab594e175852a033da2a38f0cb042b5b92896c22073f8477f96f43b06 # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From 900f0413df8aa9bc4bfc1ed02403a0462a998aa7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 19:42:41 +0200 Subject: [PATCH 092/114] build(deps): bump library/python in /docker/latest (#2440) Bumps library/python from 3.11.3-slim to 3.11.4-slim. 
--- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index 1539914e9..aed8f07ad 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.3-slim@sha256:eaee5f73efa9ae962d2077756292bc4878c04fcbc13dc168bb00cc365f35647e +FROM docker.io/library/python:3.11.4-slim@sha256:1966141ab594e175852a033da2a38f0cb042b5b92896c22073f8477f96f43b06 COPY docker/latest/requirements.txt /var/tmp/build/ From ef25896bcfd84aa1bdd2c94d1ab04b9bc02fd6d2 Mon Sep 17 00:00:00 2001 From: Michael Cochez <3889771+miselico@users.noreply.github.com> Date: Tue, 13 Jun 2023 20:03:14 +0200 Subject: [PATCH 093/114] fix: dbpedia URL to use https instead of http (#2444) The URL for the service keyword had the http address for the dbpedia endpoint, which no longer works. Changing it to https as that works. --- docs/intro_to_sparql.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/intro_to_sparql.rst b/docs/intro_to_sparql.rst index 9de055413..f2cbf5a69 100644 --- a/docs/intro_to_sparql.rst +++ b/docs/intro_to_sparql.rst @@ -140,7 +140,7 @@ The ``SERVICE`` keyword of SPARQL 1.1 can send a query to a remote SPARQL endpoi """ SELECT ?s WHERE { - SERVICE { + SERVICE { ?s a ?o . } } From d21a451b88df9dcf3828c41cb7346d9dfe28048a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Jun 2023 21:15:40 +0200 Subject: [PATCH 094/114] build(deps-dev): bump pytest from 7.3.1 to 7.3.2 (#2439) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.3.1 to 7.3.2. 
- [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.1...7.3.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 78 ++++------------------------------------------------- 1 file changed, 5 insertions(+), 73 deletions(-) diff --git a/poetry.lock b/poetry.lock index eb4240821..340769ca5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -31,7 +29,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "berkeleydb" version = "18.1.6" description = "Python bindings for Oracle Berkeley DB" -category = "main" optional = true python-versions = "*" files = [ @@ -42,7 +39,6 @@ files = [ name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -93,7 +89,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -105,7 +100,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -190,7 +184,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -206,7 +199,6 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -218,7 +210,6 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -294,7 +285,6 @@ toml = ["tomli"] name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -306,7 +296,6 @@ files = [ name = "entrypoints" version = "0.4" description = "Discover and load entry points from installed packages." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -318,7 +307,6 @@ files = [ name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -333,7 +321,6 @@ test = ["pytest (>=6)"] name = "flake8" version = "4.0.1" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -350,7 +337,6 @@ pyflakes = ">=2.4.0,<2.5.0" name = "flakeheaven" version = "3.3.0" description = "FlakeHeaven is a [Flake8](https://gitlab.com/pycqa/flake8) wrapper to make it cool." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -373,7 +359,6 @@ docs = ["alabaster", "myst-parser (>=0.18.0,<0.19.0)", "pygments-github-lexers", name = "html5lib" version = "1.1" description = "HTML parser based on the WHATWG HTML specification" -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -395,7 +380,6 @@ lxml = ["lxml"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -407,7 +391,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -419,7 +402,6 @@ files = [ name = "importlib-metadata" version = "6.6.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -440,7 +422,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -452,7 +433,6 @@ 
files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = "*" files = [ @@ -467,7 +447,6 @@ six = "*" name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -485,7 +464,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -503,7 +481,6 @@ i18n = ["Babel (>=2.7)"] name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -596,7 +573,6 @@ source = ["Cython (>=0.29.7)"] name = "lxml-stubs" version = "0.4.0" description = "Type annotations for the lxml package" -category = "dev" optional = false python-versions = "*" files = [ @@ -611,7 +587,6 @@ test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1. name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -637,7 +612,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -697,7 +671,6 @@ files = [ name = "mccabe" version = "0.6.1" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = "*" files = [ @@ -709,7 +682,6 @@ files = [ name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -729,7 +701,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -741,7 +712,6 @@ files = [ name = "mypy" version = "1.3.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -789,7 +759,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -801,7 +770,6 @@ files = [ name = "myst-parser" version = "1.0.0" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -829,7 +797,6 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, name = "networkx" version = "2.6.3" description = "Python package for creating and manipulating graphs and networks" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -848,7 +815,6 @@ test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] name = "packaging" version = "23.0" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -860,7 +826,6 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -872,7 +837,6 @@ files = [ name = "pbr" version = "5.11.1" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -884,7 +848,6 @@ files = [ name = "pep8-naming" version = "0.13.2" description = "Check PEP-8 naming conventions, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -899,7 +862,6 @@ flake8 = ">=3.9.1" name = "platformdirs" version = "3.1.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -918,7 +880,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytes name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -937,7 +898,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pycodestyle" version = "2.8.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -949,7 +909,6 @@ files = [ name = "pyflakes" version = "2.4.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -961,7 +920,6 @@ files = [ name = "pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -976,7 +934,6 @@ plugins = ["importlib-metadata"] name = "pyparsing" version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -989,14 +946,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.3.2" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.3.2-py3-none-any.whl", hash = "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295"}, + {file = "pytest-7.3.2.tar.gz", hash = 
"sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b"}, ] [package.dependencies] @@ -1009,13 +965,12 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1034,7 +989,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytz" version = "2022.7.1" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" files = [ @@ -1046,7 +1000,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1096,7 +1049,6 @@ files = [ name = "requests" version = "2.28.2" description = "Python HTTP for Humans." 
-category = "dev" optional = false python-versions = ">=3.7, <4" files = [ @@ -1118,7 +1070,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "setuptools" version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1135,7 +1086,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1147,7 +1097,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -1159,7 +1108,6 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1195,7 +1143,6 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx-autodoc-typehints" version = "1.23.0" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1215,7 +1162,6 @@ type-comment = ["typed-ast (>=1.5.4)"] name = "sphinxcontrib-apidoc" version = "0.3.0" description = "A Sphinx extension for running 'sphinx-apidoc' on each build" -category = "dev" optional = false python-versions = "*" files = [ @@ -1231,7 +1177,6 @@ Sphinx = ">=1.6.0" name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1247,7 +1192,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx 
extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1263,7 +1207,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1279,7 +1222,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1294,7 +1236,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1310,7 +1251,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1326,7 +1266,6 @@ test = ["pytest"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1338,7 +1277,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1350,7 +1288,6 @@ files = [ name = "typed-ast" version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1384,7 +1321,6 @@ files = [ name = "types-setuptools" version = "67.8.0.0" description = "Typing stubs for setuptools" -category = "dev" optional = false python-versions = "*" files = [ @@ -1396,7 +1332,6 @@ files = [ name = "typing-extensions" version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1408,7 +1343,6 @@ files = [ name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1425,7 +1359,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = true python-versions = "*" files = [ @@ -1437,7 +1370,6 @@ files = [ name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ From 1e5f56b58f0803f13a576f2dffe31382ebba9ff4 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Tue, 13 Jun 2023 21:33:01 +0200 Subject: [PATCH 095/114] BREAKING CHANGE: drop support for python 3.7 (#2436) Python 3.7 will be end-of-life on the 27th of June 2023 and the next release of RDFLib will be a new major version. This changes the minimum supported version of Python to 3.8.1 as some of the dependencies we use are not too fond of python 3.8.0. This change also removes all accommodations for older python versions. 
--- .github/workflows/validate.yaml | 15 ++++------ README.md | 2 +- docs/conf.py | 5 +--- docs/developers.rst | 2 ++ docs/gettingstarted.rst | 2 +- docs/upgrade6to7.rst | 5 ++++ examples/secure_with_audit.py | 9 +----- poetry.lock | 50 ++----------------------------- pyproject.toml | 12 ++++---- rdflib/__init__.py | 7 +---- rdflib/_type_checking.py | 7 +---- rdflib/plugin.py | 7 +---- rdflib/plugins/sparql/__init__.py | 6 +--- test/conftest.py | 11 +++---- test/test_misc/test_security.py | 9 ++---- 15 files changed, 34 insertions(+), 115 deletions(-) diff --git a/.github/workflows/validate.yaml b/.github/workflows/validate.yaml index dc95a79b4..8044ef02f 100644 --- a/.github/workflows/validate.yaml +++ b/.github/workflows/validate.yaml @@ -25,30 +25,30 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11"] os: [ubuntu-latest, macos-latest, windows-latest] # This is used for injecting additional tests for a specific python # version and OS. suffix: [""] include: - - python-version: "3.7" + - python-version: "3.8" os: ubuntu-latest extensive-tests: true TOXENV_SUFFIX: "-docs" - - python-version: "3.7" + - python-version: "3.8" os: ubuntu-latest extensive-tests: true suffix: "-min" TOXENV_SUFFIX: "-min" - - python-version: "3.8" + - python-version: "3.9" os: ubuntu-latest TOX_EXTRA_COMMAND: "- isort --check-only --diff ." 
TOXENV_SUFFIX: "-docs" - - python-version: "3.9" + - python-version: "3.10" os: ubuntu-latest TOX_EXTRA_COMMAND: "- black --check --diff ./rdflib" TOXENV_SUFFIX: "-lxml" - - python-version: "3.10" + - python-version: "3.11" os: ubuntu-latest TOX_EXTRA_COMMAND: "flake8 --exit-zero rdflib" TOXENV_SUFFIX: "-docs" @@ -56,9 +56,6 @@ jobs: extensive-tests: true TOX_TEST_HARNESS: "firejail --net=none --" TOX_PYTEST_EXTRA_ARGS: "-m 'not webtest'" - - python-version: "3.11" - os: ubuntu-latest - TOXENV_SUFFIX: "-docs" steps: - uses: actions/checkout@v3 - name: Cache XDG_CACHE_HOME diff --git a/README.md b/README.md index b2a92d16a..c654a2700 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ Help with maintenance of all of the RDFLib family of packages is always welcome ## Versions & Releases -* `7.0.0a0` current `main` branch +* `7.0.0a0` current `main` branch and supports Python 3.8.1+ only. * `6.x.y` current release and support Python 3.7+ only. Many improvements over 5.0.0 * see [Releases](https://github.com/RDFLib/rdflib/releases) * `5.x.y` supports Python 2.7 and 3.4+ and is [mostly backwards compatible with 4.2.2](https://rdflib.readthedocs.io/en/stable/upgrade4to5.html). diff --git a/docs/conf.py b/docs/conf.py index add49fdfe..93d78d8a0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -256,7 +256,7 @@ def find_version(filename): # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("https://docs.python.org/3.7", None), + "python": ("https://docs.python.org/3.8", None), } html_experimental_html5_writer = True @@ -331,9 +331,6 @@ def find_version(filename): ] ) -if sys.version_info < (3, 8): - nitpick_ignore.extend([("py:class", "importlib_metadata.EntryPoint")]) - def autodoc_skip_member_handler( app: sphinx.application.Sphinx, diff --git a/docs/developers.rst b/docs/developers.rst index 759e017ea..d6cc67e2e 100644 --- a/docs/developers.rst +++ b/docs/developers.rst @@ -437,6 +437,8 @@ flag them as expecting to fail. Compatibility ------------- +RDFlib 7.0.0 release and later only support Python 3.8.1 and newer. + RDFlib 6.0.0 release and later only support Python 3.7 and newer. RDFLib 5.0.0 maintained compatibility with Python versions 2.7, 3.4, 3.5, 3.6, 3.7. diff --git a/docs/gettingstarted.rst b/docs/gettingstarted.rst index ec6573766..44307ae8a 100644 --- a/docs/gettingstarted.rst +++ b/docs/gettingstarted.rst @@ -51,7 +51,7 @@ methods that search triples and return them in arbitrary order. RDFLib graphs also redefine certain built-in Python methods in order to behave in a predictable way. They do this by `emulating container types -`_ and +`_ and are best thought of as a set of 3-item tuples ("triples", in RDF-speak): .. code-block:: text diff --git a/docs/upgrade6to7.rst b/docs/upgrade6to7.rst index c8847adbf..d58d25735 100644 --- a/docs/upgrade6to7.rst +++ b/docs/upgrade6to7.rst @@ -4,6 +4,11 @@ Upgrading 6 to 7 ============================================ +Python version +---------------------------------------------------- + +RDFLib 7 requires Python 3.8.1 or later. + New behaviour for ``publicID`` in ``parse`` methods. 
---------------------------------------------------- diff --git a/examples/secure_with_audit.py b/examples/secure_with_audit.py index 434be5a49..f49ccd164 100644 --- a/examples/secure_with_audit.py +++ b/examples/secure_with_audit.py @@ -61,15 +61,8 @@ def main() -> None: ), ) - if sys.version_info < (3, 8): - logging.warn("This example requires Python 3.8 or higher") - return None - # Install the audit hook - # - # note on type error: This is needed because we are running mypy with python - # 3.7 mode, so mypy thinks the previous condition will always be true. - sys.addaudithook(audit_hook) # type: ignore[unreachable] + sys.addaudithook(audit_hook) graph = Graph() diff --git a/poetry.lock b/poetry.lock index 340769ca5..917b750df 100644 --- a/poetry.lock +++ b/poetry.lock @@ -76,7 +76,6 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -193,7 +192,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" @@ -410,7 +408,6 @@ files = [ ] [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] @@ -596,7 +593,6 @@ files = [ [package.dependencies] mdurl = ">=0.1,<1.0" -typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] @@ -746,7 +742,6 @@ files = [ [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} 
typing-extensions = ">=3.10" [package.extras] @@ -784,7 +779,6 @@ markdown-it-py = ">=1.0.0,<3.0.0" mdit-py-plugins = ">=0.3.4,<0.4.0" pyyaml = "*" sphinx = ">=5,<7" -typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] code-style = ["pre-commit (>=3.0,<4.0)"] @@ -869,9 +863,6 @@ files = [ {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} - [package.extras] docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] @@ -887,9 +878,6 @@ files = [ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -958,7 +946,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -1284,39 +1271,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "typed-ast" -version = "1.5.4" -description = "a fork of Python 2 and 3 ast modules with type comment support" -optional = false -python-versions = ">=3.6" -files = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = 
"typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = 
"typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] - [[package]] name = "types-setuptools" version = "67.8.0.0" @@ -1389,5 +1343,5 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" -python-versions = "^3.7" -content-hash = "36084be60ae6a80f19b7aab7044c7c7d6fb11a304dae08992060f46f1c457213" +python-versions = "^3.8.1" 
+content-hash = "1c56b77bc9381ed73f90bdc11243e8fae40be3fe06aec26c74eef94937698017" diff --git a/pyproject.toml b/pyproject.toml index 4d37e57c9..23c51b12c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,6 @@ license = "BSD-3-Clause" classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -39,10 +38,9 @@ rdfs2dot = 'rdflib.tools.rdfs2dot:main' rdfgraphisomorphism = 'rdflib.tools.graphisomorphism:main' [tool.poetry.dependencies] -python = "^3.7" +python = "^3.8.1" isodate = "^0.6.0" pyparsing = ">=2.1.0,<4" -importlib-metadata = {version = ">=4,<7", python = ">=3.7,<3.8"} berkeleydb = {version = "^18.1.0", optional = true} networkx = {version = "^2.0.0", optional = true} html5lib = {version = "^1.0", optional = true} @@ -69,9 +67,9 @@ sphinx-autodoc-typehints = "^1.17.1" typing-extensions = "^4.5.0" [tool.poetry.group.flake8.dependencies] -flake8 = {version = ">=4.0.1", python = ">=3.8"} # flakeheaven is incompatible with flake8 >=5.0 (https://github.com/flakeheaven/flakeheaven/issues/132) -flakeheaven = {version = "^3.2.1", python = ">=3.8"} -pep8-naming = {version = "^0.13.2", python = ">=3.8"} +flake8 = {version = ">=4.0.1"} # flakeheaven is incompatible with flake8 >=5.0 (https://github.com/flakeheaven/flakeheaven/issues/132) +flakeheaven = {version = "^3.2.1"} +pep8-naming = {version = "^0.13.2"} [tool.poetry.extras] berkeleydb = ["berkeleydb"] @@ -204,7 +202,7 @@ skip = [ [tool.mypy] files = ['rdflib', 'test', 'devtools', 'examples'] -python_version = "3.7" +python_version = "3.8" warn_unused_configs = true ignore_missing_imports = true disallow_subclassing_any = false diff --git a/rdflib/__init__.py b/rdflib/__init__.py index dc32be8ee..4677e0a95 100644 --- a/rdflib/__init__.py +++ b/rdflib/__init__.py @@ -44,12 +44,7 @@ """ import logging import 
sys - -if sys.version_info < (3, 8): - # importlib is only available in Python 3.8+; for 3.7 we must do this: - import importlib_metadata as metadata -else: - from importlib import metadata +from importlib import metadata _DISTRIBUTION_METADATA = metadata.metadata("rdflib") diff --git a/rdflib/_type_checking.py b/rdflib/_type_checking.py index ac6e2b8b8..c9e0202ea 100644 --- a/rdflib/_type_checking.py +++ b/rdflib/_type_checking.py @@ -14,18 +14,13 @@ and this module is not part the the RDFLib public API. """ -import sys - __all__ = [ "_NamespaceSetString", "_MulPathMod", ] -if sys.version_info >= (3, 8): - from typing import Literal as PyLiteral -else: - from typing_extensions import Literal as PyLiteral +from typing import Literal as PyLiteral _NamespaceSetString = PyLiteral["core", "rdflib", "none"] _MulPathMod = PyLiteral["*", "+", "?"] # noqa: F722 diff --git a/rdflib/plugin.py b/rdflib/plugin.py index 9d2f8540b..676ffbaa8 100644 --- a/rdflib/plugin.py +++ b/rdflib/plugin.py @@ -25,7 +25,7 @@ """ -import sys +from importlib.metadata import EntryPoint, entry_points from typing import ( TYPE_CHECKING, Any, @@ -52,11 +52,6 @@ from rdflib.serializer import Serializer from rdflib.store import Store -if sys.version_info < (3, 8): - from importlib_metadata import EntryPoint, entry_points -else: - from importlib.metadata import EntryPoint, entry_points - __all__ = [ "register", "get", diff --git a/rdflib/plugins/sparql/__init__.py b/rdflib/plugins/sparql/__init__.py index 011b7b591..a11a6e004 100644 --- a/rdflib/plugins/sparql/__init__.py +++ b/rdflib/plugins/sparql/__init__.py @@ -4,7 +4,7 @@ .. 
versionadded:: 4.0 """ -import sys +from importlib.metadata import entry_points from typing import TYPE_CHECKING SPARQL_LOAD_GRAPHS = True @@ -40,10 +40,6 @@ assert operators assert parserutils -if sys.version_info < (3, 8): - from importlib_metadata import entry_points -else: - from importlib.metadata import entry_points all_entry_points = entry_points() if hasattr(all_entry_points, "select"): diff --git a/test/conftest.py b/test/conftest.py index 38f4dabc1..01153f9fa 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -85,13 +85,10 @@ def function_httpmocks( @pytest.fixture(scope="session", autouse=True) -def audit_hook_dispatcher() -> Generator[Optional[AuditHookDispatcher], None, None]: - if sys.version_info >= (3, 8): - dispatcher = AuditHookDispatcher() - sys.addaudithook(dispatcher.audit) - yield dispatcher - else: - yield None +def audit_hook_dispatcher() -> Generator[AuditHookDispatcher, None, None]: + dispatcher = AuditHookDispatcher() + sys.addaudithook(dispatcher.audit) + yield dispatcher @pytest.fixture(scope="function") diff --git a/test/test_misc/test_security.py b/test/test_misc/test_security.py index b4c8fc229..652de6e73 100644 --- a/test/test_misc/test_security.py +++ b/test/test_misc/test_security.py @@ -8,7 +8,7 @@ from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource from test.utils.urlopen import context_urlopener from textwrap import dedent -from typing import Any, Iterable, Optional, Tuple +from typing import Any, Iterable, Tuple from urllib.request import HTTPHandler, OpenerDirector, Request import pytest @@ -74,17 +74,12 @@ def generate_make_block_file_cases() -> Iterable[ParameterSet]: @pytest.mark.parametrize(["defence", "uri_kind"], generate_make_block_file_cases()) def test_block_file( tmp_path: Path, - audit_hook_dispatcher: Optional[AuditHookDispatcher], + audit_hook_dispatcher: AuditHookDispatcher, http_file_server: HTTPFileServer, exit_stack: ExitStack, defence: Defence, uri_kind: URIKind, ) -> None: - if 
audit_hook_dispatcher is None: - pytest.skip( - "audit hook dispatcher not available, likely because of Python version" - ) - context_file = tmp_path / "context.jsonld" context_file.write_text(dedent(JSONLD_CONTEXT)) context_file_served = http_file_server.add_file_with_caching( From 46ff6cff00f4b4903615113af9f63c2657ac2495 Mon Sep 17 00:00:00 2001 From: WhiteGobo Date: Fri, 16 Jun 2023 00:17:49 +0200 Subject: [PATCH 096/114] Fix: SPARQL count with optionals (#2448) Change SPARQL count aggregate to ignore optional that are unbound instead of raising an exception when they are encountered. Co-authored-by: WhiteGobo --- rdflib/plugins/sparql/aggregates.py | 6 ++++- test/test_sparql/test_agg_distinct.py | 39 ++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/rdflib/plugins/sparql/aggregates.py b/rdflib/plugins/sparql/aggregates.py index 67e143a7e..d4a7d6592 100644 --- a/rdflib/plugins/sparql/aggregates.py +++ b/rdflib/plugins/sparql/aggregates.py @@ -89,7 +89,11 @@ def eval_full_row(self, row: FrozenBindings) -> FrozenBindings: return row def use_row(self, row: FrozenBindings) -> bool: - return self.eval_row(row) not in self.seen + try: + return self.eval_row(row) not in self.seen + except NotBoundError: + # happens when counting zero optional nodes. See issue #2229 + return False @overload diff --git a/test/test_sparql/test_agg_distinct.py b/test/test_sparql/test_agg_distinct.py index 7c11bf6ec..9b5113260 100644 --- a/test/test_sparql/test_agg_distinct.py +++ b/test/test_sparql/test_agg_distinct.py @@ -1,4 +1,5 @@ -from rdflib import Graph +from rdflib import Graph, URIRef +from rdflib.term import Literal query_tpl = """ SELECT ?x (MIN(?y_) as ?y) (%s(DISTINCT ?z_) as ?z) { @@ -116,3 +117,39 @@ def test_count_distinct(): """ ) assert list(results)[0][0].toPython() == 2 + + +def test_count_optional_values(): + """Problematic query because ?inst may be not bound. 
+ So when counting over not bound variables it throws a NotBoundError. + """ + g = Graph() + g.bind("ex", "http://example.com/") + g.parse( + format="ttl", + data="""@prefix ex: . + ex:1 a ex:a; + ex:d ex:b. + ex:2 a ex:a; + ex:d ex:c; + ex:d ex:b. + ex:3 a ex:a. + """, + ) + + query = """ + SELECT DISTINCT ?x (COUNT(DISTINCT ?inst) as ?cnt) + WHERE { + ?x a ex:a + OPTIONAL { + VALUES ?inst {ex:b ex:c}. + ?x ex:d ?inst. + } + } GROUP BY ?x + """ + results = dict(g.query(query)) + assert results == { + URIRef("http://example.com/1"): Literal(1), + URIRef("http://example.com/2"): Literal(2), + URIRef("http://example.com/3"): Literal(0), + } From 4e42d10351ab9bf3702247106fec86f2b4f3b575 Mon Sep 17 00:00:00 2001 From: Kibubu <85624897+Kibubu@users.noreply.github.com> Date: Fri, 16 Jun 2023 00:45:06 +0200 Subject: [PATCH 097/114] docs: fix comment that doesn't describe behavior (#2443) Comment refers to a person that knows bob and the code would return a name, but this would only work if the triple `person foaf:name bob .` is part of the dataset As this is a very uncommon way to model a `foaf:knows` the code was adjusted to match the description. 
--- docs/intro_to_graphs.rst | 2 +- pyproject.toml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/intro_to_graphs.rst b/docs/intro_to_graphs.rst index 4227634a5..c061a3c7b 100644 --- a/docs/intro_to_graphs.rst +++ b/docs/intro_to_graphs.rst @@ -105,7 +105,7 @@ node, not a generator: # get any name of bob name = g.value(bob, FOAF.name) # get the one person that knows bob and raise an exception if more are found - mbox = g.value(predicate = FOAF.name, object=bob, any=False) + person = g.value(predicate=FOAF.knows, object=bob, any=False) :class:`~rdflib.graph.Graph` methods for accessing triples diff --git a/pyproject.toml b/pyproject.toml index 23c51b12c..0d7d4c8c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -115,6 +115,9 @@ pep8-naming = ["-N815"] pep8-naming = ["-N802"] [tool.flakeheaven.exceptions."rdflib/plugins/parsers/trix.py"] pep8-naming = ["-N802"] +[tool.flakeheaven.exceptions."docs/*.rst"] +pyflakes = ["-F821"] + [tool.black] required-version = "23.3.0" From 12beed4902a18506d8628e620abe6b4abe47f6c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 21:17:23 +0200 Subject: [PATCH 098/114] build(deps): bump library/python in /docker/unstable (#2453) Bumps library/python from `1966141` to `53a67c0`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/unstable/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index 569bb9e17..d76f15cd6 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.4-slim@sha256:1966141ab594e175852a033da2a38f0cb042b5b92896c22073f8477f96f43b06 +FROM docker.io/library/python:3.11.4-slim@sha256:53a67c012da3b807905559fa59fac48a3a68600d73c5da10c2f0d8adc96dbd01 # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ From 186f3791e873b17074a551db42e35b962f6a3af8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 21:17:39 +0200 Subject: [PATCH 099/114] build(deps): bump library/python in /docker/latest (#2454) Bumps library/python from `1966141` to `53a67c0`. --- updated-dependencies: - dependency-name: library/python dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/latest/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index aed8f07ad..29e44d715 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.4-slim@sha256:1966141ab594e175852a033da2a38f0cb042b5b92896c22073f8477f96f43b06 +FROM docker.io/library/python:3.11.4-slim@sha256:53a67c012da3b807905559fa59fac48a3a68600d73c5da10c2f0d8adc96dbd01 COPY docker/latest/requirements.txt /var/tmp/build/ From 122d8b308c00fb65fb48e7bc9ee1d2f6db6c013f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 21:18:15 +0200 Subject: [PATCH 100/114] build(deps): bump pyparsing from 3.0.9 to 3.1.0 (#2455) Bumps [pyparsing](https://github.com/pyparsing/pyparsing) from 3.0.9 to 3.1.0. - [Release notes](https://github.com/pyparsing/pyparsing/releases) - [Changelog](https://github.com/pyparsing/pyparsing/blob/master/CHANGES) - [Commits](https://github.com/pyparsing/pyparsing/compare/pyparsing_3.0.9...3.1.0) --- updated-dependencies: - dependency-name: pyparsing dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 917b750df..eb9a8218c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -920,13 +920,13 @@ plugins = ["importlib-metadata"] [[package]] name = "pyparsing" -version = "3.0.9" +version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, + {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, + {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, ] [package.extras] From f278b86cba41d89fe3b448fdc7954b10b24226ad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Jun 2023 21:18:49 +0200 Subject: [PATCH 101/114] build(deps-dev): bump isort from 5.11.5 to 5.12.0 (#2457) Bumps [isort](https://github.com/pycqa/isort) from 5.11.5 to 5.12.0. - [Release notes](https://github.com/pycqa/isort/releases) - [Changelog](https://github.com/PyCQA/isort/blob/main/CHANGELOG.md) - [Commits](https://github.com/pycqa/isort/compare/5.11.5...5.12.0) --- updated-dependencies: - dependency-name: isort dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index eb9a8218c..be0da07d8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -442,17 +442,17 @@ six = "*" [[package]] name = "isort" -version = "5.11.5" +version = "5.12.0" description = "A Python utility / library to sort Python imports." optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, - {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] +colors = ["colorama (>=0.4.3)"] pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] From 10f9ebe162d2dccda637b98297fec9ead8c970b7 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Tue, 20 Jun 2023 21:39:59 +0200 Subject: [PATCH 102/114] fix: typing errors from dmypy (#2451) Fix various typing errors that are reported when running with `dmypy`, the mypy daemon. Also add a task for running `dmypy` to the Taskfile that can be selected as the default mypy variant by setting the `MYPY_VARIANT` environment variable to `dmypy`. 
--- Taskfile.yml | 10 +++++++++- docs/conf.py | 1 + rdflib/events.py | 9 +++++++-- rdflib/plugins/parsers/notation3.py | 2 +- rdflib/plugins/sparql/parserutils.py | 4 ++-- rdflib/plugins/stores/memory.py | 4 +++- rdflib/store.py | 2 +- rdflib/tools/csv2rdf.py | 14 ++++++++------ test/jsonld/test_compaction.py | 8 +++++--- test/test_graph/test_graph_context.py | 5 ++++- test/test_sparql/test_prefixed_name.py | 5 +++-- test/utils/sparql_checker.py | 10 ++++++---- test/utils/test/test_result.py | 5 +++-- 13 files changed, 53 insertions(+), 26 deletions(-) diff --git a/Taskfile.yml b/Taskfile.yml index b2febc570..febb1c202 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -35,6 +35,7 @@ vars: PIP_COMPILE: pip-compile DOCKER: docker OCI_REFERENCE: ghcr.io/rdflib/rdflib + MYPY_VARIANT: '{{ env "MYPY_VARIANT" | default "mypy" }}' tasks: install:system-deps: desc: Install system dependencies @@ -130,10 +131,17 @@ tasks: cmds: - '{{.VENV_PYTHON}} -m isort {{if (mustFromJson (.CHECK | default "false"))}}--check --diff {{end}}{{.CLI_ARGS | default "."}}' mypy: + desc: Run mypy + cmds: + - task: "mypy:{{ .MYPY_VARIANT }}" + mypy:mypy: desc: Run mypy cmds: - "{{.VENV_PYTHON}} -m mypy --show-error-context --show-error-codes {{.CLI_ARGS}}" - + mypy:dmypy: + desc: Run dmypy + cmds: + - "{{.RUN_PREFIX}} dmypy run {{.CLI_ARGS}}" lint:fix: desc: Fix auto-fixable linting errors cmds: diff --git a/docs/conf.py b/docs/conf.py index 93d78d8a0..05a702883 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -302,6 +302,7 @@ def find_version(filename): ("py:class", "ParseFailAction"), ("py:class", "pyparsing.core.TokenConverter"), ("py:class", "pyparsing.results.ParseResults"), + ("py:class", "pyparsing.core.ParserElement"), # These are related to BerkeleyDB ("py:class", "db.DBEnv"), ] diff --git a/rdflib/events.py b/rdflib/events.py index d0290d5cd..84c9f07a0 100644 --- a/rdflib/events.py +++ b/rdflib/events.py @@ -1,3 +1,5 @@ +from __future__ import annotations + __doc__ = """ Dirt Simple Events 
@@ -23,6 +25,9 @@ """ + +from typing import Any, Dict, Optional + __all__ = ["Event", "Dispatcher"] @@ -53,9 +58,9 @@ class Dispatcher: subscribers. """ - _dispatch_map = None + _dispatch_map: Optional[Dict[Any, Any]] = None - def set_map(self, amap): + def set_map(self, amap: Dict[Any, Any]): self._dispatch_map = amap return self diff --git a/rdflib/plugins/parsers/notation3.py b/rdflib/plugins/parsers/notation3.py index 2a64be24f..290e7d04b 100755 --- a/rdflib/plugins/parsers/notation3.py +++ b/rdflib/plugins/parsers/notation3.py @@ -276,7 +276,7 @@ def _fixslash(s: str) -> str: N3_Empty = (SYMBOL, List_NS + "Empty") -runNamespaceValue = None +runNamespaceValue: Optional[str] = None def runNamespace() -> str: diff --git a/rdflib/plugins/sparql/parserutils.py b/rdflib/plugins/sparql/parserutils.py index b625f3646..2c5bc38bd 100644 --- a/rdflib/plugins/sparql/parserutils.py +++ b/rdflib/plugins/sparql/parserutils.py @@ -14,7 +14,7 @@ Union, ) -from pyparsing import ParseResults, TokenConverter, originalTextFor +from pyparsing import ParserElement, ParseResults, TokenConverter, originalTextFor from rdflib.term import BNode, Identifier, Variable @@ -241,7 +241,7 @@ class Comp(TokenConverter): Returns CompValue / Expr objects - depending on whether evalFn is set. 
""" - def __init__(self, name: str, expr): + def __init__(self, name: str, expr: ParserElement): self.expr = expr TokenConverter.__init__(self, expr) self.setName(name) diff --git a/rdflib/plugins/stores/memory.py b/rdflib/plugins/stores/memory.py index 13c15218a..68f0ece50 100644 --- a/rdflib/plugins/stores/memory.py +++ b/rdflib/plugins/stores/memory.py @@ -1,5 +1,7 @@ # # +from __future__ import annotations + from typing import ( TYPE_CHECKING, Any, @@ -34,7 +36,7 @@ __all__ = ["SimpleMemory", "Memory"] -ANY = None +ANY: None = None class SimpleMemory(Store): diff --git a/rdflib/store.py b/rdflib/store.py index e3c9f7ab2..a3f6b6959 100644 --- a/rdflib/store.py +++ b/rdflib/store.py @@ -65,7 +65,7 @@ VALID_STORE = 1 CORRUPTED_STORE = 0 NO_STORE = -1 -UNKNOWN = None +UNKNOWN: None = None Pickler = pickle.Pickler diff --git a/rdflib/tools/csv2rdf.py b/rdflib/tools/csv2rdf.py index fe740356a..b519a78fc 100644 --- a/rdflib/tools/csv2rdf.py +++ b/rdflib/tools/csv2rdf.py @@ -6,6 +6,7 @@ try: ``csv2rdf --help`` """ +from __future__ import annotations import codecs import configparser @@ -17,11 +18,12 @@ import sys import time import warnings +from typing import Any, Dict, List, Optional, Tuple from urllib.parse import quote import rdflib -from rdflib import RDF, RDFS -from rdflib.namespace import split_uri +from rdflib.namespace import RDF, RDFS, split_uri +from rdflib.term import URIRef __all__ = ["CSV2RDF"] @@ -88,7 +90,7 @@ """ # bah - ugly global -uris = {} +uris: Dict[Any, Tuple[URIRef, Optional[URIRef]]] = {} def toProperty(label): @@ -113,7 +115,7 @@ def toPropertyLabel(label): return label -def index(l_, i): +def index(l_: List[int], i: Tuple[int, ...]) -> Tuple[int, ...]: """return a set of indexes from a list >>> index([1,2,3],(0,2)) (1, 3) @@ -127,7 +129,7 @@ def csv_reader(csv_data, dialect=csv.excel, **kwargs): yield row -def prefixuri(x, prefix, class_=None): +def prefixuri(x, prefix, class_: Optional[URIRef] = None): if prefix: r = rdflib.URIRef(prefix + 
quote(x.encode("utf8").replace(" ", "_"), safe="")) else: @@ -143,7 +145,7 @@ class NodeMaker: def range(self): return rdflib.RDFS.Literal - def __call__(self, x): + def __call__(self, x: Any): return rdflib.Literal(x) diff --git a/test/jsonld/test_compaction.py b/test/jsonld/test_compaction.py index e76de5580..f6cdae14b 100644 --- a/test/jsonld/test_compaction.py +++ b/test/jsonld/test_compaction.py @@ -1,8 +1,10 @@ # -*- coding: UTF-8 -*- +from __future__ import annotations import itertools import json import re +from typing import Any, Dict, List, Tuple import pytest @@ -13,11 +15,11 @@ register("json-ld", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer") -cases = [] +cases: List[Tuple[str, Dict[str, Any]]] = [] -def case(*args): - cases.append(args) +def case(source: str, data: Dict[str, Any]): + cases.append((source, data)) case( diff --git a/test/test_graph/test_graph_context.py b/test/test_graph/test_graph_context.py index f6ef5c3e4..adb133826 100644 --- a/test/test_graph/test_graph_context.py +++ b/test/test_graph/test_graph_context.py @@ -1,8 +1,11 @@ +from __future__ import annotations + import os import shutil import sys import unittest from tempfile import mkdtemp, mkstemp +from typing import Optional import pytest @@ -13,7 +16,7 @@ class ContextTestCase(unittest.TestCase): store = "default" slow = True - tmppath = None + tmppath: Optional[str] = None def setUp(self): try: diff --git a/test/test_sparql/test_prefixed_name.py b/test/test_sparql/test_prefixed_name.py index 99d2fb108..9ac37b281 100644 --- a/test/test_sparql/test_prefixed_name.py +++ b/test/test_sparql/test_prefixed_name.py @@ -1,11 +1,12 @@ +from __future__ import annotations + import itertools import logging from contextlib import ExitStack -from typing import Type, Union +from typing import Optional, Type, Union import pyparsing import pytest -from pyparsing import Optional import rdflib from rdflib import Graph diff --git a/test/utils/sparql_checker.py 
b/test/utils/sparql_checker.py index 477c9d3c8..680742100 100644 --- a/test/utils/sparql_checker.py +++ b/test/utils/sparql_checker.py @@ -1,5 +1,7 @@ """This runs the nt tests for the W3C RDF Working Group's N-Quads test suite.""" +from __future__ import annotations + import enum import logging import pprint @@ -290,11 +292,11 @@ def check_syntax(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: if entry.type_info.negative: catcher = xstack.enter_context(pytest.raises(Exception)) if entry.type_info.query_type is QueryType.UPDATE: - tree = parseUpdate(query_text) - translateUpdate(tree) + parse_tree = parseUpdate(query_text) + translateUpdate(parse_tree) elif entry.type_info.query_type is QueryType.QUERY: - tree = parseQuery(query_text) - translateQuery(tree) + query_tree = parseQuery(query_text) + translateQuery(query_tree) if catcher is not None: assert catcher.value is not None logging.info("catcher.value = %s", catcher.value) diff --git a/test/utils/test/test_result.py b/test/utils/test/test_result.py index 1d9325791..d30e2d55e 100644 --- a/test/utils/test/test_result.py +++ b/test/utils/test/test_result.py @@ -1,9 +1,10 @@ +from __future__ import annotations + from contextlib import ExitStack from test.utils.result import BindingsCollectionType, assert_bindings_collections_equal -from typing import Type, Union +from typing import Optional, Type, Union import pytest -from pyparsing import Optional from rdflib.namespace import XSD from rdflib.term import BNode, Literal, URIRef, Variable From 9573f59b52e51b826fb4030c478fcdc6bc55a551 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 21:33:54 +0200 Subject: [PATCH 103/114] build(deps-dev): bump mypy from 1.3.0 to 1.4.1 (#2462) Bumps [mypy](https://github.com/python/mypy) from 1.3.0 to 1.4.1. 
- [Commits](https://github.com/python/mypy/compare/v1.3.0...v1.4.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 56 ++++++++++++++++++++++++++--------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index be0da07d8..dce2ed3b0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -706,43 +706,43 @@ files = [ [[package]] name = "mypy" -version = "1.3.0" +version = "1.4.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, - {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, - {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, - {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, - {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, - {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, - {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, - {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, - {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, - {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, - {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, - {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, - {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, - {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, - {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, - {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, - {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, - {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, - {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, - {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, - {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, - {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, - {file = 
"mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, - {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, - {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, - {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = 
"mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] From 04baf8c8c50bd7df5d8dbacd0ccc91bbf88573e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 21:34:23 +0200 Subject: [PATCH 104/114] build(deps-dev): bump types-setuptools from 67.8.0.0 to 68.0.0.0 (#2463) Bumps [types-setuptools](https://github.com/python/typeshed) from 67.8.0.0 to 68.0.0.0. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-setuptools dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index dce2ed3b0..470c3af30 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1273,13 +1273,13 @@ files = [ [[package]] name = "types-setuptools" -version = "67.8.0.0" +version = "68.0.0.0" description = "Typing stubs for setuptools" optional = false python-versions = "*" files = [ - {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"}, - {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"}, + {file = "types-setuptools-68.0.0.0.tar.gz", hash = "sha256:fc958b4123b155ffc069a66d3af5fe6c1f9d0600c35c0c8444b2ab4147112641"}, + {file = "types_setuptools-68.0.0.0-py3-none-any.whl", hash = "sha256:cc00e09ba8f535362cbe1ea8b8407d15d14b59c57f4190cceaf61a9e57616446"}, ] [[package]] @@ -1344,4 +1344,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "1c56b77bc9381ed73f90bdc11243e8fae40be3fe06aec26c74eef94937698017" +content-hash = "6143a36eb3f26087a9ade34ff007ea5c092f3966ea7e5193d89445b642a5a530" diff --git a/pyproject.toml b/pyproject.toml index 0d7d4c8c6..05b2cba89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ lxml-stubs = "^0.4.0" pytest = "^7.1.3" pytest-cov = "^4.0.0" coverage = {version = "^7.0.1", extras = ["toml"]} -types-setuptools = ">=65.6.0.3,<68.0.0.0" +types-setuptools = ">=65.6.0.3,<69.0.0.0" setuptools = ">=65.6.3,<68.0.0" [tool.poetry.group.docs.dependencies] From bddf8ec4dcd46ee5abcebef5590a2a9c3bd11686 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 21:34:35 +0200 Subject: [PATCH 105/114] build(deps-dev): bump pytest 
from 7.3.2 to 7.4.0 (#2464) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.3.2 to 7.4.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.3.2...7.4.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 470c3af30..4b0e7b151 100644 --- a/poetry.lock +++ b/poetry.lock @@ -934,13 +934,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.3.2" +version = "7.4.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.2-py3-none-any.whl", hash = "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295"}, - {file = "pytest-7.3.2.tar.gz", hash = "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] From 2545a3d56675c60e26575417afeae1701470e899 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 21:34:55 +0200 Subject: [PATCH 106/114] build(deps-dev): bump typing-extensions from 4.6.3 to 4.7.1 (#2466) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.6.3 to 4.7.1. 
- [Release notes](https://github.com/python/typing_extensions/releases) - [Changelog](https://github.com/python/typing_extensions/blob/main/CHANGELOG.md) - [Commits](https://github.com/python/typing_extensions/compare/4.6.3...4.7.1) --- updated-dependencies: - dependency-name: typing-extensions dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4b0e7b151..27f8e763f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1284,13 +1284,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.6.3" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, - {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] From 8582691bb8320ffc660e4bc67348a4413d26a32e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 21:37:13 +0200 Subject: [PATCH 107/114] build(deps-dev): bump setuptools from 67.8.0 to 68.0.0 (#2465) Bumps [setuptools](https://github.com/pypa/setuptools) from 67.8.0 to 68.0.0. 
- [Release notes](https://github.com/pypa/setuptools/releases) - [Changelog](https://github.com/pypa/setuptools/blob/main/NEWS.rst) - [Commits](https://github.com/pypa/setuptools/compare/v67.8.0...v68.0.0) --- updated-dependencies: - dependency-name: setuptools dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 27f8e763f..de0432df0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1055,13 +1055,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.8.0" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] @@ -1344,4 +1344,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "6143a36eb3f26087a9ade34ff007ea5c092f3966ea7e5193d89445b642a5a530" +content-hash = "bd72cf9ef87c3080a305f3b2d90f1e82fd1f8a6be2a5d5ffdfc039b5c77f292e" diff --git a/pyproject.toml b/pyproject.toml index 05b2cba89..c9bc2c002 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ pytest = "^7.1.3" pytest-cov = "^4.0.0" coverage = {version = "^7.0.1", extras = ["toml"]} types-setuptools = 
">=65.6.0.3,<69.0.0.0" -setuptools = ">=65.6.3,<68.0.0" +setuptools = ">=65.6.3,<69.0.0" [tool.poetry.group.docs.dependencies] sphinx = "^5.3.0" From 5c7643c28cecd64452186ec3409e2ed7026140ea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 19:38:24 +0000 Subject: [PATCH 108/114] build(deps-dev): bump sphinx from 5.3.0 to 6.2.1 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.3.0 to 6.2.1. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v5.3.0...v6.2.1) --- updated-dependencies: - dependency-name: sphinx dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- poetry.lock | 20 ++++++++++---------- pyproject.toml | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/poetry.lock b/poetry.lock index de0432df0..687d78ba4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1093,26 +1093,26 @@ files = [ [[package]] name = "sphinx" -version = "5.3.0" +version = "6.2.1" description = "Python documentation generator" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, + {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"}, + {file = "sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" +docutils = ">=0.18.1,<0.20" imagesize = 
">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" +Pygments = ">=2.13" +requests = ">=2.25.0" snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -1123,8 +1123,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-autodoc-typehints" @@ -1344,4 +1344,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "bd72cf9ef87c3080a305f3b2d90f1e82fd1f8a6be2a5d5ffdfc039b5c77f292e" +content-hash = "c5774b73f06388570fcf7497bf4039472463139a1ef5f483a3da22aea57571cb" diff --git a/pyproject.toml b/pyproject.toml index c9bc2c002..01f3bd009 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ types-setuptools = ">=65.6.0.3,<69.0.0.0" setuptools = ">=65.6.3,<69.0.0" [tool.poetry.group.docs.dependencies] -sphinx = "^5.3.0" +sphinx = ">=5.3,<7.0" myst-parser = "^1.0.0" sphinxcontrib-apidoc = "^0.3.0" sphinx-autodoc-typehints = "^1.17.1" From afea615557346347145128f77478b013ffcc0d45 Mon Sep 17 00:00:00 2001 From: WhiteGobo Date: Wed, 5 Jul 2023 20:57:16 +0200 Subject: [PATCH 109/114] fix: TriG parser error handling for nested graphs (#2468) Raise an error when nested graphs occur in TriG. With this change, the test passes. 
--------- Co-authored-by: WhiteGobo Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Iwan Aucamp --- rdflib/plugins/parsers/trig.py | 3 +++ test/test_w3c_spec/test_trig_w3c.py | 3 --- test_reports/rdflib_w3c_trig-HEAD.ttl | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/rdflib/plugins/parsers/trig.py b/rdflib/plugins/parsers/trig.py index d28198bce..71593b5ce 100644 --- a/rdflib/plugins/parsers/trig.py +++ b/rdflib/plugins/parsers/trig.py @@ -98,6 +98,9 @@ def graph(self, argstr: str, i: int) -> int: j = i + 1 + if self._context is not None: + self.BadSyntax(argstr, i, "Nested graphs are not allowed") + oldParentContext = self._parentContext self._parentContext = self._context reason2 = self._reason2 diff --git a/test/test_w3c_spec/test_trig_w3c.py b/test/test_w3c_spec/test_trig_w3c.py index ea2b02edd..d7c843340 100644 --- a/test/test_w3c_spec/test_trig_w3c.py +++ b/test/test_w3c_spec/test_trig_w3c.py @@ -176,9 +176,6 @@ def check_entry(entry: ManifestEntry) -> None: f"{REMOTE_BASE_IRI}#trig-graph-bad-01": pytest.mark.xfail( reason="accepts GRAPH with no name" ), - f"{REMOTE_BASE_IRI}#trig-graph-bad-07": pytest.mark.xfail( - reason="accepts nested GRAPH" - ), } diff --git a/test_reports/rdflib_w3c_trig-HEAD.ttl b/test_reports/rdflib_w3c_trig-HEAD.ttl index 7c22104d2..78a28d61c 100644 --- a/test_reports/rdflib_w3c_trig-HEAD.ttl +++ b/test_reports/rdflib_w3c_trig-HEAD.ttl @@ -971,7 +971,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . 
From 9a901ae98f940bb2c15a22736fd6809a8e75ccee Mon Sep 17 00:00:00 2001 From: WhiteGobo Date: Wed, 5 Jul 2023 21:09:00 +0200 Subject: [PATCH 110/114] test: skip two SPARQL 1.0 tests that should not pass with SPARQL 1.1 Skip the following tests that rely on SPARQL 1.0 grammar and are incompatible with the SPARQL 1.1 grammar: - - --------- Co-authored-by: WhiteGobo Co-authored-by: Iwan Aucamp --- test/test_w3c_spec/test_sparql10_w3c.py | 10 +++++++--- test_reports/rdflib_w3c_sparql10-HEAD.ttl | 4 ++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/test/test_w3c_spec/test_sparql10_w3c.py b/test/test_w3c_spec/test_sparql10_w3c.py index 70df2d066..71bdbcfa6 100644 --- a/test/test_w3c_spec/test_sparql10_w3c.py +++ b/test/test_w3c_spec/test_sparql10_w3c.py @@ -23,10 +23,14 @@ (REMOTE_BASE_IRI, ensure_suffix(LOCAL_BASE_DIR.as_uri(), "/")), ) MARK_DICT: MarksDictType = { - f"{REMOTE_BASE_IRI}basic/manifest#term-6": pytest.mark.xfail( - reason="query misinterpreted." + f"{REMOTE_BASE_IRI}basic/manifest#term-6": pytest.mark.skip( + reason="using Sparql 1.1 which is not backwards compatible. " + "'456.' will be interpreted differently in query and data." + ), + f"{REMOTE_BASE_IRI}basic/manifest#term-7": pytest.mark.skip( + reason="using Sparql 1.1 which is not backwards compatible. " + "'456.' will be interpreted differently in query and data." ), - f"{REMOTE_BASE_IRI}basic/manifest#term-7": pytest.mark.xfail(reason="..."), f"{REMOTE_BASE_IRI}expr-builtin/manifest#dawg-datatype-2": pytest.mark.xfail( reason="additional row in output" ), diff --git a/test_reports/rdflib_w3c_sparql10-HEAD.ttl b/test_reports/rdflib_w3c_sparql10-HEAD.ttl index f43162420..78997b01c 100644 --- a/test_reports/rdflib_w3c_sparql10-HEAD.ttl +++ b/test_reports/rdflib_w3c_sparql10-HEAD.ttl @@ -323,7 +323,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:untested ] ; earl:subject ; earl:test . 
@@ -331,7 +331,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:untested ] ; earl:subject ; earl:test . From 8c9608b4e9db07085884e705579307ef6a3e8d48 Mon Sep 17 00:00:00 2001 From: WhiteGobo Date: Wed, 5 Jul 2023 22:03:14 +0200 Subject: [PATCH 111/114] fix: TriG handling of GRAPH keyword without a graph ID (#2469) The RDF 1.1 TriG grammar only allows the `GRAPH` keyword if it is followed by a graph identifier [[ref](https://www.w3.org/TR/trig/#grammar-production-block)]. This change enforces this rule so that the test passes. --------- Co-authored-by: WhiteGobo Co-authored-by: Iwan Aucamp --- rdflib/plugins/parsers/trig.py | 4 ++++ test/test_w3c_spec/test_trig_w3c.py | 3 --- test_reports/rdflib_w3c_trig-HEAD.ttl | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/rdflib/plugins/parsers/trig.py b/rdflib/plugins/parsers/trig.py index 71593b5ce..cc4cf131e 100644 --- a/rdflib/plugins/parsers/trig.py +++ b/rdflib/plugins/parsers/trig.py @@ -69,16 +69,20 @@ def graph(self, argstr: str, i: int) -> int: raise Exception if it looks like a graph, but isn't. 
""" + need_graphid = False # import pdb; pdb.set_trace() j = self.sparqlTok("GRAPH", argstr, i) # optional GRAPH keyword if j >= 0: i = j + need_graphid = True r: MutableSequence[Any] = [] j = self.labelOrSubject(argstr, i, r) if j >= 0: graph = r[0] i = j + elif need_graphid: + self.BadSyntax(argstr, i, "GRAPH keyword must be followed by graph name") else: graph = self._store.graph.identifier # hack diff --git a/test/test_w3c_spec/test_trig_w3c.py b/test/test_w3c_spec/test_trig_w3c.py index d7c843340..9f49616fb 100644 --- a/test/test_w3c_spec/test_trig_w3c.py +++ b/test/test_w3c_spec/test_trig_w3c.py @@ -173,9 +173,6 @@ def check_entry(entry: ManifestEntry) -> None: f"{REMOTE_BASE_IRI}#trig-syntax-bad-list-04": pytest.mark.xfail( reason="ignores badly formed quad" ), - f"{REMOTE_BASE_IRI}#trig-graph-bad-01": pytest.mark.xfail( - reason="accepts GRAPH with no name" - ), } diff --git a/test_reports/rdflib_w3c_trig-HEAD.ttl b/test_reports/rdflib_w3c_trig-HEAD.ttl index 78a28d61c..02e67f8f2 100644 --- a/test_reports/rdflib_w3c_trig-HEAD.ttl +++ b/test_reports/rdflib_w3c_trig-HEAD.ttl @@ -923,7 +923,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . From 0ea6ca579442219d67ffb1fc7313f05fd16d8d49 Mon Sep 17 00:00:00 2001 From: Iwan Aucamp Date: Thu, 6 Jul 2023 23:16:23 +0200 Subject: [PATCH 112/114] test: modernize literal tests (#2472) This change moves tests out of classes and into top-level functions, and merges some test functions that were testing the same thing. 
--- test/test_literal/test_literal.py | 1690 ++++++++++++++--------------- 1 file changed, 827 insertions(+), 863 deletions(-) diff --git a/test/test_literal/test_literal.py b/test/test_literal/test_literal.py index 074abe1e6..2c78c11b7 100644 --- a/test/test_literal/test_literal.py +++ b/test/test_literal/test_literal.py @@ -49,894 +49,858 @@ def clear_bindings() -> Generator[None, None, None]: _reset_bindings() -class TestLiteral: - def test_repr_apostrophe(self) -> None: - a = rdflib.Literal("'") - b = eval(repr(a)) - assert a == b - - def test_repr_quote(self) -> None: - a = rdflib.Literal('"') - b = eval(repr(a)) - assert a == b - - def test_backslash(self) -> None: - d = r""" +def test_repr_apostrophe() -> None: + a = rdflib.Literal("'") + b = eval(repr(a)) + assert a == b + + +def test_repr_quote() -> None: + a = rdflib.Literal('"') + b = eval(repr(a)) + assert a == b + + +def test_backslash() -> None: + d = r""" - - a\b - +xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" +xmlns:foo="http://example.org/foo#"> + + a\b + """ - g = rdflib.Graph() - g.parse(data=d, format="xml") - a = rdflib.Literal("a\\b") - b = list(g.objects())[0] - assert a == b - - def test_literal_from_bool(self) -> None: - _l = rdflib.Literal(True) - assert _l.datatype == rdflib.XSD["boolean"] - - -class TestNewPT: - # NOTE: TestNewPT is written for pytest so that pytest features like - # parametrize can be used. - # New tests should be added here instead of in TestNew. - @pytest.mark.parametrize( - "lang, exception_type", - [ - ({}, TypeError), - ([], TypeError), - (1, TypeError), - (b"en", TypeError), - ("999", ValueError), - ("-", ValueError), - ], - ) - def test_cant_pass_invalid_lang( - self, - lang: Any, - exception_type: Type[Exception], - ) -> None: - """ - Construction of Literal fails if the language tag is invalid. 
- """ - with pytest.raises(exception_type): - Literal("foo", lang=lang) - - @pytest.mark.parametrize( - "lexical, datatype, is_ill_typed", - [ - ("true", XSD.boolean, False), - ("1", XSD.boolean, False), - (b"false", XSD.boolean, False), - (b"0", XSD.boolean, False), - ("yes", XSD.boolean, True), - ("200", XSD.byte, True), - (b"-128", XSD.byte, False), - ("127", XSD.byte, False), - ("255", XSD.unsignedByte, False), - ("-100", XSD.unsignedByte, True), - (b"200", XSD.unsignedByte, False), - (b"64300", XSD.short, True), - ("-6000", XSD.short, False), - ("1000000", XSD.nonNegativeInteger, False), - ("-100", XSD.nonNegativeInteger, True), - ("a", XSD.double, True), - ("0", XSD.double, False), - ("0.1", XSD.double, False), - ("0.1", XSD.decimal, False), - ("0.g", XSD.decimal, True), - ("b", XSD.integer, True), - ("2147483647", XSD.int, False), - ("2147483648", XSD.int, True), - ("2147483648", XSD.integer, False), - ("valid ASCII", XSD.string, False), - pytest.param("هذا رجل ثلج⛄", XSD.string, False, id="snowman-ar"), - ("More ASCII", None, None), - ("Not a valid time", XSD.time, True), - ("Not a valid date", XSD.date, True), - ("7264666c6962", XSD.hexBinary, False), - # RDF.langString is not a recognized datatype IRI as we assign no literal value to it, though this should likely change. - ("English string", RDF.langString, None), - # The datatypes IRIs below should never be recognized. - ("[p]", EGNS.unrecognized, None), - ], - ) - def test_ill_typed_literals( - self, - lexical: Union[bytes, str], - datatype: Optional[URIRef], - is_ill_typed: Optional[bool], - ) -> None: - """ - ill_typed has the correct value. - """ - lit = Literal(lexical, datatype=datatype) - assert lit.ill_typed is is_ill_typed - if is_ill_typed is False: - # If the literal is not ill typed it should have a value associated with it. 
- assert lit.value is not None - - @pytest.mark.parametrize( - "a, b, op, expected_result", - [ - pytest.param( - Literal("20:00:00", datatype=_XSD_STRING), - Literal("23:30:00", datatype=_XSD_STRING), - "bminusa", - TypeError(r"unsupported operand type\(s\) for -: 'str' and 'str'"), - id="Attempt to subtract strings", - ), - pytest.param( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("23:30:00", datatype=_XSD_STRING), - "aplusb", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#string to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" - ), - id="Attempt to add string to time", - ), - pytest.param( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("23:30:00", datatype=_XSD_STRING), - "bminusa", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#string" - ), - id="Attempt to subtract string from time", - ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "aplusb", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" - ), - id="Attempt to add integer to time", - ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "bplusa", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#time to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to add time to integer", - ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "aminusb", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#time" - ), - id="Attempt to subtract integer from time", - ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - 
Literal("12", datatype=_XSD_INTEGER), - "bminusa", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to subtract time from integer", - ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "aplusb", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#duration to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to add duration to integer", - ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "bplusa", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" - ), - id="Attempt to add integer to duration", - ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "aminusb", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#duration from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to subtract duration from integer", - ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "bminusa", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" - ), - id="Attempt to subtract integer from duration", - ), - ( - Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), - Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), - "bminusa", - Literal("P31D", datatype=_XSD_DURATION), - ), - ( - Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), - Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), - "bminusa", - Literal("P119D", datatype=_XSD_DURATION), - ), - ( - 
Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), - Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), - "aminusb", - Literal("-P122DT15H58M", datatype=_XSD_DURATION), - ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), - Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), - "bminusa", - Literal("P122DT15H58M", datatype=_XSD_DURATION), - ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), - Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), - "bminusa", - Literal("P123D", datatype=_XSD_DURATION), - ), - ( - Literal("2006-08-01", datatype=_XSD_DATE), - Literal("2006-11-01", datatype=_XSD_DATE), - "bminusa", - Literal("P92D", datatype=_XSD_DURATION), + g = rdflib.Graph() + g.parse(data=d, format="xml") + a = rdflib.Literal("a\\b") + b = list(g.objects())[0] + assert a == b + + +def test_literal_from_bool() -> None: + _l = rdflib.Literal(True) + assert _l.datatype == rdflib.XSD["boolean"] + + +@pytest.mark.parametrize( + "lang, exception_type", + [ + ({}, TypeError), + ([], TypeError), + (1, TypeError), + (b"en", TypeError), + ("999", ValueError), + ("-", ValueError), + ], +) +def test_cant_pass_invalid_lang( + lang: Any, + exception_type: Type[Exception], +) -> None: + """ + Construction of Literal fails if the language tag is invalid. 
+ """ + with pytest.raises(exception_type): + Literal("foo", lang=lang) + + +@pytest.mark.parametrize( + "lexical, datatype, is_ill_typed", + [ + ("true", XSD.boolean, False), + ("1", XSD.boolean, False), + (b"false", XSD.boolean, False), + (b"0", XSD.boolean, False), + ("yes", XSD.boolean, True), + ("200", XSD.byte, True), + (b"-128", XSD.byte, False), + ("127", XSD.byte, False), + ("255", XSD.unsignedByte, False), + ("-100", XSD.unsignedByte, True), + (b"200", XSD.unsignedByte, False), + (b"64300", XSD.short, True), + ("-6000", XSD.short, False), + ("1000000", XSD.nonNegativeInteger, False), + ("-100", XSD.nonNegativeInteger, True), + ("a", XSD.double, True), + ("0", XSD.double, False), + ("0.1", XSD.double, False), + ("0.1", XSD.decimal, False), + ("0.g", XSD.decimal, True), + ("b", XSD.integer, True), + ("2147483647", XSD.int, False), + ("2147483648", XSD.int, True), + ("2147483648", XSD.integer, False), + ("valid ASCII", XSD.string, False), + pytest.param("هذا رجل ثلج⛄", XSD.string, False, id="snowman-ar"), + ("More ASCII", None, None), + ("Not a valid time", XSD.time, True), + ("Not a valid date", XSD.date, True), + ("7264666c6962", XSD.hexBinary, False), + # RDF.langString is not a recognized datatype IRI as we assign no literal value to it, though this should likely change. + ("English string", RDF.langString, None), + # The datatypes IRIs below should never be recognized. + ("[p]", EGNS.unrecognized, None), + ], +) +def test_ill_typed_literals( + lexical: Union[bytes, str], + datatype: Optional[URIRef], + is_ill_typed: Optional[bool], +) -> None: + """ + ill_typed has the correct value. + """ + lit = Literal(lexical, datatype=datatype) + assert lit.ill_typed is is_ill_typed + if is_ill_typed is False: + # If the literal is not ill typed it should have a value associated with it. 
+ assert lit.value is not None + + +@pytest.mark.parametrize( + "a, b, op, expected_result", + [ + pytest.param( + Literal("20:00:00", datatype=_XSD_STRING), + Literal("23:30:00", datatype=_XSD_STRING), + "bminusa", + TypeError(r"unsupported operand type\(s\) for -: 'str' and 'str'"), + id="Attempt to subtract strings", + ), + pytest.param( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("23:30:00", datatype=_XSD_STRING), + "aplusb", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#string to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" + ), + id="Attempt to add string to time", + ), + pytest.param( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("23:30:00", datatype=_XSD_STRING), + "bminusa", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#string" + ), + id="Attempt to subtract string from time", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "aplusb", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" + ), + id="Attempt to add integer to time", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "bplusa", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#time to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" + ), + id="Attempt to add time to integer", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "aminusb", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#time" + ), + id="Attempt to subtract integer from time", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + 
Literal("12", datatype=_XSD_INTEGER), + "bminusa", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" + ), + id="Attempt to subtract time from integer", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "aplusb", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#duration to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" + ), + id="Attempt to add duration to integer", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "bplusa", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" + ), + id="Attempt to add integer to duration", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "aminusb", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#duration from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" + ), + id="Attempt to subtract duration from integer", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "bminusa", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" + ), + id="Attempt to subtract integer from duration", + ), + ( + Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), + Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), + "bminusa", + Literal("P31D", datatype=_XSD_DURATION), + ), + ( + Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), + Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), + "bminusa", + Literal("P119D", datatype=_XSD_DURATION), + ), + ( + 
Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), + Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), + "aminusb", + Literal("-P122DT15H58M", datatype=_XSD_DURATION), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), + Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), + "bminusa", + Literal("P122DT15H58M", datatype=_XSD_DURATION), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), + Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), + "bminusa", + Literal("P123D", datatype=_XSD_DURATION), + ), + ( + Literal("2006-08-01", datatype=_XSD_DATE), + Literal("2006-11-01", datatype=_XSD_DATE), + "bminusa", + Literal("P92D", datatype=_XSD_DURATION), + ), + ( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12:50:00", datatype=_XSD_TIME), + "bminusa", + Literal("-PT8H2M", datatype=_XSD_DURATION), + ), + ( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("23:30:00", datatype=_XSD_TIME), + "bminusa", + Literal("PT3H30M", datatype=_XSD_DURATION), + ), + ( + Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), + Literal("P31D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), + ), + ( + Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), + Literal("P119D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), + Literal("P123D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), + ), + ( + Literal("2006-08-01", datatype=_XSD_DATE), + Literal("P92D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-11-01", datatype=_XSD_DATE), + ), + ( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("-PT8H2M", datatype=_XSD_DURATION), + "aplusb", + 
Literal("12:50:00", datatype=_XSD_TIME), + ), + ( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("PT3H30M", datatype=_XSD_DURATION), + "aplusb", + Literal("23:30:00", datatype=_XSD_TIME), + ), + ( + Literal("3", datatype=_XSD_INTEGER), + Literal("5", datatype=_XSD_INTEGER), + "aplusb", + Literal("8", datatype=_XSD_INTEGER), + ), + ( + Literal("3", datatype=_XSD_INTEGER), + Literal("5", datatype=_XSD_INTEGER), + "bminusa", + Literal("2", datatype=_XSD_INTEGER), + ), + ( + Literal("5.3", datatype=_XSD_FLOAT), + Literal("8.5", datatype=_XSD_FLOAT), + "bminusa", + Literal("3.2", datatype=_XSD_FLOAT), + ), + ( + Literal("5.3", datatype=_XSD_DECIMAL), + Literal("8.5", datatype=_XSD_DECIMAL), + "bminusa", + Literal("3.2", datatype=_XSD_DECIMAL), + ), + ( + Literal("5.3", datatype=_XSD_DOUBLE), + Literal("8.5", datatype=_XSD_DOUBLE), + "aminusb", + Literal("-3.2", datatype=_XSD_DOUBLE), + ), + ( + Literal("8.5", datatype=_XSD_DOUBLE), + Literal("5.3", datatype=_XSD_DOUBLE), + "aminusb", + Literal("3.2", datatype=_XSD_DOUBLE), + ), + ( + Literal(isodate.Duration(hours=1)), + Literal(isodate.Duration(hours=1)), + "aplusb", + Literal(isodate.Duration(hours=2)), + ), + ( + Literal(datetime.timedelta(days=1)), + Literal(datetime.timedelta(days=1)), + "aplusb", + Literal(datetime.timedelta(days=2)), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(isodate.Duration(hours=1)), + "aplusb", + Literal("05:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(isodate.Duration(days=1)), + "aplusb", + Literal("2011-11-05", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(isodate.Duration(days=1)), + "aplusb", + Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(datetime.timedelta(hours=1)), + "aplusb", + Literal("05:23:01.000384", 
datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(datetime.timedelta(days=1)), + "aplusb", + Literal("2011-11-05", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(datetime.timedelta(days=1)), + "aplusb", + Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(isodate.Duration(hours=1)), + "aminusb", + Literal("03:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(isodate.Duration(days=1)), + "aminusb", + Literal("2011-11-03", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(isodate.Duration(days=1)), + "aminusb", + Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(datetime.timedelta(hours=1)), + "aminusb", + Literal("03:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(datetime.timedelta(days=1)), + "aminusb", + Literal("2011-11-03", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(datetime.timedelta(days=1)), + "aminusb", + Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal("5", datatype=XSD.integer), + Literal("10", datatype=XSD.integer), + "bminusa", + Literal("5", datatype=XSD.integer), + ), + ( + Literal("5"), + Literal("10", datatype=_XSD_INTEGER), + "aminusb", + TypeError( + "Minuend Literal must have Numeric, Date, Datetime or Time datatype." 
), - ( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12:50:00", datatype=_XSD_TIME), - "bminusa", - Literal("-PT8H2M", datatype=_XSD_DURATION), + ), + ( + Literal("5"), + Literal("10", datatype=_XSD_INTEGER), + "bminusa", + TypeError( + "Subtrahend Literal must have Numeric, Date, Datetime or Time datatype." ), + ), + *affix_tuples( ( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("23:30:00", datatype=_XSD_TIME), - "bminusa", - Literal("PT3H30M", datatype=_XSD_DURATION), - ), + Literal("5", datatype=_XSD_INTEGER), + Literal("10", datatype=_XSD_FLOAT), + ), + [ + ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), + ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), + ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), + ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), + ], + None, + ), + *affix_tuples( + ( + Literal("5", datatype=_XSD_FLOAT), + Literal("10", datatype=_XSD_DECIMAL), + ), + [ + ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), + ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), + ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), + ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), + ], + None, + ), + *affix_tuples( + ( + Literal("5", datatype=_XSD_FLOAT), + Literal("10", datatype=_XSD_DOUBLE), + ), + [ + ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), + ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), + ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), + ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), + ], + None, + ), + *affix_tuples( + ( + Literal(Decimal("1.2121214312312")), + Literal(1), + ), + [ + ("aminusb", Literal(Decimal("0.212121"))), + ("aplusb", Literal(Decimal("2.212121"))), + ("bminusa", Literal(Decimal("-0.212121"))), + ("bplusa", Literal(Decimal("2.212121"))), + ], + None, + ), + *affix_tuples( ( - Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), Literal("P31D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), + Literal("P5D", datatype=_XSD_DURATION), ), + [ + 
("aplusb", Literal("P36D", datatype=_XSD_DURATION)), + ("aminusb", Literal("P26D", datatype=_XSD_DURATION)), + ], + None, + ), + *affix_tuples( ( - Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), Literal("P119D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), - ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), - ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), - Literal("P123D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), - ), - ( - Literal("2006-08-01", datatype=_XSD_DATE), - Literal("P92D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-11-01", datatype=_XSD_DATE), - ), - ( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("-PT8H2M", datatype=_XSD_DURATION), - "aplusb", - Literal("12:50:00", datatype=_XSD_TIME), - ), - ( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("PT3H30M", datatype=_XSD_DURATION), - "aplusb", - Literal("23:30:00", datatype=_XSD_TIME), - ), - ( - Literal("3", datatype=_XSD_INTEGER), - Literal("5", datatype=_XSD_INTEGER), - "aplusb", - Literal("8", datatype=_XSD_INTEGER), - ), - ( - Literal("3", datatype=_XSD_INTEGER), - Literal("5", datatype=_XSD_INTEGER), - "bminusa", - Literal("2", datatype=_XSD_INTEGER), - ), - ( - Literal("5.3", datatype=_XSD_FLOAT), - Literal("8.5", datatype=_XSD_FLOAT), - "bminusa", - Literal("3.2", datatype=_XSD_FLOAT), - ), - ( - Literal("5.3", datatype=_XSD_DECIMAL), - Literal("8.5", datatype=_XSD_DECIMAL), - "bminusa", - Literal("3.2", datatype=_XSD_DECIMAL), - ), - ( - Literal("5.3", datatype=_XSD_DOUBLE), - Literal("8.5", datatype=_XSD_DOUBLE), - "aminusb", - Literal("-3.2", datatype=_XSD_DOUBLE), - ), - ( - Literal("8.5", datatype=_XSD_DOUBLE), - Literal("5.3", datatype=_XSD_DOUBLE), - "aminusb", - Literal("3.2", datatype=_XSD_DOUBLE), - ), - ( - 
Literal(isodate.Duration(hours=1)), - Literal(isodate.Duration(hours=1)), - "aplusb", - Literal(isodate.Duration(hours=2)), + Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), ), + [ + ("aplusb", TypeError(r".*datatype.*")), + ("aminusb", TypeError(r".*datatype.*")), + ], + None, + ), + *affix_tuples( ( + Literal(isodate.Duration(days=4)), Literal(datetime.timedelta(days=1)), - Literal(datetime.timedelta(days=1)), - "aplusb", - Literal(datetime.timedelta(days=2)), ), - ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), - Literal(isodate.Duration(hours=1)), - "aplusb", - Literal("05:23:01.000384", datatype=XSD.time), - ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(isodate.Duration(days=1)), - "aplusb", - Literal("2011-11-05", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") + [ + ( + "aplusb", + TypeError( + r"Cannot add a Literal of datatype.*to a Literal of datatype.*" + ), ), - Literal(isodate.Duration(days=1)), - "aplusb", - Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), - ), - ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), - Literal(datetime.timedelta(hours=1)), - "aplusb", - Literal("05:23:01.000384", datatype=XSD.time), - ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(datetime.timedelta(days=1)), - "aplusb", - Literal("2011-11-05", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") + ( + "aminusb", + TypeError( + r"Cannot subtract a Literal of datatype.*from a Literal of datatype.*" + ), ), - Literal(datetime.timedelta(days=1)), - "aplusb", - Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), - ), - ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), - Literal(isodate.Duration(hours=1)), - "aminusb", - Literal("03:23:01.000384", datatype=XSD.time), - ), + ], + None, + ), + *affix_tuples( ( - 
Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(isodate.Duration(days=4)), Literal(isodate.Duration(days=1)), - "aminusb", - Literal("2011-11-03", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") - ), - Literal(isodate.Duration(days=1)), - "aminusb", - Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), ), + [ + ("aplusb", Literal(isodate.Duration(days=5))), + ("aminusb", Literal(isodate.Duration(days=3))), + ], + None, + ), + *affix_tuples( ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(datetime.timedelta(hours=4)), Literal(datetime.timedelta(hours=1)), - "aminusb", - Literal("03:23:01.000384", datatype=XSD.time), - ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(datetime.timedelta(days=1)), - "aminusb", - Literal("2011-11-03", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") - ), - Literal(datetime.timedelta(days=1)), - "aminusb", - Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), - ), - ( - Literal("5", datatype=XSD.integer), - Literal("10", datatype=XSD.integer), - "bminusa", - Literal("5", datatype=XSD.integer), - ), - ( - Literal("5"), - Literal("10", datatype=_XSD_INTEGER), - "aminusb", - TypeError( - "Minuend Literal must have Numeric, Date, Datetime or Time datatype." - ), - ), - ( - Literal("5"), - Literal("10", datatype=_XSD_INTEGER), - "bminusa", - TypeError( - "Subtrahend Literal must have Numeric, Date, Datetime or Time datatype." 
- ), - ), - *affix_tuples( - ( - Literal("5", datatype=_XSD_INTEGER), - Literal("10", datatype=_XSD_FLOAT), - ), - [ - ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), - ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), - ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), - ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), - ], - None, - ), - *affix_tuples( - ( - Literal("5", datatype=_XSD_FLOAT), - Literal("10", datatype=_XSD_DECIMAL), - ), - [ - ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), - ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), - ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), - ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), - ], - None, - ), - *affix_tuples( - ( - Literal("5", datatype=_XSD_FLOAT), - Literal("10", datatype=_XSD_DOUBLE), - ), - [ - ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), - ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), - ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), - ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), - ], - None, - ), - *affix_tuples( - ( - Literal(Decimal("1.2121214312312")), - Literal(1), - ), - [ - ("aminusb", Literal(Decimal("0.212121"))), - ("aplusb", Literal(Decimal("2.212121"))), - ("bminusa", Literal(Decimal("-0.212121"))), - ("bplusa", Literal(Decimal("2.212121"))), - ], - None, - ), - *affix_tuples( - ( - Literal("P31D", datatype=_XSD_DURATION), - Literal("P5D", datatype=_XSD_DURATION), - ), - [ - ("aplusb", Literal("P36D", datatype=_XSD_DURATION)), - ("aminusb", Literal("P26D", datatype=_XSD_DURATION)), - ], - None, ), - *affix_tuples( - ( - Literal("P119D", datatype=_XSD_DURATION), - Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), - ), - [ - ("aplusb", TypeError(r".*datatype.*")), - ("aminusb", TypeError(r".*datatype.*")), - ], - None, - ), - *affix_tuples( - ( - Literal(isodate.Duration(days=4)), - Literal(datetime.timedelta(days=1)), - ), - [ - ( - "aplusb", - TypeError( - r"Cannot add a Literal of datatype.*to a Literal of datatype.*" - ), - ), - ( - "aminusb", - 
TypeError( - r"Cannot subtract a Literal of datatype.*from a Literal of datatype.*" - ), - ), - ], - None, - ), - *affix_tuples( - ( - Literal(isodate.Duration(days=4)), - Literal(isodate.Duration(days=1)), - ), - [ - ("aplusb", Literal(isodate.Duration(days=5))), - ("aminusb", Literal(isodate.Duration(days=3))), - ], - None, - ), - *affix_tuples( - ( - Literal(datetime.timedelta(hours=4)), - Literal(datetime.timedelta(hours=1)), - ), - [ - ("aplusb", Literal(datetime.timedelta(hours=5))), - ("aminusb", Literal(datetime.timedelta(hours=3))), - ], - None, - ), - ], - ) - def test_literal_addsub( - self, - a: Literal, - b: Literal, - op: str, - expected_result: Union[Literal, Type[Exception], Exception], - ) -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - expected_exception: Optional[Exception] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) - elif isinstance(expected_result, Exception): - expected_exception = expected_result - catcher = xstack.enter_context(pytest.raises(type(expected_exception))) - if op == "aplusb": - result = a + b - - elif op == "aminusb": - result = a - b - elif op == "bminusa": - result = b - a - elif op == "bplusa": - result = b + a - else: - raise ValueError(f"invalid operation {op}") - logging.debug("result = %r", result) - if catcher is not None or expected_exception is not None: - assert catcher is not None - assert catcher.value is not None - if expected_exception is not None: - assert catcher.match(expected_exception.args[0]) + [ + ("aplusb", Literal(datetime.timedelta(hours=5))), + ("aminusb", Literal(datetime.timedelta(hours=3))), + ], + None, + ), + ], +) +def test_literal_addsub( + a: Literal, + b: Literal, + op: str, + expected_result: Union[Literal, Type[Exception], Exception], +) -> None: + catcher: Optional[pytest.ExceptionInfo[Exception]] = None + 
expected_exception: Optional[Exception] = None + with ExitStack() as xstack: + if isinstance(expected_result, type) and issubclass(expected_result, Exception): + catcher = xstack.enter_context(pytest.raises(expected_result)) + elif isinstance(expected_result, Exception): + expected_exception = expected_result + catcher = xstack.enter_context(pytest.raises(type(expected_exception))) + if op == "aplusb": + result = a + b + + elif op == "aminusb": + result = a - b + elif op == "bminusa": + result = b - a + elif op == "bplusa": + result = b + a else: - assert isinstance(expected_result, Literal) - assert expected_result == result - - @pytest.mark.parametrize( - "a_value, b_value, result_value, datatype", - [ - [3, 5, 2, XSD.integer], - [5.3, 8.5, 3.2, XSD.decimal], - [5.3, 8.5, 3.2, XSD.double], - [5.3, 8.5, 3.2, XSD.float], - # [XSD.byte")], - [3, 5, 2, XSD.int], - [5.3, 8.5, 3.2, XSD.long], - [-3, -5, -2, XSD.negativeInteger], - [3, 5, 2, XSD.nonNegativeInteger], - [-5.3, -8.5, -3.2, XSD.nonPositiveInteger], - [3, 5, 2, XSD.positiveInteger], - [3, 5, 2, XSD.short], - [0, 0, 0, XSD.unsignedByte], - [3, 5, 2, XSD.unsignedInt], - [5.3, 8.5, 3.2, XSD.unsignedLong], - [5.3, 8.5, 3.2, XSD.unsignedShort], - ], - ) - def test_numeric_literals( - self, - a_value: Union[int, float], - b_value: Union[int, float], - result_value: Union[int, float], - datatype: URIRef, - ) -> None: - a = Literal(a_value, datatype=datatype) - b = Literal(b_value, datatype=datatype) - - result = b - a - expected = Literal(result_value, datatype=datatype) - assert result == expected, repr(result) - - -class TestNew: - # NOTE: Please use TestNewPT for new tests instead of this which is written - # for unittest. 
- def test_cant_pass_lang_and_datatype(self) -> None: - with pytest.raises(TypeError): - Literal("foo", lang="en", datatype=URIRef("http://example.com/")) - - def test_cant_pass_invalid_lang(self) -> None: - with pytest.raises(ValueError): - Literal("foo", lang="999") - - def test_from_other_literal(self) -> None: - l = Literal(1) - l2 = Literal(l) - assert isinstance(l.value, int) - assert isinstance(l2.value, int) - - # change datatype - l = Literal("1") - l2 = Literal(l, datatype=rdflib.XSD.integer) - assert isinstance(l2.value, int) - - def test_datatype_gets_auto_uri_ref_conversion(self) -> None: - # drewp disapproves of this behavior, but it should be - # represented in the tests - x = Literal("foo", datatype="http://example.com/") - assert isinstance(x.datatype, URIRef) - - x = Literal("foo", datatype=Literal("pennies")) - assert x.datatype == URIRef("pennies") - - -class TestRepr: - def test_omits_missing_datatype_and_lang(self) -> None: - assert repr(Literal("foo")) == "rdflib.term.Literal('foo')" - - def test_omits_missing_datatype(self) -> None: - assert ( - repr(Literal("foo", lang="en")) == "rdflib.term.Literal('foo', lang='en')" - ) - - def test_omits_missing_lang(self) -> None: - assert ( - repr(Literal("foo", datatype=URIRef("http://example.com/"))) - == "rdflib.term.Literal('foo', datatype=rdflib.term.URIRef('http://example.com/'))" - ) - - def test_subclass_name_appears_in_repr(self) -> None: - class MyLiteral(Literal): - pass - - x = MyLiteral("foo") - assert repr(x) == "MyLiteral('foo')" - - -class TestDoubleOutput: - def test_no_dangling_point(self) -> None: - """confirms the fix for https://github.com/RDFLib/rdflib/issues/237""" - vv = Literal("0.88", datatype=_XSD_DOUBLE) - out = vv._literal_n3(use_plain=True) - assert out in ["8.8e-01", "0.88"], out - - -class TestParseBoolean: - """confirms the fix for https://github.com/RDFLib/rdflib/issues/913""" - - def test_true_boolean(self) -> None: - test_value = Literal("tRue", 
datatype=_XSD_BOOLEAN) - assert test_value.value - test_value = Literal("1", datatype=_XSD_BOOLEAN) - assert test_value.value - - def test_false_boolean(self) -> None: - test_value = Literal("falsE", datatype=_XSD_BOOLEAN) - assert test_value.value is False - test_value = Literal("0", datatype=_XSD_BOOLEAN) - assert test_value.value is False - - def test_non_false_boolean(self) -> None: - with pytest.warns( - UserWarning, - match=r"Parsing weird boolean, 'abcd' does not map to True or False", - ): - test_value = Literal("abcd", datatype=_XSD_BOOLEAN) - assert test_value.value is False - - with pytest.warns( - UserWarning, - match=r"Parsing weird boolean, '10' does not map to True or False", - ): - test_value = Literal("10", datatype=_XSD_BOOLEAN) - assert test_value.value is False - - -class TestBindings: - def test_binding(self, clear_bindings: None) -> None: - class a: - def __init__(self, v: str) -> None: - self.v = v[3:-3] - - def __str__(self) -> str: - return "<<<%s>>>" % self.v - - dtA = rdflib.URIRef("urn:dt:a") - bind(dtA, a) - - va = a("<<<2>>>") - la = Literal(va, normalize=True) - assert la.value == va - assert la.datatype == dtA - - la2 = Literal("<<<2>>>", datatype=dtA) - assert isinstance(la2.value, a) - assert la2.value.v == va.v - - class b: - def __init__(self, v: str) -> None: - self.v = v[3:-3] - - def __str__(self) -> str: - return "B%s" % self.v - - dtB = rdflib.URIRef("urn:dt:b") - bind(dtB, b, None, lambda x: "<<<%s>>>" % x) - - vb = b("<<<3>>>") - lb = Literal(vb, normalize=True) - assert lb.value == vb - assert lb.datatype == dtB - - def test_specific_binding(self, clear_bindings: None) -> None: - def lexify(s: str) -> str: - return "--%s--" % s - - def unlexify(s: str) -> str: - return s[2:-2] - - datatype = rdflib.URIRef("urn:dt:mystring") - - # Datatype-specific rule - bind(datatype, str, unlexify, lexify, datatype_specific=True) - - s = "Hello" - normal_l = Literal(s) - assert str(normal_l) == s - assert normal_l.toPython() == s - 
assert normal_l.datatype is None - - specific_l = Literal("--%s--" % s, datatype=datatype) - assert str(specific_l) == lexify(s) - assert specific_l.toPython() == s - assert specific_l.datatype == datatype - - -class TestXsdLiterals: - @pytest.mark.parametrize( - ["lexical", "literal_type", "value_cls"], - [ - # these literals do not get converted to Python types - ("ABCD", XSD.integer, None), - ("ABCD", XSD.gYear, None), - ("-10000", XSD.gYear, None), - ("-1921-00", XSD.gYearMonth, None), - ("1921-00", XSD.gMonthDay, None), - ("1921-13", XSD.gMonthDay, None), - ("-1921-00", XSD.gMonthDay, None), - ("10", XSD.gDay, None), - ("-1", XSD.gDay, None), - ("0000", XSD.gYear, None), - ("0000-00-00", XSD.date, None), - ("NOT A VALID HEX STRING", XSD.hexBinary, None), - ("NOT A VALID BASE64 STRING", XSD.base64Binary, None), - # these literals get converted to python types - ("1921-05-01", XSD.date, datetime.date), - ("1921-05-01T00:00:00", XSD.dateTime, datetime.datetime), - ("1921-05", XSD.gYearMonth, datetime.date), - ("0001-01", XSD.gYearMonth, datetime.date), - ("0001-12", XSD.gYearMonth, datetime.date), - ("2002-01", XSD.gYearMonth, datetime.date), - ("9999-01", XSD.gYearMonth, datetime.date), - ("9999-12", XSD.gYearMonth, datetime.date), - ("1921", XSD.gYear, datetime.date), - ("2000", XSD.gYear, datetime.date), - ("0001", XSD.gYear, datetime.date), - ("9999", XSD.gYear, datetime.date), - ("1982", XSD.gYear, datetime.date), - ("2002", XSD.gYear, datetime.date), - ("1921-05-01T00:00:00+00:30", XSD.dateTime, datetime.datetime), - ("1921-05-01T00:00:00-00:30", XSD.dateTime, datetime.datetime), - ("true", XSD.boolean, bool), - ("abcdef0123", XSD.hexBinary, bytes), - ("", XSD.hexBinary, bytes), - ("UkRGTGli", XSD.base64Binary, bytes), - ("", XSD.base64Binary, bytes), - ("0.0000000000000000000000000000001", XSD.decimal, Decimal), - ("0.1", XSD.decimal, Decimal), - ("1", XSD.integer, int), - ], - ) - def test_make_literals( - self, lexical: str, literal_type: URIRef, 
value_cls: Optional[type] - ) -> None: - """ - Tests literal construction. - """ - self.check_make_literals(lexical, literal_type, value_cls) - - @pytest.mark.parametrize( - ["lexical", "literal_type", "value_cls"], - [ - pytest.param(*params, marks=pytest.mark.xfail(raises=AssertionError)) - for params in [ - ("1921-01Z", XSD.gYearMonth, datetime.date), - ("1921Z", XSD.gYear, datetime.date), - ("1921-00", XSD.gYearMonth, datetime.date), - ("1921-05-01Z", XSD.date, datetime.date), - ("1921-05-01+00:30", XSD.date, datetime.date), - ("1921-05-01+00:30", XSD.date, datetime.date), - ("1921-05-01+00:00", XSD.date, datetime.date), - ("1921-05-01+00:00", XSD.date, datetime.date), - ("1921-05-01T00:00:00Z", XSD.dateTime, datetime.datetime), - ("1e-31", XSD.decimal, None), # This is not a valid decimal value - ] - ], + raise ValueError(f"invalid operation {op}") + logging.debug("result = %r", result) + if catcher is not None or expected_exception is not None: + assert catcher is not None + assert catcher.value is not None + if expected_exception is not None: + assert catcher.match(expected_exception.args[0]) + else: + assert isinstance(expected_result, Literal) + assert expected_result == result + + +@pytest.mark.parametrize( + "a_value, b_value, result_value, datatype", + [ + [3, 5, 2, XSD.integer], + [5.3, 8.5, 3.2, XSD.decimal], + [5.3, 8.5, 3.2, XSD.double], + [5.3, 8.5, 3.2, XSD.float], + # [XSD.byte")], + [3, 5, 2, XSD.int], + [5.3, 8.5, 3.2, XSD.long], + [-3, -5, -2, XSD.negativeInteger], + [3, 5, 2, XSD.nonNegativeInteger], + [-5.3, -8.5, -3.2, XSD.nonPositiveInteger], + [3, 5, 2, XSD.positiveInteger], + [3, 5, 2, XSD.short], + [0, 0, 0, XSD.unsignedByte], + [3, 5, 2, XSD.unsignedInt], + [5.3, 8.5, 3.2, XSD.unsignedLong], + [5.3, 8.5, 3.2, XSD.unsignedShort], + ], +) +def test_numeric_literals( + a_value: Union[int, float], + b_value: Union[int, float], + result_value: Union[int, float], + datatype: URIRef, +) -> None: + a = Literal(a_value, datatype=datatype) + b = 
Literal(b_value, datatype=datatype) + + result = b - a + expected = Literal(result_value, datatype=datatype) + assert result == expected, repr(result) + + +def test_cant_pass_lang_and_datatype() -> None: + with pytest.raises(TypeError): + Literal("foo", lang="en", datatype=URIRef("http://example.com/")) + + +def test_cant_pass_invalid_lang_int() -> None: + with pytest.raises(ValueError): + Literal("foo", lang="999") + + +def test_from_other_literal() -> None: + l = Literal(1) + l2 = Literal(l) + assert isinstance(l.value, int) + assert isinstance(l2.value, int) + + # change datatype + l = Literal("1") + l2 = Literal(l, datatype=rdflib.XSD.integer) + assert isinstance(l2.value, int) + + +def test_datatype_gets_auto_uri_ref_conversion() -> None: + # drewp disapproves of this behavior, but it should be + # represented in the tests + x = Literal("foo", datatype="http://example.com/") + assert isinstance(x.datatype, URIRef) + + x = Literal("foo", datatype=Literal("pennies")) + assert x.datatype == URIRef("pennies") + + +def test_omits_missing_datatype_and_lang() -> None: + assert repr(Literal("foo")) == "rdflib.term.Literal('foo')" + + +def test_omits_missing_datatype() -> None: + assert repr(Literal("foo", lang="en")) == "rdflib.term.Literal('foo', lang='en')" + + +def test_omits_missing_lang() -> None: + assert ( + repr(Literal("foo", datatype=URIRef("http://example.com/"))) + == "rdflib.term.Literal('foo', datatype=rdflib.term.URIRef('http://example.com/'))" ) - def test_make_literals_ki( - self, lexical: str, literal_type: URIRef, value_cls: Optional[type] - ) -> None: - """ - Known issues with literal construction. 
- """ - self.check_make_literals(lexical, literal_type, value_cls) - - @classmethod - def check_make_literals( - cls, lexical: str, literal_type: URIRef, value_cls: Optional[type] - ) -> None: - literal = Literal(lexical, datatype=literal_type) - if value_cls is not None: - assert isinstance(literal.value, value_cls) - else: - assert literal.value is None - assert lexical == f"{literal}" + + +def test_subclass_name_appears_in_repr() -> None: + class MyLiteral(Literal): + pass + + x = MyLiteral("foo") + assert repr(x) == "MyLiteral('foo')" + + +def test_no_dangling_point() -> None: + """confirms the fix for https://github.com/RDFLib/rdflib/issues/237""" + vv = Literal("0.88", datatype=_XSD_DOUBLE) + out = vv._literal_n3(use_plain=True) + assert out in ["8.8e-01", "0.88"], out + + +def test_true_boolean() -> None: + test_value = Literal("tRue", datatype=_XSD_BOOLEAN) + assert test_value.value + test_value = Literal("1", datatype=_XSD_BOOLEAN) + assert test_value.value + + +def test_false_boolean() -> None: + test_value = Literal("falsE", datatype=_XSD_BOOLEAN) + assert test_value.value is False + test_value = Literal("0", datatype=_XSD_BOOLEAN) + assert test_value.value is False + + +def test_non_false_boolean() -> None: + with pytest.warns( + UserWarning, + match=r"Parsing weird boolean, 'abcd' does not map to True or False", + ): + test_value = Literal("abcd", datatype=_XSD_BOOLEAN) + assert test_value.value is False + + with pytest.warns( + UserWarning, + match=r"Parsing weird boolean, '10' does not map to True or False", + ): + test_value = Literal("10", datatype=_XSD_BOOLEAN) + assert test_value.value is False + + +def test_binding(clear_bindings: None) -> None: + class a: + def __init__(self, v: str) -> None: + self.v = v[3:-3] + + def __str__(self) -> str: + return "<<<%s>>>" % self.v + + dtA = rdflib.URIRef("urn:dt:a") + bind(dtA, a) + + va = a("<<<2>>>") + la = Literal(va, normalize=True) + assert la.value == va + assert la.datatype == dtA + + la2 = 
Literal("<<<2>>>", datatype=dtA) + assert isinstance(la2.value, a) + assert la2.value.v == va.v + + class b: + def __init__(self, v: str) -> None: + self.v = v[3:-3] + + def __str__(self) -> str: + return "B%s" % self.v + + dtB = rdflib.URIRef("urn:dt:b") + bind(dtB, b, None, lambda x: "<<<%s>>>" % x) + + vb = b("<<<3>>>") + lb = Literal(vb, normalize=True) + assert lb.value == vb + assert lb.datatype == dtB + + +def test_specific_binding(clear_bindings: None) -> None: + def lexify(s: str) -> str: + return "--%s--" % s + + def unlexify(s: str) -> str: + return s[2:-2] + + datatype = rdflib.URIRef("urn:dt:mystring") + + # Datatype-specific rule + bind(datatype, str, unlexify, lexify, datatype_specific=True) + + s = "Hello" + normal_l = Literal(s) + assert str(normal_l) == s + assert normal_l.toPython() == s + assert normal_l.datatype is None + + specific_l = Literal("--%s--" % s, datatype=datatype) + assert str(specific_l) == lexify(s) + assert specific_l.toPython() == s + assert specific_l.datatype == datatype + + +@pytest.mark.parametrize( + ["lexical", "literal_type", "value_cls"], + [ + # these literals do not get converted to Python types + ("ABCD", XSD.integer, None), + ("ABCD", XSD.gYear, None), + ("-10000", XSD.gYear, None), + ("-1921-00", XSD.gYearMonth, None), + ("1921-00", XSD.gMonthDay, None), + ("1921-13", XSD.gMonthDay, None), + ("-1921-00", XSD.gMonthDay, None), + ("10", XSD.gDay, None), + ("-1", XSD.gDay, None), + ("0000", XSD.gYear, None), + ("0000-00-00", XSD.date, None), + ("NOT A VALID HEX STRING", XSD.hexBinary, None), + ("NOT A VALID BASE64 STRING", XSD.base64Binary, None), + # these literals get converted to python types + ("1921-05-01", XSD.date, datetime.date), + ("1921-05-01T00:00:00", XSD.dateTime, datetime.datetime), + ("1921-05", XSD.gYearMonth, datetime.date), + ("0001-01", XSD.gYearMonth, datetime.date), + ("0001-12", XSD.gYearMonth, datetime.date), + ("2002-01", XSD.gYearMonth, datetime.date), + ("9999-01", XSD.gYearMonth, 
datetime.date), + ("9999-12", XSD.gYearMonth, datetime.date), + ("1921", XSD.gYear, datetime.date), + ("2000", XSD.gYear, datetime.date), + ("0001", XSD.gYear, datetime.date), + ("9999", XSD.gYear, datetime.date), + ("1982", XSD.gYear, datetime.date), + ("2002", XSD.gYear, datetime.date), + ("1921-05-01T00:00:00+00:30", XSD.dateTime, datetime.datetime), + ("1921-05-01T00:00:00-00:30", XSD.dateTime, datetime.datetime), + ("true", XSD.boolean, bool), + ("abcdef0123", XSD.hexBinary, bytes), + ("", XSD.hexBinary, bytes), + ("UkRGTGli", XSD.base64Binary, bytes), + ("", XSD.base64Binary, bytes), + ("0.0000000000000000000000000000001", XSD.decimal, Decimal), + ("0.1", XSD.decimal, Decimal), + ("1", XSD.integer, int), + ] + + [ + pytest.param(*params, marks=pytest.mark.xfail(raises=AssertionError)) + for params in [ + ("1921-01Z", XSD.gYearMonth, datetime.date), + ("1921Z", XSD.gYear, datetime.date), + ("1921-00", XSD.gYearMonth, datetime.date), + ("1921-05-01Z", XSD.date, datetime.date), + ("1921-05-01+00:30", XSD.date, datetime.date), + ("1921-05-01+00:30", XSD.date, datetime.date), + ("1921-05-01+00:00", XSD.date, datetime.date), + ("1921-05-01+00:00", XSD.date, datetime.date), + ("1921-05-01T00:00:00Z", XSD.dateTime, datetime.datetime), + ("1e-31", XSD.decimal, None), # This is not a valid decimal value + ] + ], +) +def test_literal_construction_value_class( + lexical: str, literal_type: URIRef, value_cls: Optional[type] +) -> None: + literal = Literal(lexical, datatype=literal_type) + if value_cls is not None: + assert isinstance(literal.value, value_cls) + else: + assert literal.value is None + assert lexical == f"{literal}" def test_exception_in_converter( From e94c25245dda4986839c3b2de3d08e7087251dd1 Mon Sep 17 00:00:00 2001 From: WhiteGobo Date: Sat, 8 Jul 2023 22:38:46 +0200 Subject: [PATCH 113/114] fix: `GROUP_CONCAT` handling of empty separator (issue) (#2474) `GROUP_CONCAT` was handling an empty separator (i.e. 
`""`) incorrectly, it would handle it as if the separator were not set, so essentially it was treated as a single space (i.e. `" "`). This change fixes it so that an empty separator with `GROUP_CONCAT` results in a value with nothing between concatenated values. Fixes --------- Co-authored-by: WhiteGobo --- rdflib/plugins/sparql/aggregates.py | 9 +++++++-- test/test_sparql/test_translate_algebra.py | 23 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/rdflib/plugins/sparql/aggregates.py b/rdflib/plugins/sparql/aggregates.py index d4a7d6592..84ac8936d 100644 --- a/rdflib/plugins/sparql/aggregates.py +++ b/rdflib/plugins/sparql/aggregates.py @@ -245,11 +245,16 @@ def get_value(self) -> None: class GroupConcat(Accumulator): - def __init__(self, aggregation): + value: List[Literal] + + def __init__(self, aggregation: CompValue): super(GroupConcat, self).__init__(aggregation) # only GROUPCONCAT needs to have a list as accumulator self.value = [] - self.separator = aggregation.separator or " " + if aggregation.separator is None: + self.separator = " " + else: + self.separator = aggregation.separator def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: try: diff --git a/test/test_sparql/test_translate_algebra.py b/test/test_sparql/test_translate_algebra.py index 20b23327a..ca9e67bdf 100644 --- a/test/test_sparql/test_translate_algebra.py +++ b/test/test_sparql/test_translate_algebra.py @@ -11,6 +11,7 @@ import rdflib.plugins.sparql.algebra as algebra import rdflib.plugins.sparql.parser as parser +from rdflib import Graph, Literal, URIRef from rdflib.plugins.sparql.algebra import translateAlgebra @@ -304,3 +305,25 @@ def test_roundtrip(test_spec: AlgebraTest, data_path: Path) -> None: # TODO: Execute the raw query (query_text) and the reconstituted query # (query_from_query_from_algebra) against a well defined graph and ensure # they yield the same result. 
+ + +def test_sparql_group_concat(): + """Tests if GROUP_CONCAT correctly uses the separator keyword""" + query = """ + PREFIX : + + SELECT ?subject (GROUP_CONCAT(?object; separator="") + AS ?concatenatedObjects) + WHERE { + VALUES (?subject ?object) { + (:pred "a") + (:pred "b") + (:pred "c") + } + } + GROUP BY ?subject + """ + + g = Graph() + q = dict(g.query(query)) + assert q[URIRef("http://example.org/pred")] == Literal("abc") From 9173357d716f627c3fe63495f61bec9a0eddf7eb Mon Sep 17 00:00:00 2001 From: WhiteGobo Date: Wed, 5 Jul 2023 22:03:14 +0200 Subject: [PATCH 114/114] test: add helper for parameterized outcome tests Add an abstract class, `test.until.outcome.OutcomeChecker`, and implementations that make it easier to check for specific outcomes in parameterized tests. Also changed existing tests that use similar patterns to use `OutcomeChecker` instead. --- test/test_graph/test_graph.py | 23 +- test/test_literal/test_literal.py | 60 ++---- test/test_misc/test_input_source.py | 13 +- test/test_misc/test_networking_redirect.py | 5 +- test/test_namespace/test_namespace.py | 23 +- test/test_namespace/test_namespacemanager.py | 79 +++---- test/test_sparql/test_prefixed_name.py | 21 +- test/test_sparql/test_service.py | 32 +-- test/utils/exceptions.py | 57 ----- test/utils/literal.py | 5 +- test/utils/outcome.py | 213 +++++++++++++++++++ test/utils/test/test_outcome.py | 70 ++++++ 12 files changed, 355 insertions(+), 246 deletions(-) delete mode 100644 test/utils/exceptions.py create mode 100644 test/utils/outcome.py create mode 100644 test/utils/test/test_outcome.py diff --git a/test/test_graph/test_graph.py b/test/test_graph/test_graph.py index 289d577ab..cf5c88eef 100644 --- a/test/test_graph/test_graph.py +++ b/test/test_graph/test_graph.py @@ -1,13 +1,12 @@ # -*- coding: utf-8 -*- import logging import os -from contextlib import ExitStack from pathlib import Path from test.data import TEST_DATA_DIR, bob, cheese, hates, likes, michel, pizza, tarek from 
test.utils import GraphHelper, get_unique_plugin_names -from test.utils.exceptions import ExceptionChecker from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource -from typing import Callable, Optional, Set, Tuple, Union +from test.utils.outcome import ExceptionChecker, OutcomeChecker, OutcomePrimitive +from typing import Callable, Optional, Set, Tuple from urllib.error import HTTPError, URLError import pytest @@ -373,7 +372,7 @@ def test_guess_format_for_parse_http( http_file_server: HTTPFileServer, file: Path, content_type: Optional[str], - expected_result: Union[int, ExceptionChecker], + expected_result: OutcomePrimitive[int], ) -> None: graph = make_graph() headers: Tuple[Tuple[str, str], ...] = tuple() @@ -384,21 +383,11 @@ def test_guess_format_for_parse_http( ProtoFileResource(headers, file), suffix=f"/{file.name}", ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - + checker = OutcomeChecker.from_primitive(expected_result) assert 0 == len(graph) - with ExitStack() as exit_stack: - if isinstance(expected_result, ExceptionChecker): - catcher = exit_stack.enter_context(pytest.raises(expected_result.type)) + with checker.context(): graph.parse(location=file_info.request_url) - - if catcher is not None: - # assert catcher.value is not None - assert isinstance(expected_result, ExceptionChecker) - logging.debug("graph = %s", list(graph.triples((None, None, None)))) - else: - assert isinstance(expected_result, int) - assert expected_result == len(graph) + checker.check(len(graph)) def test_parse_file_uri(make_graph: GraphFactory): diff --git a/test/test_literal/test_literal.py b/test/test_literal/test_literal.py index 2c78c11b7..51f504a14 100644 --- a/test/test_literal/test_literal.py +++ b/test/test_literal/test_literal.py @@ -9,11 +9,11 @@ import datetime import logging -from contextlib import ExitStack from decimal import Decimal from test.utils import affix_tuples from test.utils.literal import LiteralChecker -from typing import 
Any, Callable, Generator, Iterable, Optional, Type, Union +from test.utils.outcome import OutcomeChecker, OutcomePrimitive, OutcomePrimitives +from typing import Any, Callable, Generator, Optional, Type, Union import isodate import pytest @@ -614,16 +614,10 @@ def test_literal_addsub( a: Literal, b: Literal, op: str, - expected_result: Union[Literal, Type[Exception], Exception], + expected_result: OutcomePrimitive[Literal], ) -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - expected_exception: Optional[Exception] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(expected_result)) - elif isinstance(expected_result, Exception): - expected_exception = expected_result - catcher = xstack.enter_context(pytest.raises(type(expected_exception))) + checker = OutcomeChecker[Literal].from_primitive(expected_result) + with checker.context(): if op == "aplusb": result = a + b @@ -636,14 +630,7 @@ def test_literal_addsub( else: raise ValueError(f"invalid operation {op}") logging.debug("result = %r", result) - if catcher is not None or expected_exception is not None: - assert catcher is not None - assert catcher.value is not None - if expected_exception is not None: - assert catcher.match(expected_exception.args[0]) - else: - assert isinstance(expected_result, Literal) - assert expected_result == result + checker.check(result) @pytest.mark.parametrize( @@ -930,7 +917,7 @@ def unlexify(s: str) -> str: @pytest.mark.parametrize( - ["literal_maker", "checks"], + ["literal_maker", "outcome"], [ ( lambda: Literal("foo"), @@ -969,32 +956,9 @@ def unlexify(s: str) -> str: ) def test_literal_construction( literal_maker: Callable[[], Literal], - checks: Union[ - Iterable[Union[LiteralChecker, Literal]], - LiteralChecker, - Literal, - Type[Exception], - ], + outcome: OutcomePrimitives[Literal], ) -> None: - check_error: Optional[Type[Exception]] = None - 
if isinstance(checks, type) and issubclass(checks, Exception): - check_error = checks - checks = [] - elif not isinstance(checks, Iterable): - checks = [checks] - - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if check_error is not None: - catcher = xstack.enter_context(pytest.raises(check_error)) - literal = literal_maker() - - if check_error is not None: - assert catcher is not None - assert catcher.value is not None - - for check in checks: - if isinstance(check, LiteralChecker): - check.check(literal) - else: - check = literal + checker = OutcomeChecker[Literal].from_primitives(outcome) + with checker.context(): + actual_outcome = literal_maker() + checker.check(actual_outcome) diff --git a/test/test_misc/test_input_source.py b/test/test_misc/test_input_source.py index 90e6e238a..2280bcd5e 100644 --- a/test/test_misc/test_input_source.py +++ b/test/test_misc/test_input_source.py @@ -7,11 +7,8 @@ import re from contextlib import ExitStack, contextmanager from dataclasses import dataclass - -# from itertools import product from pathlib import Path from test.utils import GraphHelper -from test.utils.exceptions import ExceptionChecker from test.utils.httpfileserver import ( HTTPFileInfo, HTTPFileServer, @@ -19,6 +16,7 @@ ProtoFileResource, ProtoRedirectResource, ) +from test.utils.outcome import ExceptionChecker from typing import ( # Callable, IO, BinaryIO, @@ -648,9 +646,7 @@ def test_create_input_source( input_source: Optional[InputSource] = None with ExitStack() as xstack: if isinstance(test_params.expected_result, ExceptionChecker): - catcher = xstack.enter_context( - pytest.raises(test_params.expected_result.type) - ) + catcher = xstack.enter_context(test_params.expected_result.context()) input_source = xstack.enter_context( call_create_input_source( @@ -670,8 +666,3 @@ def test_create_input_source( ) logging.debug("input_source = %s, catcher = %s", input_source, catcher) - - if 
isinstance(test_params.expected_result, ExceptionChecker): - assert catcher is not None - assert input_source is None - test_params.expected_result.check(catcher.value) diff --git a/test/test_misc/test_networking_redirect.py b/test/test_misc/test_networking_redirect.py index acde10d71..a1c0cf98b 100644 --- a/test/test_misc/test_networking_redirect.py +++ b/test/test_misc/test_networking_redirect.py @@ -1,7 +1,7 @@ from contextlib import ExitStack from copy import deepcopy -from test.utils.exceptions import ExceptionChecker from test.utils.http import headers_as_message as headers_as_message +from test.utils.outcome import ExceptionChecker from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union from urllib.error import HTTPError from urllib.request import HTTPRedirectHandler, Request @@ -197,14 +197,13 @@ def test_make_redirect_request( result: Optional[Request] = None with ExitStack() as stack: if isinstance(expected_result, ExceptionChecker): - catcher = stack.enter_context(pytest.raises(expected_result.type)) + catcher = stack.enter_context(expected_result.context()) elif expected_result is RaisesIdentity: catcher = stack.enter_context(pytest.raises(HTTPError)) result = _make_redirect_request(http_request, http_error) if isinstance(expected_result, ExceptionChecker): assert catcher is not None - expected_result.check(catcher.value) elif isinstance(expected_result, type): assert catcher is not None assert http_error is catcher.value diff --git a/test/test_namespace/test_namespace.py b/test/test_namespace/test_namespace.py index 3f439133c..00668127c 100644 --- a/test/test_namespace/test_namespace.py +++ b/test/test_namespace/test_namespace.py @@ -1,5 +1,5 @@ -from contextlib import ExitStack -from typing import Any, Optional, Type, Union +from test.utils.outcome import OutcomeChecker, OutcomePrimitive +from typing import Any, Optional from warnings import warn import pytest @@ -306,22 +306,15 @@ def test_expand_curie_exception_messages(self) -> None: 
], ) def test_expand_curie( - self, curie: Any, expected_result: Union[Type[Exception], URIRef, None] + self, curie: Any, expected_result: OutcomePrimitive[URIRef] ) -> None: g = Graph(bind_namespaces="none") nsm = g.namespace_manager nsm.bind("ex", "urn:example:") + + checker = OutcomeChecker.from_primitive(expected_result) + result: Optional[URIRef] = None - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) + with checker.context(): result = g.namespace_manager.expand_curie(curie) - - if catcher is not None: - assert result is None - assert catcher.value is not None - else: - assert expected_result == result + checker.check(result) diff --git a/test/test_namespace/test_namespacemanager.py b/test/test_namespace/test_namespacemanager.py index a35f3ac63..bfeb5326c 100644 --- a/test/test_namespace/test_namespacemanager.py +++ b/test/test_namespace/test_namespacemanager.py @@ -1,11 +1,10 @@ from __future__ import annotations import logging -import re import sys from contextlib import ExitStack from pathlib import Path -from test.utils.exceptions import ExceptionChecker +from test.utils.outcome import ExceptionChecker, OutcomeChecker, OutcomePrimitive from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Set, Tuple, Type, Union import pytest @@ -374,7 +373,7 @@ def test_compute_qname( manager_prefixes: Optional[Mapping[str, Namespace]], graph_prefixes: Optional[Mapping[str, Namespace]], store_prefixes: Optional[Mapping[str, Namespace]], - expected_result: Union[Tuple[str, URIRef, str], Type[Exception], Exception], + expected_result: OutcomePrimitive[Tuple[str, URIRef, str]], ) -> None: """ :param uri: argument to compute_qname() @@ -403,25 +402,13 @@ def test_compute_qname( nm.bind(prefix, ns) def check() -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = 
None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) - if isinstance(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(type(expected_result))) + checker = OutcomeChecker[Tuple[str, URIRef, str]].from_primitive( + expected_result + ) + with checker.context(): actual_result = nm.compute_qname(uri, generate) logging.debug("actual_result = %s", actual_result) - if catcher is not None: - assert catcher is not None - assert catcher.value is not None - if isinstance(expected_result, Exception): - assert re.match(expected_result.args[0], f"{catcher.value}") - else: - assert isinstance(expected_result, tuple) - assert isinstance(actual_result, tuple) - assert actual_result == expected_result + checker.check(actual_result) check() # Run a second time to check caching @@ -452,7 +439,7 @@ def test_compute_qname_strict( generate: bool, bind_namespaces: _NamespaceSetString, additional_prefixes: Optional[Mapping[str, Namespace]], - expected_result: Union[Tuple[str, URIRef, str], Type[Exception], Exception], + expected_result: OutcomePrimitive[Tuple[str, str, str]], ) -> None: graph = Graph(bind_namespaces=bind_namespaces) nm = graph.namespace_manager @@ -462,25 +449,11 @@ def test_compute_qname_strict( nm.bind(prefix, ns) def check() -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) - if isinstance(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(type(expected_result))) + checker = OutcomeChecker[Tuple[str, str, str]].from_primitive(expected_result) + with checker.context(): actual_result = nm.compute_qname_strict(uri, generate) logging.debug("actual_result = %s", actual_result) - if catcher is 
not None: - assert catcher is not None - assert catcher.value is not None - if isinstance(expected_result, Exception): - assert re.match(expected_result.args[0], f"{catcher.value}") - else: - assert isinstance(expected_result, tuple) - assert isinstance(actual_result, tuple) - assert actual_result == expected_result + checker.check(actual_result) check() # Run a second time to check caching @@ -538,16 +511,15 @@ def test_nsm_function() -> NamespaceManager: def test_expand_curie( test_nsm_session: NamespaceManager, curie: str, - expected_result: Union[ExceptionChecker, str], + expected_result: OutcomePrimitive[str], ) -> None: nsm = test_nsm_session - with ExitStack() as xstack: - if isinstance(expected_result, ExceptionChecker): - xstack.enter_context(expected_result) - result = nsm.expand_curie(curie) - - if not isinstance(expected_result, ExceptionChecker): - assert URIRef(expected_result) == result + if isinstance(expected_result, str): + expected_result = URIRef(expected_result) + checker = OutcomeChecker[str].from_primitive(expected_result) + with checker.context(): + actual_result = nsm.expand_curie(curie) + checker.check(actual_result) @pytest.mark.parametrize( @@ -578,7 +550,7 @@ def test_generate_curie( test_nsm_function: NamespaceManager, uri: str, generate: Optional[bool], - expected_result: Union[ExceptionChecker, str], + expected_result: OutcomePrimitive[str], ) -> None: """ .. note:: @@ -587,13 +559,10 @@ def test_generate_curie( effects and will modify the namespace manager. 
""" nsm = test_nsm_function - with ExitStack() as xstack: - if isinstance(expected_result, ExceptionChecker): - xstack.enter_context(expected_result) + checker = OutcomeChecker[str].from_primitive(expected_result) + with checker.context(): if generate is None: - result = nsm.curie(uri) + actual_result = nsm.curie(uri) else: - result = nsm.curie(uri, generate=generate) - - if not isinstance(expected_result, ExceptionChecker): - assert expected_result == result + actual_result = nsm.curie(uri, generate=generate) + checker.check(actual_result) diff --git a/test/test_sparql/test_prefixed_name.py b/test/test_sparql/test_prefixed_name.py index 9ac37b281..e1976dac1 100644 --- a/test/test_sparql/test_prefixed_name.py +++ b/test/test_sparql/test_prefixed_name.py @@ -2,8 +2,7 @@ import itertools import logging -from contextlib import ExitStack -from typing import Optional, Type, Union +from test.utils.outcome import OutcomeChecker, OutcomePrimitive import pyparsing import pytest @@ -11,7 +10,7 @@ import rdflib from rdflib import Graph from rdflib.namespace import Namespace -from rdflib.term import URIRef +from rdflib.term import Node, URIRef RESERVED_PCHARS = [ "%20", @@ -101,17 +100,15 @@ def blank_graph() -> Graph: def test_pnames( pname_ns: str, pname: str, - expected_result: Union[URIRef, Type[Exception]], + expected_result: OutcomePrimitive[Node], blank_graph: Graph, ) -> None: """ The given pname produces the expected result. 
""" - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + checker = OutcomeChecker[Node].from_primitive(expected_result) - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(expected_result)) + with checker.context(): query_string = f"""\ PREFIX {pname_ns}: <{PNAME_PREFIX}> @@ -127,10 +124,4 @@ def test_pnames( triple = triples[0] result = triple[2] logging.debug("result = %s", result) - - if catcher is not None: - assert isinstance(catcher, pytest.ExceptionInfo) - assert catcher.value is not None - else: - assert isinstance(expected_result, URIRef) - assert expected_result == result + checker.check(result) diff --git a/test/test_sparql/test_service.py b/test/test_sparql/test_service.py index 61c317ac6..ef75a8b5d 100644 --- a/test/test_sparql/test_service.py +++ b/test/test_sparql/test_service.py @@ -1,19 +1,9 @@ import json -from contextlib import ExitStack from test.utils import helper from test.utils.http import MethodName, MockHTTPResponse from test.utils.httpservermock import ServedBaseHTTPServerMock -from typing import ( - Dict, - FrozenSet, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, -) +from test.utils.outcome import OutcomeChecker +from typing import Dict, FrozenSet, List, Mapping, Sequence, Tuple, Type, Union import pytest @@ -337,19 +327,15 @@ def test_with_mock( # dependent on the size of the service query. 
function_httpmock.responses[MethodName.GET].append(mock_response) function_httpmock.responses[MethodName.POST].append(mock_response) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(expected_result)) - else: - expected_bindings = [{Variable("var"): item} for item in expected_result] + checker = OutcomeChecker[Sequence[Mapping[Variable, Identifier]]].from_primitive( + [{Variable("var"): item} for item in expected_result] + if isinstance(expected_result, List) + else expected_result + ) + with checker.context(): bindings = graph.query(query).bindings - if catcher is not None: - assert catcher is not None - assert catcher.value is not None - else: - assert expected_bindings == bindings + checker.check(bindings) if __name__ == "__main__": diff --git a/test/utils/exceptions.py b/test/utils/exceptions.py deleted file mode 100644 index 94cfd9c29..000000000 --- a/test/utils/exceptions.py +++ /dev/null @@ -1,57 +0,0 @@ -from __future__ import annotations - -import logging -import re -from dataclasses import dataclass -from types import TracebackType -from typing import Any, ContextManager, Dict, Optional, Pattern, Type, Union - -import pytest -from pytest import ExceptionInfo - - -@dataclass -class ExceptionChecker(ContextManager[ExceptionInfo[Exception]]): - type: Type[Exception] - pattern: Optional[Union[Pattern[str], str]] = None - attributes: Optional[Dict[str, Any]] = None - - def __post_init__(self) -> None: - self._catcher = pytest.raises(self.type, match=self.pattern) - self._exception_info: Optional[ExceptionInfo[Exception]] = None - - def _check_attributes(self, exception: Exception) -> None: - if self.attributes is not None: - for key, value in self.attributes.items(): - logging.debug("checking exception attribute %s=%r", key, value) - assert hasattr(exception, key) - assert getattr(exception, 
key) == value - - def check(self, exception: Exception) -> None: - logging.debug("checking exception %s/%r", type(exception), exception) - pattern = self.pattern - if pattern is not None and not isinstance(pattern, re.Pattern): - pattern = re.compile(pattern) - try: - assert isinstance(exception, self.type) - if pattern is not None: - assert pattern.match(f"{exception}") - self._check_attributes(exception) - except Exception: - logging.error("problem checking exception", exc_info=exception) - raise - - def __enter__(self) -> ExceptionInfo[Exception]: - self._exception_info = self._catcher.__enter__() - return self._exception_info - - def __exit__( - self, - __exc_type: Optional[Type[BaseException]], - __exc_value: Optional[BaseException], - __traceback: Optional[TracebackType], - ) -> bool: - result = self._catcher.__exit__(__exc_type, __exc_value, __traceback) - if self._exception_info is not None: - self._check_attributes(self._exception_info.value) - return result diff --git a/test/utils/literal.py b/test/utils/literal.py index 1b3f37988..b4b8cbf43 100644 --- a/test/utils/literal.py +++ b/test/utils/literal.py @@ -2,13 +2,14 @@ import builtins from dataclasses import dataclass +from test.utils.outcome import NoExceptionChecker from typing import Any, Union from rdflib.term import Literal, URIRef -@dataclass -class LiteralChecker: +@dataclass(frozen=True) +class LiteralChecker(NoExceptionChecker[Literal]): value: Union[builtins.ellipsis, Any] = ... language: Union[builtins.ellipsis, str, None] = ... datatype: Union[builtins.ellipsis, URIRef, None] = ... 
diff --git a/test/utils/outcome.py b/test/utils/outcome.py new file mode 100644 index 000000000..82a96138c --- /dev/null +++ b/test/utils/outcome.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +import abc +import contextlib +import logging +from collections.abc import Iterable as IterableABC +from dataclasses import dataclass +from typing import ( + Any, + Callable, + Dict, + Generator, + Generic, + Iterable, + NoReturn, + Optional, + Pattern, + Sequence, + Type, + TypeVar, + Union, + cast, +) + +import pytest +from pytest import ExceptionInfo + +AnyT = TypeVar("AnyT") + +OutcomePrimitive = Union[ + AnyT, Callable[[AnyT], None], "OutcomeChecker[AnyT]", Type[Exception], Exception +] + +OutcomePrimitives = Union[ + Iterable[Union[AnyT, Callable[[AnyT], None], "OutcomeChecker[AnyT]"]], + OutcomePrimitive, +] + + +class OutcomeChecker(abc.ABC, Generic[AnyT]): + """ + Validates expected outcomes for tests. + + Useful for parameterized test that can result in values or + exceptions. + """ + + @abc.abstractmethod + def check(self, actual: AnyT) -> None: + """ + Check the actual outcome against the expectation. + + This should run inside the checker's context. + + :param outcome: The actual outcome of the test. + :raises AssertionError: If the outcome does not match the + expectation. + :raises RuntimeError: If this method is called when no outcome + is expected. + """ + ... + + @contextlib.contextmanager + @abc.abstractmethod + def context(self) -> Generator[Optional[ExceptionInfo[Exception]], None, None]: + """ + The context in which the test code should run. + + This is necessary for checking exception outcomes. + + :return: A context manager that yields the exception info for + any exceptions that were raised in this context. + :raises AssertionError: If the test does not raise an exception + when one is expected, or if the exception does not match the + expectation. + """ + ... 
+ + @classmethod + def from_primitive( + cls, + primitive: OutcomePrimitive[AnyT], + ) -> OutcomeChecker[AnyT]: + checker = cls._from_special(primitive) + if checker is not None: + return checker + return ValueChecker(cast(AnyT, primitive)) + + @classmethod + def _from_special( + cls, + primitive: Union[ + AnyT, + Callable[[AnyT], None], + OutcomeChecker[AnyT], + Type[Exception], + Exception, + ], + ) -> Optional[OutcomeChecker[AnyT]]: + if isinstance(primitive, OutcomeChecker): + return primitive + if isinstance(primitive, type) and issubclass(primitive, Exception): + return ExceptionChecker(primitive) + if isinstance(primitive, Exception): + return ExceptionChecker(type(primitive), match=primitive.args[0]) + if callable(primitive): + return CallableChecker(cast(Callable[[AnyT], None], primitive)) + return None + + @classmethod + def from_primitives( + cls, + primitives: OutcomePrimitives[AnyT], + ) -> OutcomeChecker[AnyT]: + checker = cls._from_special(primitives) # type: ignore[arg-type] + if checker is not None: + return checker + if isinstance(primitives, IterableABC) and not isinstance( + primitives, (str, bytes) + ): + primitives = iter(primitives) + return AggregateChecker([cls.from_primitive(p) for p in primitives]) + return ValueChecker(cast(AnyT, primitives)) + + +@dataclass(frozen=True) +class NoExceptionChecker(OutcomeChecker[AnyT]): + """ + Base class for checkers that do not expect exceptions. + """ + + @contextlib.contextmanager + def context(self) -> Generator[None, None, None]: + yield None + + +@dataclass(frozen=True) +class AggregateChecker(NoExceptionChecker[AnyT]): + """ + Validates that the outcome matches all of the given checkers. 
+ """ + + checkers: Sequence[OutcomeChecker[AnyT]] + + def check(self, actual: AnyT) -> None: + for checker in self.checkers: + if isinstance(checker, ExceptionChecker): + raise ValueError( + "AggregateChecker should never contain ExceptionChecker" + ) + checker.check(actual) + + +@dataclass(frozen=True) +class ValueChecker(NoExceptionChecker[AnyT]): + """ + Validates that the outcome is a specific value. + + :param value: The expected value. + """ + + expected: AnyT + + def check(self, actual: AnyT) -> None: + assert self.expected == actual + + +@dataclass(frozen=True) +class CallableChecker(NoExceptionChecker[AnyT]): + """ + Validates the outcome with a callable. + + :param callable: The callable that will be called with the outcome + to validate it. + """ + + callable: Callable[[AnyT], None] + + def check(self, actual: AnyT) -> None: + self.callable(actual) + + +@dataclass(frozen=True) +class ExceptionChecker(OutcomeChecker[AnyT]): + """ + Validates that the outcome is a specific exception. + + :param type: The expected exception type. + :param match: A regular expression or string that the exception + message must match. + :param attributes: A dictionary of attributes that the exception + must have and their expected values. 
+ """ + + type: Type[Exception] + match: Optional[Union[Pattern[str], str]] = None + attributes: Optional[Dict[str, Any]] = None + + def check(self, actual: AnyT) -> NoReturn: + raise RuntimeError("ExceptionResult.check_result should never be called") + + def _check_attributes(self, exception: Exception) -> None: + if self.attributes is not None: + for key, value in self.attributes.items(): + logging.debug("checking exception attribute %s=%r", key, value) + assert hasattr(exception, key) + assert getattr(exception, key) == value + + @contextlib.contextmanager + def context(self) -> Generator[ExceptionInfo[Exception], None, None]: + with pytest.raises(self.type, match=self.match) as catcher: + yield catcher + self._check_attributes(catcher.value) diff --git a/test/utils/test/test_outcome.py b/test/utils/test/test_outcome.py new file mode 100644 index 000000000..56a730052 --- /dev/null +++ b/test/utils/test/test_outcome.py @@ -0,0 +1,70 @@ +from contextlib import ExitStack +from test.utils.outcome import ExceptionChecker, OutcomeChecker +from typing import Any, Callable, NoReturn, Optional, Type, Union + +import pytest + + +def _raise( + what: Union[Type[Exception], Callable[..., Exception]], + *args: Any, + **kwargs: Any, +) -> NoReturn: + if isinstance(what, type) and issubclass(what, Exception): + raise what(*args, **kwargs) + elif callable(what): + raise what(*args, **kwargs) + + +@pytest.mark.parametrize( + ("action", "checker", "expected_exception"), + [ + (lambda: _raise(ValueError), ExceptionChecker(ValueError), None), + (None, ExceptionChecker(ValueError), RuntimeError), + ( + lambda: _raise(ValueError, "zzz"), + OutcomeChecker.from_primitive(ValueError(r"z.z")), + None, + ), + ( + lambda: _raise(ValueError, "zzz"), + OutcomeChecker.from_primitive(ValueError(r"zaz")), + AssertionError, + ), + ( + lambda: _raise(ValueError, "ae"), + ExceptionChecker(ValueError, r"ae", {"Not": "Found"}), + AssertionError, + ), + (33, OutcomeChecker.from_primitive(33), None), + 
(33, OutcomeChecker.from_primitive(44), AssertionError), + ( + lambda: _raise(TypeError, "something"), + OutcomeChecker.from_primitive(TypeError), + None, + ), + ( + lambda: 3, + OutcomeChecker.from_primitive(TypeError), + RuntimeError, + ), + ], +) +def test_checker( + action: Union[Callable[[], Any], Any], + checker: ExceptionChecker, + expected_exception: Optional[Type[BaseException]], +) -> None: + """ + Given the action, the checker raises the expected exception, or does + not raise anything if ``expected_exception`` is None. + """ + with ExitStack() as xstack: + if expected_exception is not None: + xstack.enter_context(pytest.raises(expected_exception)) + with checker.context(): + if callable(action): + actual_result = action() + else: + actual_result = action + checker.check(actual_result)