diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 1f6dfc986aa7..8285dbb6881f 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -469,6 +469,10 @@ "filename": "sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_query_text*.py", "words": ["groovin", "Olufsen"] }, + { + "filename": "sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/**/_serialization.py", + "words": ["ctxt", "wday", "mday", "astimezone", "unflattened", "JSONify", "deseralize"] + }, { "filename": "sdk/cognitiveservices/azure-cognitiveservices-language-spellcheck/tests/*.py", "words": ["Apim", "cognituve"] diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/CHANGELOG.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering/CHANGELOG.md index 909e06590023..68495932cc63 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/CHANGELOG.md +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/CHANGELOG.md @@ -5,6 +5,8 @@ ### Features Added ### Breaking Changes +* `QuestionAnsweringProjectsClient` was renamed to `QuestionAnsweringAuthoringClient`. +* The `azure.ai.language.questionanswering.projects` namespace was renamed to `azure.ai.language.questionanswering.authoring` ### Bugs Fixed diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/README.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering/README.md index c5f60cfffdae..70b8072df2d2 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/README.md +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/README.md @@ -28,7 +28,7 @@ pip install azure-ai-language-questionanswering --pre ### Authenticate the client -In order to interact with the Question Answering service, you'll need to create an instance of the [QuestionAnsweringClient][questionanswering_client_class] class or an instance of the [QuestionAnsweringProjectsClient][questionansweringprojects_client_class] for managing projects within your resource. You will need an **endpoint**, and an **API key** to instantiate a client object. For more information regarding authenticating with Cognitive Services, see [Authenticate requests to Azure Cognitive Services][cognitive_auth]. +In order to interact with the Question Answering service, you'll need to create an instance of the [QuestionAnsweringClient][questionanswering_client_class] class or an instance of the [QuestionAnsweringAuthoringClient][questionansweringauthoring_client_class] for managing projects within your resource. You will need an **endpoint**, and an **API key** to instantiate a client object. For more information regarding authenticating with Cognitive Services, see [Authenticate requests to Azure Cognitive Services][cognitive_auth]. 
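For readers hit by the breaking change called out in the CHANGELOG above, a minimal migration sketch (endpoint and key values are placeholders; the constructor shape is unchanged, only the class and namespace names differ):

```python
from azure.core.credentials import AzureKeyCredential

# Before this release:
#   from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient
# After the rename:
from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient

endpoint = "https://{myaccount}.api.cognitive.microsoft.com"
credential = AzureKeyCredential("{api-key}")

client = QuestionAnsweringAuthoringClient(endpoint, credential)
```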
#### Get an API key @@ -54,17 +54,17 @@ credential = AzureKeyCredential("{api-key}") client = QuestionAnsweringClient(endpoint, credential) ``` -#### Create QuestionAnsweringProjectsClient -With your endpoint and API key, you can instantiate a [QuestionAnsweringProjectsClient][questionansweringprojects_client_class]: +#### Create QuestionAnsweringAuthoringClient +With your endpoint and API key, you can instantiate a [QuestionAnsweringAuthoringClient][questionansweringauthoring_client_class]: ```python from azure.core.credentials import AzureKeyCredential -from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient endpoint = "https://{myaccount}.api.cognitive.microsoft.com" credential = AzureKeyCredential("{api-key}") -client = QuestionAnsweringProjectsClient(endpoint, credential) +client = QuestionAnsweringAuthoringClient(endpoint, credential) ``` #### Create a client with an Azure Active Directory Credential @@ -105,8 +105,8 @@ client = QuestionAnsweringClient(endpoint="https://.cogniti The [QuestionAnsweringClient][questionanswering_client_class] is the primary interface for asking questions using a knowledge base with your own information, or text input using pre-trained models. For asynchronous operations, an async `QuestionAnsweringClient` is in the `azure.ai.language.questionanswering.aio` namespace. -### QuestionAnsweringProjectsClient -The [QuestionAnsweringProjectsClient][questionansweringprojects_client_class] provides an interface for managing Question Answering projects. Examples of the available operations include creating and deploying projects, updating your knowledge sources, and updating question and answer pairs. It provides both synchronous and asynchronous APIs. +### QuestionAnsweringAuthoringClient +The [QuestionAnsweringAuthoringClient][questionansweringauthoring_client_class] provides an interface for managing Question Answering projects. Examples of the available operations include creating and deploying projects, updating your knowledge sources, and updating question and answer pairs. It provides both synchronous and asynchronous APIs. 
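The Azure Active Directory snippet in the hunk above arrives garbled (`https://.cogniti...`); a hedged reconstruction of that flow, assuming the usual custom-subdomain endpoint and `DefaultAzureCredential` from `azure-identity`:

```python
from azure.identity import DefaultAzureCredential
from azure.ai.language.questionanswering import QuestionAnsweringClient

# AAD token credentials require the resource's custom subdomain endpoint;
# regional endpoints do not support token authentication.
credential = DefaultAzureCredential()
client = QuestionAnsweringClient(
    endpoint="https://{my-custom-subdomain}.cognitiveservices.azure.com/",
    credential=credential,
)
```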
## Examples @@ -175,21 +175,21 @@ output = await client.get_answers( ) ``` -### QuestionAnsweringProjectsClient +### QuestionAnsweringAuthoringClient #### Create a new project ```python import os from azure.core.credentials import AzureKeyCredential -from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient # get service secrets endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client -client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) +client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) with client: # create project @@ -335,7 +335,7 @@ This project has adopted the [Microsoft Open Source Code of Conduct][code_of_con [azure_core_readme]: https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/core/azure-core/README.md [pip_link]: https://pypi.org/project/pip/ [questionanswering_client_class]: https://azuresdkdocs.blob.core.windows.net/$web/python/azure-ai-language-questionanswering/latest/azure.ai.language.questionanswering.html#azure.ai.language.questionanswering.QuestionAnsweringClient -[questionansweringprojects_client_class]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_question_answering_projects_client.py +[questionansweringauthoring_client_class]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_client.py [questionanswering_refdocs_prompts]: https://azuresdkdocs.blob.core.windows.net/$web/python/azure-ai-language-questionanswering/latest/azure.ai.language.questionanswering.models.html#azure.ai.language.questionanswering.models.KnowledgeBaseAnswerDialog [questionanswering_client_src]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cognitivelanguage/azure-ai-language-questionanswering/ [questionanswering_docs]: https://azure.microsoft.com/services/cognitive-services/qna-maker/ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/__init__.py index 58be7b78a585..71784be05c81 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/__init__.py @@ -6,13 +6,16 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
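The README's "Create a new project" example above is truncated right after `# create project`; a hedged sketch of how it typically continues (the `create_project` method and option keys are drawn from the authoring samples and may differ slightly):

```python
with client:
    # create project (continuation sketch; `client` is the QuestionAnsweringAuthoringClient above)
    project = client.create_project(
        project_name="IssacNewton",
        options={
            "description": "biography of Sir Issac Newton",
            "language": "en",
            "multilingualResource": True,
            "settings": {"defaultAnswer": "no answer"},
        },
    )
    print("Created project:", project["projectName"])
```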
# -------------------------------------------------------------------------- -from ._question_answering_client import QuestionAnsweringClient +from ._client import QuestionAnsweringClient from ._version import VERSION __version__ = VERSION -from ._patch import __all__ as _patch_all -from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = ["QuestionAnsweringClient"] diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_question_answering_client.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_client.py similarity index 93% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_question_answering_client.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_client.py index 37c12b0717ca..bb845a6d3173 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_question_answering_client.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_client.py @@ -9,8 +9,6 @@ from copy import deepcopy from typing import Any -from msrest import Deserializer, Serializer - from azure.core import PipelineClient from azure.core.credentials import AzureKeyCredential from azure.core.rest import HttpRequest, HttpResponse @@ -18,9 +16,12 @@ from . import models from ._configuration import QuestionAnsweringClientConfiguration from ._operations import QuestionAnsweringClientOperationsMixin +from ._serialization import Deserializer, Serializer -class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin): +class QuestionAnsweringClient( + QuestionAnsweringClientOperationsMixin +): # pylint: disable=client-accepts-api-version-keyword """The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language @@ -28,9 +29,9 @@ class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin): href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview">https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview`. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. 
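The `try`/`except ImportError` added to `__init__.py` above tolerates a `_patch` module that re-exports nothing; for reference, a minimal sketch of the customization hooks that convention expects — illustrative only, not this package's actual `_patch.py`:

```python
# _patch.py customization hooks (names follow the azure-sdk code-generation convention).
from typing import List

__all__: List[str] = []  # export hand-written additions from here


def patch_sdk():
    """Optional hook run at import time; customize the generated package here."""
```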
@@ -56,7 +57,7 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: >>> response = client.send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_configuration.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_configuration.py index 92116d7067c7..fb7891c73940 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_configuration.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_configuration.py @@ -22,9 +22,9 @@ class QuestionAnsweringClientConfiguration(Configuration): # pylint: disable=to attributes. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_operations.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_operations.py index 4ea213bc7585..2491b0aa25a5 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_operations.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_operations.py @@ -6,129 +6,188 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar - -from msrest import Serializer +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, + ResourceNotModifiedError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from .. 
import models as _models +from .._serialization import Serializer from .._vendor import MixinABC T = TypeVar("T") -JSONType = Any ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_get_answers_request( - *, project_name: str, deployment_name: str, json: JSONType = None, content: Any = None, **kwargs: Any -) -> HttpRequest: - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", None) # type: Optional[str] +def build_get_answers_request(*, project_name: str, deployment_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL _url = "/:query-knowledgebases" # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters["projectName"] = _SERIALIZER.query("project_name", project_name, "str") - _query_parameters["deploymentName"] = _SERIALIZER.query("deployment_name", deployment_name, "str") - _query_parameters["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["projectName"] = _SERIALIZER.query("project_name", project_name, "str") + _params["deploymentName"] = _SERIALIZER.query("deployment_name", deployment_name, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_get_answers_from_text_request(*, json: JSONType = None, content: Any = None, **kwargs: Any) -> HttpRequest: - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", None) # type: Optional[str] +def build_get_answers_from_text_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL _url = "/:query-text" # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct 
headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str") + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - json=json, - content=content, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) class QuestionAnsweringClientOperationsMixin(MixinABC): - @distributed_trace + @overload def get_answers( - self, options: "_models.AnswersOptions", *, project_name: str, deployment_name: str, **kwargs: Any - ) -> "_models.AnswersResult": + self, + options: _models.AnswersOptions, + *, + project_name: str, + deployment_name: str, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AnswersResult: """Answers the specified question using your knowledge base. Answers the specified question using your knowledge base. - :param options: Post body of the request. + :param options: Post body of the request. Required. :type options: ~azure.ai.language.questionanswering.models.AnswersOptions - :keyword project_name: The name of the project to use. + :keyword project_name: The name of the project to use. Required. + :paramtype project_name: str + :keyword deployment_name: The name of the specific deployment of the project to use. Required. + :paramtype deployment_name: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: AnswersResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_answers( + self, + options: IO, + *, + project_name: str, + deployment_name: str, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AnswersResult: + """Answers the specified question using your knowledge base. + + Answers the specified question using your knowledge base. + + :param options: Post body of the request. Required. + :type options: IO + :keyword project_name: The name of the project to use. Required. + :paramtype project_name: str + :keyword deployment_name: The name of the specific deployment of the project to use. Required. + :paramtype deployment_name: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: AnswersResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_answers( + self, options: Union[_models.AnswersOptions, IO], *, project_name: str, deployment_name: str, **kwargs: Any + ) -> _models.AnswersResult: + """Answers the specified question using your knowledge base. + + Answers the specified question using your knowledge base. + + :param options: Post body of the request. Is either a model type or a IO type. Required. + :type options: ~azure.ai.language.questionanswering.models.AnswersOptions or IO + :keyword project_name: The name of the project to use. Required. 
:paramtype project_name: str - :keyword deployment_name: The name of the specific deployment of the project to use. + :keyword deployment_name: The name of the specific deployment of the project to use. Required. :paramtype deployment_name: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :return: AnswersResult :rtype: ~azure.ai.language.questionanswering.models.AnswersResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType["_models.AnswersResult"] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - _json = self._serialize.body(options, "AnswersOptions") + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AnswersResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + _json = self._serialize.body(options, "AnswersOptions") request = build_get_answers_request( - api_version=api_version, - content_type=content_type, project_name=project_name, deployment_name=deployment_name, + content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), @@ -138,6 +197,7 @@ def get_answers( pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -152,33 +212,88 @@ def get_answers( return deserialized - @distributed_trace + @overload def get_answers_from_text( - self, options: "_models.AnswersFromTextOptions", **kwargs: Any - ) -> "_models.AnswersFromTextResult": + self, options: _models.AnswersFromTextOptions, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.AnswersFromTextResult: """Answers the specified question using the provided text in the body. Answers the specified question using the provided text in the body. - :param options: Post body of the request. + :param options: Post body of the request. Required. :type options: ~azure.ai.language.questionanswering.models.AnswersFromTextOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str :return: AnswersFromTextResult :rtype: ~azure.ai.language.questionanswering.models.AnswersFromTextResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType["_models.AnswersFromTextResult"] + @overload + def get_answers_from_text( + self, options: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.AnswersFromTextResult: + """Answers the specified question using the provided text in the body. + + Answers the specified question using the provided text in the body. - _json = self._serialize.body(options, "AnswersFromTextOptions") + :param options: Post body of the request. Required. + :type options: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: AnswersFromTextResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersFromTextResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_answers_from_text( + self, options: Union[_models.AnswersFromTextOptions, IO], **kwargs: Any + ) -> _models.AnswersFromTextResult: + """Answers the specified question using the provided text in the body. + + Answers the specified question using the provided text in the body. + + :param options: Post body of the request. Is either a model type or a IO type. Required. + :type options: ~azure.ai.language.questionanswering.models.AnswersFromTextOptions or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :return: AnswersFromTextResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersFromTextResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AnswersFromTextResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + _json = self._serialize.body(options, "AnswersFromTextOptions") request = build_get_answers_from_text_request( - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), @@ -188,6 +303,7 @@ def get_answers_from_text( pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_patch.py index 44e32cc5b317..879238de209f 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_operations/_patch.py @@ -18,17 +18,13 @@ def _validate_text_records(records): if not records: raise ValueError("Input documents can not be empty or None") - if isinstance(records, six.string_types): raise TypeError("Input documents cannot be a string.") - if isinstance(records, dict): raise TypeError("Input documents cannot be a dict") - if not all(isinstance(x, six.string_types) for x in records): if not all(isinstance(x, (dict, TextDocument)) for x in records): raise TypeError("Mixing string and dictionary/object document input unsupported.") - request_batch = [] for idx, doc in enumerate(records): if isinstance(doc, six.string_types): @@ -81,7 +77,6 @@ def _handle_metadata_filter_conversion(options_input): raise ValueError("'metadata' must be a sequence of key-value tuples.") except TypeError: raise ValueError("'metadata' must be a sequence of key-value tuples.") - metadata_modified = [{"key": m[0], "value": m[1]} for m in metadata_input] if in_class: filters.metadata_filter.metadata = metadata_modified diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py index b3d0ab26c56d..a858cddc21a9 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_patch.py @@ -7,7 +7,7 @@ 
from typing import Union, Any from azure.core.credentials import AzureKeyCredential, TokenCredential from azure.core.pipeline.policies import AzureKeyCredentialPolicy, BearerTokenCredentialPolicy -from ._question_answering_client import QuestionAnsweringClient as QuestionAnsweringClientGenerated +from ._client import QuestionAnsweringClient as QuestionAnsweringClientGenerated def _authentication_policy(credential, **kwargs): @@ -51,7 +51,6 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCre endpoint = endpoint.rstrip("/") except AttributeError: raise ValueError("Parameter 'endpoint' must be a string.") - super().__init__( endpoint=endpoint, credential=credential, # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_serialization.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_serialization.py new file mode 100644 index 000000000000..7c1dedb5133d --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_serialization.py @@ -0,0 +1,1970 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote # type: ignore +import xml.etree.ElementTree as ET + +import isodate + +from typing import Dict, Any, cast, TYPE_CHECKING + +from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +if TYPE_CHECKING: + from typing import Optional, Union, AnyStr, IO, Mapping + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... 
+ else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str # type: ignore + unicode_str = str # type: ignore + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc # type: ignore +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes=None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError: + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. 
+ :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] + if not kwargs.get("skip_quote", False): + data = [quote(str(d), safe="") for d in data] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. 
+ :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. 
+ - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError: + serialized.append(None) + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key.""" + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if 
internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes=None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_vendor.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_vendor.py index a0a065c82668..f867886df323 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_vendor.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/_vendor.py @@ -12,10 +12,10 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from msrest import Deserializer, Serializer - from azure.core import PipelineClient + from ._serialization import Deserializer, Serializer + class MixinABC(ABC): """DO NOT use this class. It is for internal typing use only.""" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/__init__.py index 5f67f3f62fe2..9e1ff427869d 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/__init__.py @@ -6,10 +6,13 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._question_answering_client import QuestionAnsweringClient +from ._client import QuestionAnsweringClient -from ._patch import __all__ as _patch_all -from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = ["QuestionAnsweringClient"] diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_question_answering_client.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_client.py similarity index 93% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_question_answering_client.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_client.py index 03992a1cd465..968b1171f57d 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_question_answering_client.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_client.py @@ -9,18 +9,19 @@ from copy import deepcopy from typing import Any, Awaitable -from msrest import Deserializer, Serializer - from azure.core import AsyncPipelineClient from azure.core.credentials import AzureKeyCredential from azure.core.rest import AsyncHttpResponse, HttpRequest from .. 
import models +from .._serialization import Deserializer, Serializer from ._configuration import QuestionAnsweringClientConfiguration from ._operations import QuestionAnsweringClientOperationsMixin -class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin): +class QuestionAnsweringClient( + QuestionAnsweringClientOperationsMixin +): # pylint: disable=client-accepts-api-version-keyword """The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language @@ -28,9 +29,9 @@ class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin): href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview">https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview`. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. @@ -56,7 +57,7 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHt >>> response = await client.send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_configuration.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_configuration.py index 8799a34d214e..28afaaba413c 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_configuration.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_configuration.py @@ -22,9 +22,9 @@ class QuestionAnsweringClientConfiguration(Configuration): # pylint: disable=to attributes. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. 
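The async `QuestionAnsweringClient` above documents a raw `send_request` flow in its docstring. Below is a minimal sketch of that flow, assuming a placeholder endpoint and API key and using `asyncio.run` only to drive the coroutine; it mirrors the doctest shown in the client's docstring rather than prescribing a canonical usage.

```python
import asyncio

from azure.core.credentials import AzureKeyCredential
from azure.core.rest import HttpRequest
from azure.ai.language.questionanswering.aio import QuestionAnsweringClient


async def main() -> None:
    # Placeholder endpoint and key -- substitute your resource's values.
    client = QuestionAnsweringClient(
        endpoint="https://<my-account>.api.cognitiveservices.azure.com",
        credential=AzureKeyCredential("<api-key>"),
    )
    async with client:
        # Build a raw request and run it through the client's chained
        # policies, as illustrated in the send_request docstring.
        request = HttpRequest("GET", "https://www.example.org/")
        response = await client.send_request(request)
        print(response.status_code)


asyncio.run(main())
```

The synchronous clients in this diff expose the same `send_request` surface, minus the `async`/`await` keywords.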
diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_operations/_operations.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_operations/_operations.py index 1b4c77e7dbbd..df7c35eb3d8d 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_operations/_operations.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_operations/_operations.py @@ -6,63 +6,139 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, + ResourceNotModifiedError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ... import models as _models from ..._operations._operations import build_get_answers_from_text_request, build_get_answers_request from .._vendor import MixinABC T = TypeVar("T") -JSONType = Any ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class QuestionAnsweringClientOperationsMixin(MixinABC): - @distributed_trace_async + @overload async def get_answers( - self, options: "_models.AnswersOptions", *, project_name: str, deployment_name: str, **kwargs: Any - ) -> "_models.AnswersResult": + self, + options: _models.AnswersOptions, + *, + project_name: str, + deployment_name: str, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AnswersResult: """Answers the specified question using your knowledge base. Answers the specified question using your knowledge base. - :param options: Post body of the request. + :param options: Post body of the request. Required. :type options: ~azure.ai.language.questionanswering.models.AnswersOptions - :keyword project_name: The name of the project to use. + :keyword project_name: The name of the project to use. Required. + :paramtype project_name: str + :keyword deployment_name: The name of the specific deployment of the project to use. Required. + :paramtype deployment_name: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: AnswersResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_answers( + self, + options: IO, + *, + project_name: str, + deployment_name: str, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.AnswersResult: + """Answers the specified question using your knowledge base. + + Answers the specified question using your knowledge base. + + :param options: Post body of the request. Required. + :type options: IO + :keyword project_name: The name of the project to use. Required. 
+ :paramtype project_name: str + :keyword deployment_name: The name of the specific deployment of the project to use. Required. + :paramtype deployment_name: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: AnswersResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_answers( + self, options: Union[_models.AnswersOptions, IO], *, project_name: str, deployment_name: str, **kwargs: Any + ) -> _models.AnswersResult: + """Answers the specified question using your knowledge base. + + Answers the specified question using your knowledge base. + + :param options: Post body of the request. Is either a model type or a IO type. Required. + :type options: ~azure.ai.language.questionanswering.models.AnswersOptions or IO + :keyword project_name: The name of the project to use. Required. :paramtype project_name: str - :keyword deployment_name: The name of the specific deployment of the project to use. + :keyword deployment_name: The name of the specific deployment of the project to use. Required. :paramtype deployment_name: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :return: AnswersResult :rtype: ~azure.ai.language.questionanswering.models.AnswersResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType["_models.AnswersResult"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AnswersResult] - _json = self._serialize.body(options, "AnswersOptions") + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + _json = self._serialize.body(options, "AnswersOptions") request = build_get_answers_request( - api_version=api_version, - content_type=content_type, project_name=project_name, deployment_name=deployment_name, + content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), @@ -72,6 +148,7 @@ async def get_answers( pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -86,33 +163,88 @@ async def get_answers( return deserialized - @distributed_trace_async + @overload async 
def get_answers_from_text( - self, options: "_models.AnswersFromTextOptions", **kwargs: Any - ) -> "_models.AnswersFromTextResult": + self, options: _models.AnswersFromTextOptions, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.AnswersFromTextResult: """Answers the specified question using the provided text in the body. Answers the specified question using the provided text in the body. - :param options: Post body of the request. + :param options: Post body of the request. Required. :type options: ~azure.ai.language.questionanswering.models.AnswersFromTextOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: AnswersFromTextResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersFromTextResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_answers_from_text( + self, options: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.AnswersFromTextResult: + """Answers the specified question using the provided text in the body. + + Answers the specified question using the provided text in the body. + + :param options: Post body of the request. Required. + :type options: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :return: AnswersFromTextResult :rtype: ~azure.ai.language.questionanswering.models.AnswersFromTextResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - cls = kwargs.pop("cls", None) # type: ClsType["_models.AnswersFromTextResult"] + @distributed_trace_async + async def get_answers_from_text( + self, options: Union[_models.AnswersFromTextOptions, IO], **kwargs: Any + ) -> _models.AnswersFromTextResult: + """Answers the specified question using the provided text in the body. - _json = self._serialize.body(options, "AnswersFromTextOptions") + Answers the specified question using the provided text in the body. + + :param options: Post body of the request. Is either a model type or a IO type. Required. + :type options: ~azure.ai.language.questionanswering.models.AnswersFromTextOptions or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :return: AnswersFromTextResult + :rtype: ~azure.ai.language.questionanswering.models.AnswersFromTextResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AnswersFromTextResult] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + _json = self._serialize.body(options, "AnswersFromTextOptions") request = build_get_answers_from_text_request( - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), @@ -122,6 +254,7 @@ async def get_answers_from_text( pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_patch.py index 7d933bbbcfea..a59f55c1b6e3 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_patch.py @@ -10,7 +10,7 @@ from azure.core.credentials import AzureKeyCredential from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.policies import AzureKeyCredentialPolicy, AsyncBearerTokenCredentialPolicy -from ._question_answering_client import QuestionAnsweringClient as QuestionAnsweringClientGenerated +from ._client import QuestionAnsweringClient as QuestionAnsweringClientGenerated def _authentication_policy(credential, **kwargs): @@ -56,7 +56,6 @@ def __init__( endpoint = endpoint.rstrip("/") except AttributeError: raise ValueError("Parameter 'endpoint' must be a string.") - super().__init__( endpoint=endpoint, credential=credential, # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_vendor.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_vendor.py index f025a8a40bb3..ad55c64d2a73 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_vendor.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/aio/_vendor.py @@ -12,10 +12,10 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from msrest import Deserializer, Serializer - from azure.core import AsyncPipelineClient + from .._serialization import Deserializer, Serializer + class MixinABC(ABC): """DO NOT use this class. 
It is for internal typing use only.""" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/__init__.py similarity index 57% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/__init__.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/__init__.py index a2801a59af36..5bdb7bde30c1 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/__init__.py @@ -6,14 +6,19 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._question_answering_projects_client import QuestionAnsweringProjectsClient +from ._client import QuestionAnsweringAuthoringClient from ._version import VERSION __version__ = VERSION -__all__ = ["QuestionAnsweringProjectsClient"] -# `._patch.py` is used for handwritten extensions to the generated code -# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md -from ._patch import patch_sdk +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk -patch_sdk() +__all__ = ["QuestionAnsweringAuthoringClient"] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_question_answering_projects_client.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_client.py similarity index 75% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_question_answering_projects_client.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_client.py index 4dcf5d4b1d0a..75747bdfb210 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_question_answering_projects_client.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_client.py @@ -7,24 +7,24 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import TYPE_CHECKING - -from msrest import Deserializer, Serializer +from typing import Any, TYPE_CHECKING from azure.core import PipelineClient +from azure.core.credentials import AzureKeyCredential +from azure.core.rest import HttpRequest, HttpResponse -from ._configuration import QuestionAnsweringProjectsClientConfiguration -from ._operations import QuestionAnsweringProjectsClientOperationsMixin +from ._configuration import QuestionAnsweringAuthoringClientConfiguration +from ._operations import QuestionAnsweringAuthoringClientOperationsMixin +from ._serialization import Deserializer, Serializer if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Dict - - 
from azure.core.credentials import AzureKeyCredential - from azure.core.rest import HttpRequest, HttpResponse + from typing import Dict -class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientOperationsMixin): +class QuestionAnsweringAuthoringClient( + QuestionAnsweringAuthoringClientOperationsMixin +): # pylint: disable=client-accepts-api-version-keyword """The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language @@ -32,38 +32,27 @@ class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientOperationsM href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview">https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview`. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential - :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ - def __init__( - self, - endpoint, # type: str - credential, # type: AzureKeyCredential - **kwargs # type: Any - ): - # type: (...) -> None + def __init__(self, endpoint: str, credential: AzureKeyCredential, **kwargs: Any) -> None: _endpoint = "{Endpoint}/language" - self._config = QuestionAnsweringProjectsClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + self._config = QuestionAnsweringAuthoringClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) self._client = PipelineClient(base_url=_endpoint, config=self._config, **kwargs) self._serialize = Serializer() self._deserialize = Deserializer() self._serialize.client_side_validation = False - def send_request( - self, - request, # type: HttpRequest - **kwargs # type: Any - ): - # type: (...) -> HttpResponse + def send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -72,7 +61,7 @@ def send_request( >>> response = client.send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. 
:type request: ~azure.core.rest.HttpRequest @@ -94,7 +83,7 @@ def close(self): self._client.close() def __enter__(self): - # type: () -> QuestionAnsweringProjectsClient + # type: () -> QuestionAnsweringAuthoringClient self._client.__enter__() return self diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_configuration.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_configuration.py similarity index 78% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_configuration.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_configuration.py index 6d8633e032a8..9c8723c38be9 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_configuration.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_configuration.py @@ -6,44 +6,33 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from typing import Any from azure.core.configuration import Configuration +from azure.core.credentials import AzureKeyCredential from azure.core.pipeline import policies from ._version import VERSION -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any - from azure.core.credentials import AzureKeyCredential - - -class QuestionAnsweringProjectsClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes - """Configuration for QuestionAnsweringProjectsClient. +class QuestionAnsweringAuthoringClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for QuestionAnsweringAuthoringClient. Note that all parameters used to create this instance are saved as instance attributes. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential - :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str """ - def __init__( - self, - endpoint, # type: str - credential, # type: AzureKeyCredential - **kwargs # type: Any - ): - # type: (...) 
-> None - super(QuestionAnsweringProjectsClientConfiguration, self).__init__(**kwargs) + def __init__(self, endpoint: str, credential: AzureKeyCredential, **kwargs: Any) -> None: + super(QuestionAnsweringAuthoringClientConfiguration, self).__init__(**kwargs) api_version = kwargs.pop("api_version", "2021-10-01") # type: str if endpoint is None: diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_operations/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/__init__.py similarity index 56% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_operations/__init__.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/__init__.py index 54d46cdf0343..361d46bf72bd 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_operations/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/__init__.py @@ -6,8 +6,14 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._operations import QuestionAnsweringProjectsClientOperationsMixin +from ._operations import QuestionAnsweringAuthoringClientOperationsMixin + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - "QuestionAnsweringProjectsClientOperationsMixin", + "QuestionAnsweringAuthoringClientOperationsMixin", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/_operations.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/_operations.py new file mode 100644 index 000000000000..0ed7a24dee25 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/_operations.py @@ -0,0 +1,2804 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import sys +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .._serialization import Serializer +from .._vendor import MixinABC, _format_url_section + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_projects_request(*, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_project_details_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_project_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: 
Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_project_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_export_request( + project_name: str, *, format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/:export" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if format is not None: + _params["format"] = _SERIALIZER.query("format", format, "str") + if asset_kind is not None: + _params["assetKind"] = _SERIALIZER.query("asset_kind", asset_kind, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_import_assets_request( + project_name: str, *, format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/:import" 
+ path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if format is not None: + _params["format"] = _SERIALIZER.query("format", format, "str") + if asset_kind is not None: + _params["assetKind"] = _SERIALIZER.query("asset_kind", asset_kind, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_deploy_project_request(project_name: str, deployment_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_deployments_request( + project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/deployments" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_synonyms_request( + project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/synonyms" + path_format_arguments = { + 
"projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_synonyms_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/synonyms" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_sources_request( + project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/sources" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_sources_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/sources" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", 
max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_qnas_request( + project_name: str, + *, + source: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/qnas" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if source is not None: + _params["source"] = _SERIALIZER.query("source", source, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_qnas_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/qnas" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_add_feedback_request(project_name: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01")) # type: str + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/query-knowledgebases/projects/{projectName}/feedback" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, 
"str", max_length=100), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class QuestionAnsweringAuthoringClientOperationsMixin(MixinABC): # pylint: disable=too-many-public-methods + @distributed_trace + def list_projects(self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any) -> Iterable[JSON]: + """Gets all projects for a user. + + Gets all projects for a user. + + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of JSON object + :rtype: ~azure.core.paging.ItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. 
+ } + } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_projects_request( + top=top, + skip=skip, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + return request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized["value"] + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.get("nextLink", None), iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_project_details(self, project_name: str, **kwargs: Any) -> JSON: + """Get the requested project metadata. + + Get the requested project metadata. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. 
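As a rough usage sketch for the paged `list_projects` operation and the `get_project_details` call defined in this mixin (assuming an already-authenticated `QuestionAnsweringAuthoringClient` instance named `client`; the project name is illustrative):

```python
# Page through project metadata; "projectName" and "lastModifiedDateTime"
# follow the response template shown in the docstrings above.
for project in client.list_projects(top=10):
    print(project.get("projectName"), project.get("lastModifiedDateTime"))

# Fetch the metadata of a single project by name.
details = client.get_project_details(project_name="sample-project")
print(details.get("description"), details.get("language"))
```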
+ } + } + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + request = build_get_project_details_request( + project_name=project_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if response.content: + deserialized = response.json() + else: + deserialized = None + + if cls: + return cls(pipeline_response, cast(JSON, deserialized), {}) + + return cast(JSON, deserialized) + + @overload + def create_project( + self, project_name: str, options: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> JSON: + """Create or update a project. + + Create or update a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Parameters needed to create the project. Required. + :type options: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + options = { + "language": "str", # Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. Required. + "description": "str", # Optional. Description of the project. + "multilingualResource": bool, # Optional. Set to true to enable creating + knowledgebases in different languages for the same resource. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. + } + } + + # response body for status code(s): 200, 201 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. 
Default Answer response when no + good match is found in the knowledge base. + } + } + """ + + @overload + def create_project( + self, project_name: str, options: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> JSON: + """Create or update a project. + + Create or update a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Parameters needed to create the project. Required. + :type options: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200, 201 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. + } + } + """ + + @distributed_trace + def create_project(self, project_name: str, options: Union[JSON, IO], **kwargs: Any) -> JSON: + """Create or update a project. + + Create or update a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Parameters needed to create the project. Is either a model type or a IO type. + Required. + :type options: JSON or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200, 201 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. 
+ } + } + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + _json = options + + request = build_create_project_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if response.status_code == 200: + if response.content: + deserialized = response.json() + else: + deserialized = None + + if response.status_code == 201: + if response.content: + deserialized = response.json() + else: + deserialized = None + + if cls: + return cls(pipeline_response, cast(JSON, deserialized), {}) + + return cast(JSON, deserialized) + + def _delete_project_initial( # pylint: disable=inconsistent-return-statements + self, project_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_project_request( + project_name=project_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + @distributed_trace + def begin_delete_project(self, project_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete the project. + + Delete the project. + + :param project_name: The name of the project to use. Required. 
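A minimal sketch of creating a project with `create_project` and removing it again with the `begin_delete_project` poller (same illustrative `client` and project name as above):

```python
# Create (or update) a project from a plain JSON body; "language" is the only
# required field per the input template above.
project = client.create_project(
    "sample-project",
    {
        "language": "en",
        "description": "FAQ knowledge base",
        "settings": {"defaultAnswer": "No answer found."},
    },
)
print(project.get("createdDateTime"))

# Deleting is a long-running operation; block until it finishes.
client.begin_delete_project("sample-project").result()
```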
+ :type project_name: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_project_initial( # type: ignore + project_name=project_name, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + def _export_initial( + self, project_name: str, *, format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any + ) -> Optional[JSON]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]] + + request = build_export_request( + project_name=project_name, + format=format, + asset_kind=asset_kind, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = None + response_headers = {} + if response.status_code == 200: + if response.content: + deserialized = 
response.json() + else: + deserialized = None + + if response.status_code == 202: + response_headers["Operation-Location"] = self._deserialize( + "str", response.headers.get("Operation-Location") + ) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + @distributed_trace + def begin_export( + self, project_name: str, *, format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any + ) -> LROPoller[JSON]: + """Export project metadata and assets. + + Export project metadata and assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". + :paramtype format: str + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns JSON object + :rtype: ~azure.core.polling.LROPoller[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Required. + "jobId": "str", # Required. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # Required. + "resultUrl": "str", # URL to download the result of the Export Job. + Required. + "status": "str", # Job Status. Required. Known values are: "notStarted", + "running", "succeeded", "failed", "cancelled", "cancelling", and + "partiallyCompleted". + "errors": [ + { + "code": "str", # One of a server-defined set of error codes. + Required. Known values are: "InvalidRequest", "InvalidArgument", + "Unauthorized", "Forbidden", "NotFound", "ProjectNotFound", + "OperationNotFound", "AzureCognitiveSearchNotFound", + "AzureCognitiveSearchIndexNotFound", "TooManyRequests", + "AzureCognitiveSearchThrottling", + "AzureCognitiveSearchIndexLimitReached", "InternalServerError", and + "ServiceUnavailable". + "message": "str", # A human-readable representation of the + error. Required. + "details": [ + ... + ], + "innererror": { + "code": "str", # One of a server-defined set of + error codes. Required. Known values are: "InvalidRequest", + "InvalidParameterValue", "KnowledgeBaseNotFound", + "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and + "ExtractionFailure". + "message": "str", # Error message. Required. + "details": { + "str": "str" # Optional. Error details. + }, + "innererror": ..., + "target": "str" # Optional. Error target. + }, + "target": "str" # Optional. The target of the error. + } + ], + "expirationDateTime": "2020-02-20 00:00:00" # Optional. 
+ } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._export_initial( # type: ignore + project_name=project_name, + format=format, + asset_kind=asset_kind, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + if response.content: + deserialized = response.json() + else: + deserialized = None + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + def _import_assets_initial( # pylint: disable=inconsistent-return-statements + self, + project_name: str, + options: Optional[Union[JSON, IO]] = None, + *, + format: str = "json", + asset_kind: Optional[str] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + if options is not None: + _json = options + else: + _json = None + + request = build_import_assets_request( + project_name=project_name, + format=format, + asset_kind=asset_kind, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + 
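The export operation is long-running; a hedged sketch of driving the poller returned by `begin_export` (the job-status keys follow the response template in its docstring):

```python
poller = client.begin_export("sample-project", format="json")
export_job = poller.result()  # resolves to the JSON job status shown above
print(export_job["status"], export_job.get("resultUrl"))
```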
+ if cls: + return cls(pipeline_response, None, response_headers) + + @overload + def begin_import_assets( + self, + project_name: str, + options: Optional[JSON] = None, + *, + format: str = "json", + asset_kind: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Import project assets. + + Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Project assets the needs to be imported. Default value is None. + :type options: JSON + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". + :paramtype format: str + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + options = { + "assets": { + "qnas": [ + { + "activeLearningSuggestions": [ + { + "clusterHead": "str", # Optional. + Question chosen as the head of suggested questions cluster by + Active Learning clustering algorithm. + "suggestedQuestions": [ + { + "autoSuggestedCount": + 0, # Optional. The number of times the question was + suggested automatically by the Active Learning + algorithm. + "question": "str", # + Optional. Question suggested by the Active Learning + feature. + "userSuggestedCount": + 0 # Optional. The number of times the question was + suggested explicitly by the user. + } + ] + } + ], + "answer": "str", # Optional. Answer text. + "dialog": { + "isContextOnly": bool, # Optional. To mark + if a prompt is relevant only with a previous question or not. If + true, do not include this QnA as answer for queries without + context; otherwise, ignores context and includes this QnA in + answers. + "prompts": [ + { + "displayOrder": 0, # + Optional. Index of the prompt. It is used for ordering of + the prompts. + "displayText": "str", # + Optional. Text displayed to represent a follow up + question prompt. + "qna": { + "activeLearningSuggestions": [ + { + "clusterHead": "str", # Optional. Question + chosen as the head of suggested questions + cluster by Active Learning clustering + algorithm. + "suggestedQuestions": [ + { + "autoSuggestedCount": 0, # Optional. + The number of times the question was + suggested automatically by the Active + Learning algorithm. + "question": "str", # Optional. + Question suggested by the Active + Learning feature. + "userSuggestedCount": 0 # Optional. + The number of times the question was + suggested explicitly by the user. + } + ] + } + ], + "answer": "str", # + Optional. Answer text. 
+ "dialog": ..., + "id": 0, # Optional. + Unique ID for the QnA. + "metadata": { + "str": "str" + # Optional. Metadata associated with the answer, + useful to categorize or filter question answers. + }, + "questions": [ + "str" # + Optional. List of questions associated with the + answer. + ], + "source": "str" # + Optional. Source from which QnA was indexed e.g. + https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs + . + }, + "qnaId": 0 # Optional. ID of + the QnA corresponding to the prompt. + } + ] + }, + "id": 0, # Optional. Unique ID for the QnA. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # + Optional. Date-time when the QnA was last updated. + "metadata": { + "str": "str" # Optional. Metadata associated + with the answer, useful to categorize or filter question answers. + }, + "questions": [ + "str" # Optional. List of questions + associated with the answer. + ], + "source": "str", # Optional. Source from which QnA + was indexed e.g. + https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs + . + "sourceDisplayName": "str" # Optional. Friendly name + of the Source. + } + ], + "synonyms": [ + { + "alterations": [ + "str" # Collection of word alterations. + Required. + ] + } + ] + }, + "fileUri": "str", # Optional. Import data File URI. + "metadata": { + "language": "str", # Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for + Spanish etc. If not set, use "en" for English as default. Required. + "description": "str", # Optional. Description of the project. + "multilingualResource": bool, # Optional. Set to true to enable + creating knowledgebases in different languages for the same resource. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response + when no good match is found in the knowledge base. + } + } + } + """ + + @overload + def begin_import_assets( + self, + project_name: str, + options: Optional[IO] = None, + *, + format: str = "json", + asset_kind: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Import project assets. + + Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Project assets the needs to be imported. Default value is None. + :type options: IO + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". + :paramtype format: str + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+        :return: An instance of LROPoller that returns None
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_import_assets(
+        self,
+        project_name: str,
+        options: Optional[Union[JSON, IO]] = None,
+        *,
+        format: str = "json",
+        asset_kind: Optional[str] = None,
+        **kwargs: Any
+    ) -> LROPoller[None]:
+        """Import project assets.
+
+        Import project assets.
+
+        :param project_name: The name of the project to use. Required.
+        :type project_name: str
+        :param options: Project assets that need to be imported. Is either a model type or an IO type.
+         Default value is None.
+        :type options: JSON or IO
+        :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and
+         "excel". Default value is "json".
+        :paramtype format: str
+        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
+         Default value is None.
+        :paramtype asset_kind: str
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns None
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._import_assets_initial(  # type: ignore
+                project_name=project_name,
+                options=options,
+                format=format,
+                asset_kind=asset_kind,
+                content_type=content_type,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+            kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        path_format_arguments = {
+            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+        }
+
+        if polling is True:
+            polling_method = cast(
+                PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+            )  # type: PollingMethod
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output,
+            )
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    def _deploy_project_initial(  # pylint:
disable=inconsistent-return-statements + self, project_name: str, deployment_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_deploy_project_request( + project_name=project_name, + deployment_name=deployment_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + @distributed_trace + def begin_deploy_project(self, project_name: str, deployment_name: str, **kwargs: Any) -> LROPoller[None]: + """Deploy project to production. + + Deploy project to production. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param deployment_name: The name of the specific deployment of the project to use. Required. + :type deployment_name: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
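A sketch of importing assets and then deploying them with the two pollers defined in this section (the payload loosely follows the import template shown earlier; all names and values are illustrative):

```python
# Import a single QnA pair; the metadata "language" field is required.
client.begin_import_assets(
    "sample-project",
    {
        "metadata": {"language": "en"},
        "assets": {
            "qnas": [
                {"answer": "Yes.", "questions": ["Can I import QnAs?"], "source": "manual"}
            ]
        },
    },
).result()

# Deploy the imported content; the poller returns None on completion.
client.begin_deploy_project("sample-project", deployment_name="production").result()
```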
+ :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._deploy_project_initial( # type: ignore + project_name=project_name, + deployment_name=deployment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + @distributed_trace + def list_deployments( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> Iterable[JSON]: + """List all deployments of a project. + + List all deployments of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of JSON object + :rtype: ~azure.core.paging.ItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "deploymentName": "str", # Optional. Name of the deployment. + "lastDeployedDateTime": "2020-02-20 00:00:00" # Optional. Represents the + project last deployment date-time. 
+ } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_deployments_request( + project_name=project_name, + top=top, + skip=skip, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + return request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized["value"] + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.get("nextLink", None), iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_synonyms( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> Iterable[JSON]: + """Gets all the synonyms of a project. + + Gets all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of JSON object + :rtype: ~azure.core.paging.ItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "alterations": [ + "str" # Collection of word alterations. Required. 
+ ] + } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_synonyms_request( + project_name=project_name, + top=top, + skip=skip, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + return request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized["value"] + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.get("nextLink", None), iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @overload + def update_synonyms( # pylint: disable=inconsistent-return-statements + self, project_name: str, synonyms: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + synonyms = { + "nextLink": "str", # Optional. + "value": [ + { + "alterations": [ + "str" # Collection of word alterations. Required. + ] + } + ] + } + """ + + @overload + def update_synonyms( # pylint: disable=inconsistent-return-statements + self, project_name: str, synonyms: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. 
+ :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_synonyms( # pylint: disable=inconsistent-return-statements + self, project_name: str, synonyms: Union[JSON, IO], **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Is either a model type or a IO type. Required. + :type synonyms: JSON or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(synonyms, (IO, bytes)): + _content = synonyms + else: + _json = synonyms + + request = build_update_synonyms_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) + + @distributed_trace + def list_sources( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> Iterable[JSON]: + """Gets all the sources of a project. + + Gets all the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of JSON object + :rtype: ~azure.core.paging.ItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "sourceKind": "str", # Supported source types. Required. Known values are: + "file" and "url". 
+ "sourceUri": "str", # URI location for the file or url. Required. + "contentStructureKind": "str", # Optional. Content structure type for + sources. "unstructured" + "displayName": "str", # Optional. Friendly name of the Source. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the + QnA was last updated. + "source": "str" # Optional. Unique source identifier. Name of the file if + it's a 'file' source; otherwise, the complete URL if it's a 'url' source. + } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_sources_request( + project_name=project_name, + top=top, + skip=skip, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + return request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized["value"] + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.get("nextLink", None), iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + def _update_sources_initial( # pylint: disable=inconsistent-return-statements + self, project_name: str, sources: Union[List[JSON], IO], **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(sources, (IO, bytes)): + _content = sources + else: + _json = sources + + request = build_update_sources_request( + 
project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + @overload + def begin_update_sources( + self, project_name: str, sources: List[JSON], *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project. + + Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Required. + :type sources: list[JSON] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + sources = [ + { + "op": "str", # Update operation type for assets. Required. Known + values are: "add", "delete", and "replace". + "value": { + "sourceKind": "str", # Supported source types. Required. + Known values are: "file" and "url". + "sourceUri": "str", # URI location for the file or url. + Required. + "contentStructureKind": "str", # Optional. Content structure + type for sources. "unstructured" + "displayName": "str", # Optional. Friendly name of the + Source. + "refresh": bool, # Optional. Boolean flag used to refresh + data from the Source. + "source": "str" # Optional. Unique source identifier. Name + of the file if it's a 'file' source; otherwise, the complete URL if it's + a 'url' source. + } + } + ] + """ + + @overload + def begin_update_sources( + self, project_name: str, sources: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project. + + Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Required. + :type sources: IO + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_sources(self, project_name: str, sources: Union[List[JSON], IO], **kwargs: Any) -> LROPoller[None]: + """Updates the sources of a project. + + Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Is either a list type or a IO type. + Required. + :type sources: list[JSON] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_sources_initial( # type: ignore + project_name=project_name, + sources=sources, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + @distributed_trace + def list_qnas( + self, + project_name: str, + *, + source: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[int] = None, + **kwargs: Any + ) -> Iterable[JSON]: + """Gets all the QnAs of a project. + + Gets all the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :keyword source: Source of the QnA. Default value is None. + :paramtype source: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of JSON object + :rtype: ~azure.core.paging.ItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200 + response == { + "activeLearningSuggestions": [ + { + "clusterHead": "str", # Optional. Question chosen as the + head of suggested questions cluster by Active Learning clustering + algorithm. + "suggestedQuestions": [ + { + "autoSuggestedCount": 0, # Optional. The + number of times the question was suggested automatically by the + Active Learning algorithm. + "question": "str", # Optional. Question + suggested by the Active Learning feature. + "userSuggestedCount": 0 # Optional. The + number of times the question was suggested explicitly by the + user. + } + ] + } + ], + "answer": "str", # Optional. Answer text. + "dialog": { + "isContextOnly": bool, # Optional. To mark if a prompt is relevant + only with a previous question or not. 
If true, do not include this QnA as + answer for queries without context; otherwise, ignores context and includes + this QnA in answers. + "prompts": [ + { + "displayOrder": 0, # Optional. Index of the prompt. + It is used for ordering of the prompts. + "displayText": "str", # Optional. Text displayed to + represent a follow up question prompt. + "qna": { + "activeLearningSuggestions": [ + { + "clusterHead": "str", # + Optional. Question chosen as the head of suggested + questions cluster by Active Learning clustering + algorithm. + "suggestedQuestions": [ + { + "autoSuggestedCount": 0, # Optional. The number + of times the question was suggested automatically + by the Active Learning algorithm. + "question": + "str", # Optional. Question suggested by the + Active Learning feature. + "userSuggestedCount": 0 # Optional. The number + of times the question was suggested explicitly by + the user. + } + ] + } + ], + "answer": "str", # Optional. Answer text. + "dialog": ..., + "id": 0, # Optional. Unique ID for the QnA. + "metadata": { + "str": "str" # Optional. Metadata + associated with the answer, useful to categorize or filter + question answers. + }, + "questions": [ + "str" # Optional. List of questions + associated with the answer. + ], + "source": "str" # Optional. Source from + which QnA was indexed e.g. + https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs + . + }, + "qnaId": 0 # Optional. ID of the QnA corresponding + to the prompt. + } + ] + }, + "id": 0, # Optional. Unique ID for the QnA. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the + QnA was last updated. + "metadata": { + "str": "str" # Optional. Metadata associated with the answer, useful + to categorize or filter question answers. + }, + "questions": [ + "str" # Optional. List of questions associated with the answer. + ], + "source": "str" # Optional. Source from which QnA was indexed e.g. + https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs . 
+ } + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_qnas_request( + project_name=project_name, + source=source, + top=top, + skip=skip, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + return request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = deserialized["value"] + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.get("nextLink", None), iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + def _update_qnas_initial( # pylint: disable=inconsistent-return-statements + self, project_name: str, qnas: Union[List[JSON], IO], **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(qnas, (IO, bytes)): + _content = qnas + else: + _json = qnas + + request = build_update_qnas_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + if cls: + return cls(pipeline_response, None, response_headers) + + @overload + def begin_update_qnas( + self, project_name: str, qnas: List[JSON], *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: + """Updates the QnAs of a project. + + Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: list[JSON] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + qnas = [ + { + "op": "str", # Update operation type for assets. Required. Known + values are: "add", "delete", and "replace". + "value": { + "activeLearningSuggestions": [ + { + "clusterHead": "str", # Optional. Question + chosen as the head of suggested questions cluster by Active + Learning clustering algorithm. + "suggestedQuestions": [ + { + "autoSuggestedCount": 0, # + Optional. The number of times the question was suggested + automatically by the Active Learning algorithm. + "question": "str", # + Optional. Question suggested by the Active Learning + feature. + "userSuggestedCount": 0 # + Optional. The number of times the question was suggested + explicitly by the user. + } + ] + } + ], + "answer": "str", # Optional. Answer text. + "dialog": { + "isContextOnly": bool, # Optional. To mark if a + prompt is relevant only with a previous question or not. If true, do + not include this QnA as answer for queries without context; + otherwise, ignores context and includes this QnA in answers. + "prompts": [ + { + "displayOrder": 0, # Optional. Index + of the prompt. It is used for ordering of the prompts. + "displayText": "str", # Optional. + Text displayed to represent a follow up question prompt. + "qna": ..., + "qnaId": 0 # Optional. ID of the QnA + corresponding to the prompt. + } + ] + }, + "id": 0, # Optional. Unique ID for the QnA. + "metadata": { + "str": "str" # Optional. Metadata associated with + the answer, useful to categorize or filter question answers. + }, + "questions": [ + "str" # Optional. List of questions associated with + the answer. + ], + "source": "str" # Optional. Source from which QnA was + indexed e.g. 
+ https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs . + } + } + ] + """ + + @overload + def begin_update_qnas( + self, project_name: str, qnas: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: + """Updates the QnAs of a project. + + Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_qnas(self, project_name: str, qnas: Union[List[JSON], IO], **kwargs: Any) -> LROPoller[None]: + """Updates the QnAs of a project. + + Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Is either a list type or a IO type. Required. + :type qnas: list[JSON] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_qnas_initial( # type: ignore + project_name=project_name, + qnas=qnas, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + @overload + def add_feedback( # pylint: disable=inconsistent-return-statements + self, project_name: str, feedback: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param feedback: Feedback for Active Learning. Required. + :type feedback: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # JSON input template you can fill out and use as your body input. + feedback = { + "records": [ + { + "qnaId": 0, # Optional. Unique ID of the QnA. + "userId": "str", # Optional. Unique identifier of the user. + "userQuestion": "str" # Optional. User suggested question + for the QnA. + } + ] + } + """ + + @overload + def add_feedback( # pylint: disable=inconsistent-return-statements + self, project_name: str, feedback: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param feedback: Feedback for Active Learning. Required. + :type feedback: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def add_feedback( # pylint: disable=inconsistent-return-statements + self, project_name: str, feedback: Union[JSON, IO], **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param feedback: Feedback for Active Learning. Is either a model type or a IO type. Required. + :type feedback: JSON or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(feedback, (IO, bytes)): + _content = feedback + else: + _json = feedback + + request = build_add_feedback_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_patch.py similarity index 59% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_patch.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_patch.py index 669166ec72de..74cc6dd438d3 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_patch.py @@ -1,39 +1,15 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. -# This file is used for handwritten extensions to the generated code. 
Example: -# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md - -import importlib -from typing import Union, Any +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List, Union, Any from azure.core.credentials import AzureKeyCredential, TokenCredential from azure.core.pipeline.policies import AzureKeyCredentialPolicy, BearerTokenCredentialPolicy -from ._question_answering_projects_client import QuestionAnsweringProjectsClient \ - as QuestionAnsweringProjectsClientGenerated +from ._client import QuestionAnsweringAuthoringClient as QuestionAnsweringAuthoringClientGenerated def _authentication_policy(credential, **kwargs): @@ -55,7 +31,7 @@ def _authentication_policy(credential, **kwargs): return authentication_policy -class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientGenerated): +class QuestionAnsweringAuthoringClient(QuestionAnsweringAuthoringClientGenerated): """The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language @@ -81,7 +57,6 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCre endpoint = endpoint.rstrip("/") except AttributeError: raise ValueError("Parameter 'endpoint' must be a string.") - super().__init__( endpoint=endpoint, credential=credential, # type: ignore @@ -90,6 +65,15 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, TokenCre ) +__all__: List[str] = [ + "QuestionAnsweringAuthoringClient" +] # Add all objects you want publicly available to users at this package level + + def patch_sdk(): - current_package = importlib.import_module("azure.ai.language.questionanswering.projects") - current_package.QuestionAnsweringProjectsClient = QuestionAnsweringProjectsClient + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_serialization.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_serialization.py new file mode 100644 index 000000000000..7c1dedb5133d --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_serialization.py @@ -0,0 +1,1970 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote # type: ignore +import xml.etree.ElementTree as ET + +import isodate + +from typing import Dict, Any, cast, TYPE_CHECKING + +from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +if TYPE_CHECKING: + from typing import Optional, Union, AnyStr, IO, Mapping + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. 
+ _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str # type: ignore + unicode_str = str # type: ignore + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc # type: ignore +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? 
y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes=None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError: + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. 
+ deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] + if not kwargs.get("skip_quote", False): + data = [quote(str(d), safe="") for d in data] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. 
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError: + serialized.append(None) + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If at any point while following flatten JSON path see None, it means
+            # that all properties under are None as well
+            # https://github.com/Azure/msrest-for-python/issues/197
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):
+    """Extract the attribute in "data" based on the last part of the JSON path key."""
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case insensitive version of "last_rest_key_extractor"
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract correct XML name with namespace.
+
+    :param dict internal_type: A model type
+    :rtype: tuple
+    :returns: A tuple XML name + namespace dict
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenario where I take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If internal type has a local name and it's not a list, I use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
+        if
internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes=None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
+ :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_vendor.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_vendor.py similarity index 60% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_vendor.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_vendor.py index 54f238858ed8..a121a380c6ef 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_vendor.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_vendor.py @@ -5,6 +5,17 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from abc import ABC +from typing import TYPE_CHECKING + +from ._configuration import QuestionAnsweringAuthoringClientConfiguration + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core import PipelineClient + + from ._serialization import Deserializer, Serializer + def _format_url_section(template, **kwargs): components = template.split("/") @@ -15,3 +26,12 @@ def _format_url_section(template, **kwargs): formatted_components = template.split("/") components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] template = "/".join(components) + + +class MixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "PipelineClient" + _config: QuestionAnsweringAuthoringClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_version.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_version.py similarity index 95% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_version.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_version.py index f1fb63697cf5..73aef742777f 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_version.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "1.1.0b2" +VERSION = "1.1.0b3" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/__init__.py similarity index 54% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/__init__.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/__init__.py index 2578b874c48c..8feb974fc503 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/__init__.py @@ -6,12 +6,16 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._question_answering_projects_client import QuestionAnsweringProjectsClient +from ._client import QuestionAnsweringAuthoringClient -__all__ = ["QuestionAnsweringProjectsClient"] +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk -# `._patch.py` is used for handwritten extensions to the generated code -# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md -from ._patch import patch_sdk +__all__ = ["QuestionAnsweringAuthoringClient"] +__all__.extend([p for p in _patch_all if p not in __all__]) -patch_sdk() +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_question_answering_projects_client.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_client.py similarity index 83% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_question_answering_projects_client.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_client.py index ca2ebb7c7a82..2db9fc72dcd6 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_question_answering_projects_client.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_client.py @@ -9,21 +9,22 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING -from msrest import Deserializer, Serializer - from azure.core import AsyncPipelineClient from azure.core.credentials import AzureKeyCredential from azure.core.rest import AsyncHttpResponse, HttpRequest -from ._configuration import QuestionAnsweringProjectsClientConfiguration -from ._operations import QuestionAnsweringProjectsClientOperationsMixin +from .._serialization import Deserializer, Serializer +from ._configuration import QuestionAnsweringAuthoringClientConfiguration +from ._operations import QuestionAnsweringAuthoringClientOperationsMixin if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Dict -class 
QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientOperationsMixin): +class QuestionAnsweringAuthoringClient( + QuestionAnsweringAuthoringClientOperationsMixin +): # pylint: disable=client-accepts-api-version-keyword """The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language @@ -31,11 +32,11 @@ class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientOperationsM href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview">https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview`. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential - :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no @@ -44,7 +45,7 @@ class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientOperationsM def __init__(self, endpoint: str, credential: AzureKeyCredential, **kwargs: Any) -> None: _endpoint = "{Endpoint}/language" - self._config = QuestionAnsweringProjectsClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + self._config = QuestionAnsweringAuthoringClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) self._client = AsyncPipelineClient(base_url=_endpoint, config=self._config, **kwargs) self._serialize = Serializer() @@ -60,7 +61,7 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHt >>> response = await client.send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. 
:type request: ~azure.core.rest.HttpRequest @@ -80,7 +81,7 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHt async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "QuestionAnsweringProjectsClient": + async def __aenter__(self) -> "QuestionAnsweringAuthoringClient": await self._client.__aenter__() return self diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_configuration.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_configuration.py similarity index 88% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_configuration.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_configuration.py index d01e2cbc4c0a..057f970f5450 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_configuration.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_configuration.py @@ -15,24 +15,24 @@ from .._version import VERSION -class QuestionAnsweringProjectsClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes - """Configuration for QuestionAnsweringProjectsClient. +class QuestionAnsweringAuthoringClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for QuestionAnsweringAuthoringClient. Note that all parameters used to create this instance are saved as instance attributes. :param endpoint: Supported Cognitive Services endpoint (e.g., - https://:code:``.api.cognitiveservices.azure.com). + https://:code:``.api.cognitiveservices.azure.com). Required. :type endpoint: str - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.AzureKeyCredential - :keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this + :keyword api_version: Api Version. Default value is "2021-10-01". Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__(self, endpoint: str, credential: AzureKeyCredential, **kwargs: Any) -> None: - super(QuestionAnsweringProjectsClientConfiguration, self).__init__(**kwargs) + super(QuestionAnsweringAuthoringClientConfiguration, self).__init__(**kwargs) api_version = kwargs.pop("api_version", "2021-10-01") # type: str if endpoint is None: diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_operations/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/__init__.py similarity index 56% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_operations/__init__.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/__init__.py index 54d46cdf0343..361d46bf72bd 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_operations/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/__init__.py @@ -6,8 +6,14 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._operations import QuestionAnsweringProjectsClientOperationsMixin +from ._operations import QuestionAnsweringAuthoringClientOperationsMixin + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - "QuestionAnsweringProjectsClientOperationsMixin", + "QuestionAnsweringAuthoringClientOperationsMixin", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_operations/_operations.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py similarity index 52% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_operations/_operations.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py index 2ce7195bf5fb..f5df5da4032f 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_operations/_operations.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py @@ -6,7 +6,9 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union +import sys +from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload +from urllib.parse import parse_qs, urljoin, urlparse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( @@ -14,6 +16,7 @@ HttpResponseError, ResourceExistsError, ResourceNotFoundError, + ResourceNotModifiedError, map_error, ) from azure.core.pipeline import PipelineResponse @@ -23,123 +26,122 @@ from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from ..._operations._operations import ( build_add_feedback_request, build_create_project_request, - build_delete_project_request_initial, - build_deploy_project_request_initial, - build_export_request_initial, + build_delete_project_request, + build_deploy_project_request, + build_export_request, build_get_project_details_request, - build_import_assets_request_initial, + build_import_assets_request, build_list_deployments_request, build_list_projects_request, build_list_qnas_request, build_list_sources_request, build_list_synonyms_request, - build_update_qnas_request_initial, - build_update_sources_request_initial, + build_update_qnas_request, + build_update_sources_request, build_update_synonyms_request, ) +from .._vendor import MixinABC +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object T = TypeVar("T") -JSONType = Any ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class QuestionAnsweringProjectsClientOperationsMixin: # pylint: disable=too-many-public-methods +class QuestionAnsweringAuthoringClientOperationsMixin(MixinABC): # pylint: disable=too-many-public-methods @distributed_trace def list_projects( - self, *, top: Optional[int] = None, skip: Optional[int] = None, maxpagesize: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable[JSONType]: + self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncIterable[JSON]: """Gets all projects for a user. Gets all projects for a user. - :keyword top: The maximum number of resources to return from the collection. + :keyword top: The maximum number of resources to return from the collection. Default value is + None. :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "createdDateTime": "2020-02-20 00:00:00", # Optional. 
- Project creation date-time. - "description": "str", # Optional. Description of the - project. - "language": "str", # Optional. Language of the text records. - This is BCP-47 representation of a language. For example, use "en" for - English; "es" for Spanish etc. If not set, use "en" for English as - default. - "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. - Represents the project last deployment date-time. - "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. - Represents the project last modified date-time. - "multilingualResource": bool, # Optional. Resource enabled - for multiple languages across projects or not. - "projectName": "str", # Optional. Name of the project. - "settings": { - "defaultAnswer": "str" # Optional. Default Answer - response when no good match is found in the knowledge base. - } - } - ] + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. + } } """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_projects_request( - api_version=api_version, top=top, skip=skip, - maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore else: - - request = build_list_projects_request( - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - 
request.url = self._client.format_url(next_link, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" return request async def extract_data(pipeline_response): @@ -152,7 +154,7 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -166,22 +168,22 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def get_project_details(self, project_name: str, **kwargs: Any) -> JSONType: + async def get_project_details(self, project_name: str, **kwargs: Any) -> JSON: """Get the requested project metadata. Get the requested project metadata. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str :return: JSON object - :rtype: JSONType - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { + response == { "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation date-time. "description": "str", # Optional. Description of the project. @@ -201,24 +203,34 @@ async def get_project_details(self, project_name: str, **kwargs: Any) -> JSONTyp } } """ - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] request = build_get_project_details_request( project_name=project_name, - api_version=api_version, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -231,33 +243,38 @@ async def get_project_details(self, project_name: str, **kwargs: Any) -> JSONTyp deserialized = None if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, cast(JSON, deserialized), {}) - return deserialized + return cast(JSON, 
deserialized) - @distributed_trace_async - async def create_project(self, project_name: str, options: JSONType, **kwargs: Any) -> JSONType: + @overload + async def create_project( + self, project_name: str, options: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> JSON: """Create or update a project. Create or update a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param options: Parameters needed to create the project. - :type options: JSONType + :param options: Parameters needed to create the project. Required. + :type options: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :return: JSON object - :rtype: JSONType - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # JSON input template you can fill out and use as your body input. options = { - "description": "str", # Optional. Description of the project. - "language": "str", # Required. Language of the text records. This is BCP-47 + "language": "str", # Language of the text records. This is BCP-47 representation of a language. For example, use "en" for English; "es" for Spanish - etc. If not set, use "en" for English as default. + etc. If not set, use "en" for English as default. Required. + "description": "str", # Optional. Description of the project. "multilingualResource": bool, # Optional. Set to true to enable creating knowledgebases in different languages for the same resource. "settings": { @@ -267,7 +284,94 @@ async def create_project(self, project_name: str, options: JSONType, **kwargs: A } # response body for status code(s): 200, 201 - response.json() == { + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. + } + } + """ + + @overload + async def create_project( + self, project_name: str, options: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> JSON: + """Create or update a project. + + Create or update a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Parameters needed to create the project. Required. + :type options: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. 
code-block:: python + + # response body for status code(s): 200, 201 + response == { + "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation + date-time. + "description": "str", # Optional. Description of the project. + "language": "str", # Optional. Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for Spanish + etc. If not set, use "en" for English as default. + "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last deployment date-time. + "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the + project last modified date-time. + "multilingualResource": bool, # Optional. Resource enabled for multiple + languages across projects or not. + "projectName": "str", # Optional. Name of the project. + "settings": { + "defaultAnswer": "str" # Optional. Default Answer response when no + good match is found in the knowledge base. + } + } + """ + + @distributed_trace_async + async def create_project(self, project_name: str, options: Union[JSON, IO], **kwargs: Any) -> JSON: + """Create or update a project. + + Create or update a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Parameters needed to create the project. Is either a model type or a IO type. + Required. + :type options: JSON or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :return: JSON object + :rtype: JSON + :raises ~azure.core.exceptions.HttpResponseError: + + Example: + .. code-block:: python + + # response body for status code(s): 200, 201 + response == { "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation date-time. "description": "str", # Optional. Description of the project. 
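To make the `create_project` overloads above concrete, the following hedged sketch passes the JSON form of `options` (only `language` is required per the input template) and then re-reads the project metadata with `get_project_details`. The project name, option values, endpoint, and key are hypothetical.

```python
import asyncio

from azure.core.credentials import AzureKeyCredential
from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient


async def main():
    async with QuestionAnsweringAuthoringClient(
        "https://{myaccount}.api.cognitive.microsoft.com", AzureKeyCredential("{api-key}")
    ) as client:
        # JSON body form of `options`; an IO stream would also be accepted.
        project = await client.create_project(
            "sample-project",  # hypothetical project name
            options={
                "language": "en",
                "description": "FAQ project created from the authoring client",
                "multilingualResource": False,
                "settings": {"defaultAnswer": "No answer found."},
            },
        )
        print(project)

        # The same metadata can be re-read later.
        details = await client.get_project_details("sample-project")
        print(details.get("lastDeployedDateTime"))


asyncio.run(main())
```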
@@ -287,29 +391,46 @@ async def create_project(self, project_name: str, options: JSONType, **kwargs: A } } """ - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[JSON] - _json = options + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options + else: + _json = options request = build_create_project_request( project_name=project_name, - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -329,31 +450,41 @@ async def create_project(self, project_name: str, options: JSONType, **kwargs: A deserialized = None if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, cast(JSON, deserialized), {}) - return deserialized + return cast(JSON, deserialized) async def _delete_project_initial( # pylint: disable=inconsistent-return-statements self, project_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[None] - request = build_delete_project_request_initial( + request = build_delete_project_request( project_name=project_name, - api_version=api_version, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: 
ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: @@ -367,14 +498,12 @@ async def _delete_project_initial( # pylint: disable=inconsistent-return-statem return cls(pipeline_response, None, response_headers) @distributed_trace_async - async def begin_delete_project( # pylint: disable=inconsistent-return-statements - self, project_name: str, **kwargs: Any - ) -> AsyncLROPoller[None]: + async def begin_delete_project(self, project_name: str, **kwargs: Any) -> AsyncLROPoller[None]: """Delete the project. Delete the project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False @@ -385,20 +514,22 @@ async def begin_delete_project( # pylint: disable=inconsistent-return-statement Retry-After header is present. :return: An instance of AsyncLROPoller that returns None :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._delete_project_initial( - project_name=project_name, api_version=api_version, cls=lambda x, y, z: x, **kwargs + raw_result = await self._delete_project_initial( # type: ignore + project_name=project_name, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) @@ -407,9 +538,12 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) # type: AsyncPollingMethod elif polling is False: - polling_method = AsyncNoPolling() + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: @@ -422,28 +556,38 @@ def get_long_running_output(pipeline_response): return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) async def _export_initial( - self, project_name: str, *, format: Optional[str] = "json", asset_kind: Optional[str] = None, **kwargs: Any - ) -> Optional[JSONType]: - cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSONType]] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + self, project_name: str, *, format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any + ) -> Optional[JSON]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - request = build_export_request_initial( + cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSON]] + + request = build_export_request( project_name=project_name, - api_version=api_version, format=format, asset_kind=asset_kind, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: @@ -470,19 +614,19 @@ async def _export_initial( @distributed_trace_async async def begin_export( - self, project_name: str, *, format: Optional[str] = "json", asset_kind: Optional[str] = None, **kwargs: Any - ) -> AsyncLROPoller[JSONType]: + self, project_name: str, *, format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any + ) -> AsyncLROPoller[JSON]: """Export project metadata and assets. Export project metadata and assets. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :keyword format: Knowledge base Import or Export format. Possible values are: "json", "tsv", - and "excel". + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". :paramtype format: str - :keyword asset_kind: Kind of the asset of the project. Possible values are: "qnas" or - "synonyms". + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. :paramtype asset_kind: str :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False @@ -492,68 +636,71 @@ async def begin_export( :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns JSON object - :rtype: ~azure.core.polling.AsyncLROPoller[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.polling.AsyncLROPoller[JSON] + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { + response == { "createdDateTime": "2020-02-20 00:00:00", # Required. + "jobId": "str", # Required. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # Required. + "resultUrl": "str", # URL to download the result of the Export Job. 
+ Required. + "status": "str", # Job Status. Required. Known values are: "notStarted", + "running", "succeeded", "failed", "cancelled", "cancelling", and + "partiallyCompleted". "errors": [ { - "code": "str", # Required. One of a server-defined set of - error codes. Possible values include: "InvalidRequest", - "InvalidArgument", "Unauthorized", "Forbidden", "NotFound", - "ProjectNotFound", "OperationNotFound", "AzureCognitiveSearchNotFound", + "code": "str", # One of a server-defined set of error codes. + Required. Known values are: "InvalidRequest", "InvalidArgument", + "Unauthorized", "Forbidden", "NotFound", "ProjectNotFound", + "OperationNotFound", "AzureCognitiveSearchNotFound", "AzureCognitiveSearchIndexNotFound", "TooManyRequests", "AzureCognitiveSearchThrottling", - "AzureCognitiveSearchIndexLimitReached", "InternalServerError", + "AzureCognitiveSearchIndexLimitReached", "InternalServerError", and "ServiceUnavailable". + "message": "str", # A human-readable representation of the + error. Required. "details": [ ... ], "innererror": { - "code": "str", # Required. One of a server-defined - set of error codes. Possible values include: "InvalidRequest", + "code": "str", # One of a server-defined set of + error codes. Required. Known values are: "InvalidRequest", "InvalidParameterValue", "KnowledgeBaseNotFound", - "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", + "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and "ExtractionFailure". + "message": "str", # Error message. Required. "details": { "str": "str" # Optional. Error details. }, "innererror": ..., - "message": "str", # Required. Error message. "target": "str" # Optional. Error target. }, - "message": "str", # Required. A human-readable - representation of the error. "target": "str" # Optional. The target of the error. } ], - "expirationDateTime": "2020-02-20 00:00:00", # Optional. - "jobId": "str", # Required. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # Required. - "resultUrl": "str", # Required. URL to download the result of the Export - Job. - "status": "str" # Required. Job Status. Possible values include: - "notStarted", "running", "succeeded", "failed", "cancelled", "cancelling", - "partiallyCompleted". + "expirationDateTime": "2020-02-20 00:00:00" # Optional. 
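A hedged sketch of driving the `begin_export` long-running operation described above: the poller's result is the export job JSON, whose `status` and `resultUrl` fields are documented as required. The endpoint, key, and project name are placeholders.

```python
import asyncio

from azure.core.credentials import AzureKeyCredential
from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient


async def main():
    async with QuestionAnsweringAuthoringClient(
        "https://{myaccount}.api.cognitive.microsoft.com", AzureKeyCredential("{api-key}")
    ) as client:
        # Start the export LRO; format defaults to "json".
        poller = await client.begin_export("sample-project", format="json")
        job = await poller.result()

        # The completed job JSON carries the overall status and a download URL.
        print(job["status"])
        print(job["resultUrl"])


asyncio.run(main())
```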
} """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._export_initial( + raw_result = await self._export_initial( # type: ignore project_name=project_name, format=format, asset_kind=asset_kind, - api_version=api_version, cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) kwargs.pop("error_map", None) @@ -573,9 +720,12 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) # type: AsyncPollingMethod elif polling is False: - polling_method = AsyncNoPolling() + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: @@ -590,40 +740,57 @@ def get_long_running_output(pipeline_response): async def _import_assets_initial( # pylint: disable=inconsistent-return-statements self, project_name: str, - options: JSONType = None, + options: Optional[Union[JSON, IO]] = None, *, - format: Optional[str] = "json", + format: str = "json", asset_kind: Optional[str] = None, **kwargs: Any ) -> None: - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - if options is not None: - _json = options + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(options, (IO, bytes)): + _content = options else: - _json = None + if options is not None: + _json = options + else: + _json = None - request = build_import_assets_request_initial( + request = build_import_assets_request( project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, format=format, asset_kind=asset_kind, + content_type=content_type, + api_version=self._config.api_version, + json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # 
pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: @@ -636,30 +803,34 @@ async def _import_assets_initial( # pylint: disable=inconsistent-return-stateme if cls: return cls(pipeline_response, None, response_headers) - @distributed_trace_async - async def begin_import_assets( # pylint: disable=inconsistent-return-statements + @overload + async def begin_import_assets( self, project_name: str, - options: JSONType = None, + options: Optional[JSON] = None, *, - format: Optional[str] = "json", + format: str = "json", asset_kind: Optional[str] = None, + content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[None]: """Import project assets. Import project assets. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param options: Project assets the needs to be imported. - :type options: JSONType - :keyword format: Knowledge base Import or Export format. Possible values are: "json", "tsv", - and "excel". + :param options: Project assets the needs to be imported. Default value is None. + :type options: JSON + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". :paramtype format: str - :keyword asset_kind: Kind of the asset of the project. Possible values are: "qnas" or - "synonyms". + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. :paramtype asset_kind: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal @@ -669,7 +840,7 @@ async def begin_import_assets( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns None :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python @@ -785,18 +956,18 @@ async def begin_import_assets( # pylint: disable=inconsistent-return-statements "synonyms": [ { "alterations": [ - "str" # Required. Collection of word - alterations. + "str" # Collection of word alterations. + Required. ] } ] }, "fileUri": "str", # Optional. Import data File URI. "metadata": { + "language": "str", # Language of the text records. This is BCP-47 + representation of a language. For example, use "en" for English; "es" for + Spanish etc. If not set, use "en" for English as default. Required. "description": "str", # Optional. Description of the project. - "language": "str", # Required. Language of the text records. This is - BCP-47 representation of a language. For example, use "en" for English; "es" - for Spanish etc. If not set, use "en" for English as default. "multilingualResource": bool, # Optional. Set to true to enable creating knowledgebases in different languages for the same resource. 
"settings": { @@ -806,26 +977,109 @@ async def begin_import_assets( # pylint: disable=inconsistent-return-statements } } """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + + @overload + async def begin_import_assets( + self, + project_name: str, + options: Optional[IO] = None, + *, + format: str = "json", + asset_kind: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Import project assets. + + Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Project assets the needs to be imported. Default value is None. + :type options: IO + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". + :paramtype format: str + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False + for this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_import_assets( + self, + project_name: str, + options: Optional[Union[JSON, IO]] = None, + *, + format: str = "json", + asset_kind: Optional[str] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Import project assets. + + Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param options: Project assets the needs to be imported. Is either a model type or a IO type. + Default value is None. + :type options: JSON or IO + :keyword format: Knowledge base Import or Export format. Known values are: "json", "tsv", and + "excel". Default value is "json". + :paramtype format: str + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False + for this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._import_assets_initial( + raw_result = await self._import_assets_initial( # type: ignore project_name=project_name, options=options, format=format, asset_kind=asset_kind, - api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) @@ -834,9 +1088,12 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) # type: AsyncPollingMethod elif polling is False: - polling_method = AsyncNoPolling() + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: @@ -851,25 +1108,35 @@ def get_long_running_output(pipeline_response): async def _deploy_project_initial( # pylint: disable=inconsistent-return-statements self, project_name: str, deployment_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - request = build_deploy_project_request_initial( + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_deploy_project_request( project_name=project_name, deployment_name=deployment_name, - api_version=api_version, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: @@ -883,16 +1150,16 @@ async def _deploy_project_initial( # pylint: 
disable=inconsistent-return-statem return cls(pipeline_response, None, response_headers) @distributed_trace_async - async def begin_deploy_project( # pylint: disable=inconsistent-return-statements + async def begin_deploy_project( self, project_name: str, deployment_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Deploy project to production. Deploy project to production. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param deployment_name: The name of the specific deployment of the project to use. + :param deployment_name: The name of the specific deployment of the project to use. Required. :type deployment_name: str :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False @@ -903,24 +1170,27 @@ async def begin_deploy_project( # pylint: disable=inconsistent-return-statement Retry-After header is present. :return: An instance of AsyncLROPoller that returns None :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._deploy_project_initial( + raw_result = await self._deploy_project_initial( # type: ignore project_name=project_name, deployment_name=deployment_name, - api_version=api_version, cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) @@ -929,9 +1199,12 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) # type: AsyncPollingMethod elif polling is False: - polling_method = AsyncNoPolling() + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: @@ -945,90 +1218,78 @@ def get_long_running_output(pipeline_response): @distributed_trace def list_deployments( - self, - project_name: str, - *, - top: Optional[int] = None, - skip: Optional[int] = None, - maxpagesize: Optional[int] = None, - **kwargs: Any - ) -> AsyncIterable[JSONType]: + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncIterable[JSON]: """List all deployments of a project. List all deployments of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. 
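A hedged sketch of the deployment flow covered here: `begin_deploy_project` completes with no body, after which `list_deployments` pages through the project's deployments. The deployment name "production" and the other values are assumptions for illustration only.

```python
import asyncio

from azure.core.credentials import AzureKeyCredential
from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient


async def main():
    async with QuestionAnsweringAuthoringClient(
        "https://{myaccount}.api.cognitive.microsoft.com", AzureKeyCredential("{api-key}")
    ) as client:
        # Deploy the project; the poller completes with None.
        poller = await client.begin_deploy_project("sample-project", "production")
        await poller.result()

        # Deployments can then be enumerated page by page.
        async for deployment in client.list_deployments("sample-project"):
            print(deployment.get("deploymentName"), deployment.get("lastDeployedDateTime"))


asyncio.run(main())
```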
:type project_name: str - :keyword top: The maximum number of resources to return from the collection. + :keyword top: The maximum number of resources to return from the collection. Default value is + None. :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "deploymentName": "str", # Optional. Name of the deployment. - "lastDeployedDateTime": "2020-02-20 00:00:00" # Optional. - Represents the project last deployment date-time. - } - ] + response == { + "deploymentName": "str", # Optional. Name of the deployment. + "lastDeployedDateTime": "2020-02-20 00:00:00" # Optional. Represents the + project last deployment date-time. } """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls = kwargs.pop("cls", None) # type: ClsType[JSON] - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_deployments_request( project_name=project_name, - api_version=api_version, top=top, skip=skip, - maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore else: - - request = build_list_deployments_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(next_link, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" return 
request async def extract_data(pipeline_response): @@ -1041,7 +1302,7 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -1056,90 +1317,78 @@ async def get_next(next_link=None): @distributed_trace def list_synonyms( - self, - project_name: str, - *, - top: Optional[int] = None, - skip: Optional[int] = None, - maxpagesize: Optional[int] = None, - **kwargs: Any - ) -> AsyncIterable[JSONType]: + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncIterable[JSON]: """Gets all the synonyms of a project. Gets all the synonyms of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :keyword top: The maximum number of resources to return from the collection. + :keyword top: The maximum number of resources to return from the collection. Default value is + None. :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "alterations": [ - "str" # Required. Collection of word alterations. - ] - } + response == { + "alterations": [ + "str" # Collection of word alterations. Required. 
] } """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_synonyms_request( project_name=project_name, - api_version=api_version, top=top, skip=skip, - maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore else: - - request = build_list_synonyms_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(next_link, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" return request async def extract_data(pipeline_response): @@ -1152,7 +1401,7 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -1165,21 +1414,24 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) - @distributed_trace_async + @overload async def update_synonyms( # pylint: disable=inconsistent-return-statements - self, project_name: str, synonyms: JSONType, **kwargs: Any + self, project_name: str, synonyms: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> None: """Updates all the synonyms of a project. Updates all the synonyms of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param synonyms: All the synonyms of a project. - :type synonyms: JSONType + :param synonyms: All the synonyms of a project. Required. + :type synonyms: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/json". + :paramtype content_type: str :return: None :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python @@ -1190,35 +1442,92 @@ async def update_synonyms( # pylint: disable=inconsistent-return-statements "value": [ { "alterations": [ - "str" # Required. Collection of word alterations. + "str" # Collection of word alterations. Required. ] } ] } """ - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] + @overload + async def update_synonyms( # pylint: disable=inconsistent-return-statements + self, project_name: str, synonyms: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + Updates all the synonyms of a project. - _json = synonyms + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_synonyms( # pylint: disable=inconsistent-return-statements + self, project_name: str, synonyms: Union[JSON, IO], **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Is either a model type or a IO type. Required. + :type synonyms: JSON or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(synonyms, (IO, bytes)): + _content = synonyms + else: + _json = synonyms request = build_update_synonyms_request( project_name=project_name, - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [204]: @@ -1230,101 +1539,85 @@ async def update_synonyms( # pylint: disable=inconsistent-return-statements @distributed_trace def list_sources( - self, - project_name: str, - *, - top: Optional[int] = None, - skip: Optional[int] = None, - maxpagesize: Optional[int] = None, - **kwargs: Any - ) -> AsyncIterable[JSONType]: + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncIterable[JSON]: """Gets all the sources of a project. Gets all the sources of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :keyword top: The maximum number of resources to return from the collection. + :keyword top: The maximum number of resources to return from the collection. Default value is + None. :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "contentStructureKind": "unstructured", # Optional. Default - value is "unstructured". Content structure type for sources. Possible - values include: "unstructured". - "displayName": "str", # Optional. Friendly name of the - Source. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. 
- Date-time when the QnA was last updated. - "source": "str", # Optional. Unique source identifier. Name - of the file if it's a 'file' source; otherwise, the complete URL if it's - a 'url' source. - "sourceKind": "str", # Required. Supported source types. - Possible values include: "file", "url". - "sourceUri": "str" # Required. URI location for the file or - url. - } - ] + response == { + "sourceKind": "str", # Supported source types. Required. Known values are: + "file" and "url". + "sourceUri": "str", # URI location for the file or url. Required. + "contentStructureKind": "str", # Optional. Content structure type for + sources. "unstructured" + "displayName": "str", # Optional. Friendly name of the Source. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the + QnA was last updated. + "source": "str" # Optional. Unique source identifier. Name of the file if + it's a 'file' source; otherwise, the complete URL if it's a 'url' source. } """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_sources_request( project_name=project_name, - api_version=api_version, top=top, skip=skip, - maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore else: - - request = build_list_sources_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(next_link, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" return request async def extract_data(pipeline_response): @@ -1337,7 +1630,7 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access 
request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -1351,31 +1644,48 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) async def _update_sources_initial( # pylint: disable=inconsistent-return-statements - self, project_name: str, sources: List[JSONType], **kwargs: Any + self, project_name: str, sources: Union[List[JSON], IO], **kwargs: Any ) -> None: - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] - _json = sources + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(sources, (IO, bytes)): + _content = sources + else: + _json = sources - request = build_update_sources_request_initial( + request = build_update_sources_request( project_name=project_name, - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: @@ -1388,18 +1698,21 @@ async def _update_sources_initial( # pylint: disable=inconsistent-return-statem if cls: return cls(pipeline_response, None, response_headers) - @distributed_trace_async - async def begin_update_sources( # pylint: disable=inconsistent-return-statements - self, project_name: str, sources: List[JSONType], **kwargs: Any + @overload + async def begin_update_sources( + self, project_name: str, sources: List[JSON], *, content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[None]: """Updates the sources of a project. Updates the sources of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param sources: Update sources parameters of a project. - :type sources: list[JSONType] + :param sources: Update sources parameters of a project. Required. + :type sources: list[JSON] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
:keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal @@ -1409,7 +1722,7 @@ async def begin_update_sources( # pylint: disable=inconsistent-return-statement Retry-After header is present. :return: An instance of AsyncLROPoller that returns None :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python @@ -1417,45 +1730,102 @@ async def begin_update_sources( # pylint: disable=inconsistent-return-statement # JSON input template you can fill out and use as your body input. sources = [ { - "op": "str", # Required. Update operation type for assets. Possible - values include: "add", "delete", "replace". + "op": "str", # Update operation type for assets. Required. Known + values are: "add", "delete", and "replace". "value": { - "contentStructureKind": "unstructured", # Optional. Default - value is "unstructured". Content structure type for sources. Possible - values include: "unstructured". + "sourceKind": "str", # Supported source types. Required. + Known values are: "file" and "url". + "sourceUri": "str", # URI location for the file or url. + Required. + "contentStructureKind": "str", # Optional. Content structure + type for sources. "unstructured" "displayName": "str", # Optional. Friendly name of the Source. "refresh": bool, # Optional. Boolean flag used to refresh data from the Source. - "source": "str", # Optional. Unique source identifier. Name + "source": "str" # Optional. Unique source identifier. Name of the file if it's a 'file' source; otherwise, the complete URL if it's a 'url' source. - "sourceKind": "str", # Required. Supported source types. - Possible values include: "file", "url". - "sourceUri": "str" # Required. URI location for the file or - url. } } ] """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + + @overload + async def begin_update_sources( + self, project_name: str, sources: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Required. + :type sources: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False + for this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_sources( + self, project_name: str, sources: Union[List[JSON], IO], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Is either a list type or a IO type. + Required. + :type sources: list[JSON] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False + for this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._update_sources_initial( + raw_result = await self._update_sources_initial( # type: ignore project_name=project_name, sources=sources, - api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) @@ -1464,9 +1834,12 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) # type: AsyncPollingMethod elif polling is False: - polling_method = AsyncNoPolling() + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: @@ -1486,178 +1859,165 @@ def list_qnas( source: Optional[str] = None, top: Optional[int] = None, skip: Optional[int] = None, - maxpagesize: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable[JSONType]: + ) -> AsyncIterable[JSON]: """Gets all the QnAs of a project. Gets all the QnAs of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :keyword source: Source of the QnA. + :keyword source: Source of the QnA. 
Default value is None. :paramtype source: str - :keyword top: The maximum number of resources to return from the collection. + :keyword top: The maximum number of resources to return from the collection. Default value is + None. :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError + :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ + response == { + "activeLearningSuggestions": [ { - "activeLearningSuggestions": [ + "clusterHead": "str", # Optional. Question chosen as the + head of suggested questions cluster by Active Learning clustering + algorithm. + "suggestedQuestions": [ { - "clusterHead": "str", # Optional. Question - chosen as the head of suggested questions cluster by Active - Learning clustering algorithm. - "suggestedQuestions": [ - { - "autoSuggestedCount": 0, # - Optional. The number of times the question was suggested - automatically by the Active Learning algorithm. - "question": "str", # - Optional. Question suggested by the Active Learning - feature. - "userSuggestedCount": 0 # - Optional. The number of times the question was suggested - explicitly by the user. - } - ] + "autoSuggestedCount": 0, # Optional. The + number of times the question was suggested automatically by the + Active Learning algorithm. + "question": "str", # Optional. Question + suggested by the Active Learning feature. + "userSuggestedCount": 0 # Optional. The + number of times the question was suggested explicitly by the + user. } - ], - "answer": "str", # Optional. Answer text. - "dialog": { - "isContextOnly": bool, # Optional. To mark if a - prompt is relevant only with a previous question or not. If true, do - not include this QnA as answer for queries without context; - otherwise, ignores context and includes this QnA in answers. - "prompts": [ - { - "displayOrder": 0, # Optional. Index - of the prompt. It is used for ordering of the prompts. - "displayText": "str", # Optional. - Text displayed to represent a follow up question prompt. - "qna": { - "activeLearningSuggestions": - [ + ] + } + ], + "answer": "str", # Optional. Answer text. + "dialog": { + "isContextOnly": bool, # Optional. To mark if a prompt is relevant + only with a previous question or not. If true, do not include this QnA as + answer for queries without context; otherwise, ignores context and includes + this QnA in answers. + "prompts": [ + { + "displayOrder": 0, # Optional. Index of the prompt. + It is used for ordering of the prompts. + "displayText": "str", # Optional. Text displayed to + represent a follow up question prompt. + "qna": { + "activeLearningSuggestions": [ + { + "clusterHead": "str", # + Optional. Question chosen as the head of suggested + questions cluster by Active Learning clustering + algorithm. + "suggestedQuestions": [ { - "clusterHead": "str", # Optional. Question - chosen as the head of suggested questions cluster - by Active Learning clustering algorithm. 
- "suggestedQuestions": [ - { - "autoSuggestedCount": 0, # Optional. The - number of times the question was - suggested automatically by the Active - Learning algorithm. - "question": "str", # Optional. Question - suggested by the Active Learning feature. - "userSuggestedCount": 0 # Optional. The - number of times the question was - suggested explicitly by the user. - } - ] + "autoSuggestedCount": 0, # Optional. The number + of times the question was suggested automatically + by the Active Learning algorithm. + "question": + "str", # Optional. Question suggested by the + Active Learning feature. + "userSuggestedCount": 0 # Optional. The number + of times the question was suggested explicitly by + the user. } - ], - "answer": "str", # Optional. - Answer text. - "dialog": ..., - "id": 0, # Optional. Unique - ID for the QnA. - "metadata": { - "str": "str" # - Optional. Metadata associated with the answer, useful - to categorize or filter question answers. - }, - "questions": [ - "str" # Optional. - List of questions associated with the answer. - ], - "source": "str" # Optional. - Source from which QnA was indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs - . - }, - "qnaId": 0 # Optional. ID of the QnA - corresponding to the prompt. - } - ] - }, - "id": 0, # Optional. Unique ID for the QnA. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. - Date-time when the QnA was last updated. - "metadata": { - "str": "str" # Optional. Metadata associated with - the answer, useful to categorize or filter question answers. - }, - "questions": [ - "str" # Optional. List of questions associated with - the answer. - ], - "source": "str" # Optional. Source from which QnA was - indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs . - } - ] + ] + } + ], + "answer": "str", # Optional. Answer text. + "dialog": ..., + "id": 0, # Optional. Unique ID for the QnA. + "metadata": { + "str": "str" # Optional. Metadata + associated with the answer, useful to categorize or filter + question answers. + }, + "questions": [ + "str" # Optional. List of questions + associated with the answer. + ], + "source": "str" # Optional. Source from + which QnA was indexed e.g. + https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs + . + }, + "qnaId": 0 # Optional. ID of the QnA corresponding + to the prompt. + } + ] + }, + "id": 0, # Optional. Unique ID for the QnA. + "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. Date-time when the + QnA was last updated. + "metadata": { + "str": "str" # Optional. Metadata associated with the answer, useful + to categorize or filter question answers. + }, + "questions": [ + "str" # Optional. List of questions associated with the answer. + ], + "source": "str" # Optional. Source from which QnA was indexed e.g. + https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs . 
} """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + cls = kwargs.pop("cls", None) # type: ClsType[JSON] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_qnas_request( project_name=project_name, - api_version=api_version, source=source, top=top, skip=skip, - maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore else: - - request = build_list_qnas_request( - project_name=project_name, - api_version=api_version, - source=source, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) + # make call to next link with the client's api-version + _parsed_next_link = urlparse(next_link) + _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) path_format_arguments = { "Endpoint": self._serialize.url( "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } - request.url = self._client.format_url(next_link, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" return request async def extract_data(pipeline_response): @@ -1670,7 +2030,7 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response @@ -1684,31 +2044,48 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) async def _update_qnas_initial( # pylint: disable=inconsistent-return-statements - self, project_name: str, qnas: List[JSONType], **kwargs: Any + self, project_name: str, qnas: Union[List[JSON], IO], **kwargs: Any ) -> None: - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - api_version = 
kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] - _json = qnas + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(qnas, (IO, bytes)): + _content = qnas + else: + _json = qnas - request = build_update_qnas_request_initial( + request = build_update_qnas_request( project_name=project_name, - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: @@ -1721,18 +2098,21 @@ async def _update_qnas_initial( # pylint: disable=inconsistent-return-statement if cls: return cls(pipeline_response, None, response_headers) - @distributed_trace_async - async def begin_update_qnas( # pylint: disable=inconsistent-return-statements - self, project_name: str, qnas: List[JSONType], **kwargs: Any + @overload + async def begin_update_qnas( + self, project_name: str, qnas: List[JSON], *, content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[None]: """Updates the QnAs of a project. Updates the QnAs of a project. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param qnas: Update QnAs parameters of a project. - :type qnas: list[JSONType] + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: list[JSON] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal @@ -1742,7 +2122,7 @@ async def begin_update_qnas( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns None :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: Example: .. code-block:: python @@ -1750,8 +2130,8 @@ async def begin_update_qnas( # pylint: disable=inconsistent-return-statements # JSON input template you can fill out and use as your body input. qnas = [ { - "op": "str", # Required. Update operation type for assets. Possible - values include: "add", "delete", "replace". + "op": "str", # Update operation type for assets. Required. Known + values are: "add", "delete", and "replace". 
"value": { "activeLearningSuggestions": [ { @@ -1807,24 +2187,81 @@ async def begin_update_qnas( # pylint: disable=inconsistent-return-statements } ] """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + + @overload + async def begin_update_qnas( + self, project_name: str, qnas: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False + for this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_qnas( + self, project_name: str, qnas: Union[List[JSON], IO], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Is either a list type or a IO type. Required. + :type qnas: list[JSON] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False + for this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._update_qnas_initial( + raw_result = await self._update_qnas_initial( # type: ignore project_name=project_name, qnas=qnas, - api_version=api_version, content_type=content_type, cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) @@ -1833,9 +2270,12 @@ def get_long_running_output(pipeline_response): } if polling is True: - polling_method = AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + polling_method = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) # type: AsyncPollingMethod elif polling is False: - polling_method = AsyncNoPolling() + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: @@ -1847,21 +2287,24 @@ def get_long_running_output(pipeline_response): ) return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - @distributed_trace_async + @overload async def add_feedback( # pylint: disable=inconsistent-return-statements - self, project_name: str, feedback: JSONType, **kwargs: Any + self, project_name: str, feedback: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> None: """Update Active Learning feedback. Update Active Learning feedback. - :param project_name: The name of the project to use. + :param project_name: The name of the project to use. Required. :type project_name: str - :param feedback: Feedback for Active Learning. - :type feedback: JSONType + :param feedback: Feedback for Active Learning. Required. + :type feedback: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :return: None :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: Example: .. 
code-block:: python @@ -1878,29 +2321,86 @@ async def add_feedback( # pylint: disable=inconsistent-return-statements ] } """ - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] + @overload + async def add_feedback( # pylint: disable=inconsistent-return-statements + self, project_name: str, feedback: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param feedback: Feedback for Active Learning. Required. + :type feedback: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def add_feedback( # pylint: disable=inconsistent-return-statements + self, project_name: str, feedback: Union[JSON, IO], **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param feedback: Feedback for Active Learning. Is either a model type or a IO type. Required. + :type feedback: JSON or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} - _json = feedback + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(feedback, (IO, bytes)): + _content = feedback + else: + _json = feedback request = build_add_feedback_request( project_name=project_name, - api_version=api_version, content_type=content_type, + api_version=self._config.api_version, json=_json, + content=_content, + headers=_headers, + params=_params, ) path_format_arguments = { "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } - request.url = self._client.format_url(request.url, **path_format_arguments) + request.url = self._client.format_url(request.url, **path_format_arguments) # type: ignore - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [204]: diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_patch.py 
b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_patch.py similarity index 57% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_patch.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_patch.py index 633631e26990..ea5e72f7df1a 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_patch.py @@ -1,40 +1,16 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. -# This file is used for handwritten extensions to the generated code. 
Example: -# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md - -import importlib -from typing import Union, Any +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List, Union, Any from azure.core.credentials import AzureKeyCredential from azure.core.credentials_async import AsyncTokenCredential from azure.core.pipeline.policies import AzureKeyCredentialPolicy, AsyncBearerTokenCredentialPolicy -from ._question_answering_projects_client import QuestionAnsweringProjectsClient \ - as QuestionAnsweringProjectsClientGenerated +from ._client import QuestionAnsweringAuthoringClient as QuestionAnsweringAuthoringClientGenerated def _authentication_policy(credential, **kwargs): @@ -56,7 +32,7 @@ def _authentication_policy(credential, **kwargs): return authentication_policy -class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientGenerated): +class QuestionAnsweringAuthoringClient(QuestionAnsweringAuthoringClientGenerated): """The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language @@ -77,12 +53,13 @@ class QuestionAnsweringProjectsClient(QuestionAnsweringProjectsClientGenerated): Retry-After header is present. """ - def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, AsyncTokenCredential], **kwargs: Any) -> None: + def __init__( + self, endpoint: str, credential: Union[AzureKeyCredential, AsyncTokenCredential], **kwargs: Any + ) -> None: try: endpoint = endpoint.rstrip("/") except AttributeError: raise ValueError("Parameter 'endpoint' must be a string.") - super().__init__( endpoint=endpoint, credential=credential, # type: ignore @@ -91,6 +68,15 @@ def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, AsyncTok ) +__all__: List[str] = [ + "QuestionAnsweringAuthoringClient" +] # Add all objects you want publicly available to users at this package level + + def patch_sdk(): - current_package = importlib.import_module("azure.ai.language.questionanswering.projects.aio") - current_package.QuestionAnsweringProjectsClient = QuestionAnsweringProjectsClient + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_vendor.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_vendor.py new file mode 100644 index 000000000000..00319d6ffe88 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/aio/_vendor.py @@ -0,0 +1,26 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from abc import ABC +from typing import TYPE_CHECKING + +from ._configuration import QuestionAnsweringAuthoringClientConfiguration + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core import AsyncPipelineClient + + from .._serialization import Deserializer, Serializer + + +class MixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "AsyncPipelineClient" + _config: QuestionAnsweringAuthoringClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/py.typed b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/py.typed similarity index 100% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/py.typed rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/authoring/py.typed diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/__init__.py index df4e7a017221..773c36978813 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/__init__.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/__init__.py @@ -6,29 +6,26 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._models_py3 import AnswerSpan -from ._models_py3 import AnswersFromTextOptions -from ._models_py3 import AnswersFromTextResult -from ._models_py3 import AnswersOptions -from ._models_py3 import AnswersResult -from ._models_py3 import Error -from ._models_py3 import ErrorResponse -from ._models_py3 import InnerErrorModel -from ._models_py3 import KnowledgeBaseAnswer -from ._models_py3 import KnowledgeBaseAnswerContext -from ._models_py3 import KnowledgeBaseAnswerDialog -from ._models_py3 import KnowledgeBaseAnswerPrompt -from ._models_py3 import MetadataFilter -from ._models_py3 import QueryFilters -from ._models_py3 import ShortAnswerOptions -from ._models_py3 import TextAnswer -from ._models_py3 import TextDocument +from ._models import AnswerSpan +from ._models import AnswersFromTextOptions +from ._models import AnswersFromTextResult +from ._models import AnswersOptions +from ._models import AnswersResult +from ._models import Error +from ._models import ErrorResponse +from ._models import InnerErrorModel +from ._models import KnowledgeBaseAnswer +from ._models import KnowledgeBaseAnswerContext +from ._models import KnowledgeBaseAnswerDialog +from ._models import KnowledgeBaseAnswerPrompt +from ._models import MetadataFilter +from ._models import QueryFilters +from ._models import ShortAnswerOptions +from ._models import TextAnswer +from ._models import TextDocument - -from ._question_answering_client_enums import ( - ErrorCode, - InnerErrorCode, -) +from ._enums import ErrorCode +from ._enums import InnerErrorCode from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk 
diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_question_answering_client_enums.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_enums.py similarity index 90% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_question_answering_client_enums.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_enums.py index 3440ca287045..dbc6c082e762 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_question_answering_client_enums.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_enums.py @@ -7,11 +7,10 @@ # -------------------------------------------------------------------------- from enum import Enum -from six import with_metaclass from azure.core import CaseInsensitiveEnumMeta -class ErrorCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): +class ErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Human-readable error code.""" INVALID_REQUEST = "InvalidRequest" @@ -30,7 +29,7 @@ class ErrorCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): SERVICE_UNAVAILABLE = "ServiceUnavailable" -class InnerErrorCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): +class InnerErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Human-readable error code.""" INVALID_REQUEST = "InvalidRequest" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_models_py3.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_models.py similarity index 87% rename from sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_models_py3.py rename to sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_models.py index 5ae55f46ea69..840e1221883d 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_models_py3.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_models.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -6,26 +7,29 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import sys from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization - -from ._question_answering_client_enums import * +from .. import _serialization if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - import __init__ as _models + from .. 
import models as _models +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class AnswersFromTextOptions(msrest.serialization.Model): +class AnswersFromTextOptions(_serialization.Model): """The question and text record parameters to answer. All required parameters must be populated in order to send to Azure. - :ivar question: Required. User question to query against the given text records. + :ivar question: User question to query against the given text records. Required. :vartype question: str - :ivar text_documents: Required. Text records to be searched for given question. + :ivar text_documents: Text records to be searched for given question. Required. :vartype text_documents: list[~azure.ai.language.questionanswering.models.TextDocument] :ivar language: Language of the text records. This is BCP-47 representation of a language. For example, use "en" for English; "es" for Spanish etc. If not set, use "en" for English as @@ -48,22 +52,22 @@ def __init__( self, *, question: str, text_documents: List["_models.TextDocument"], language: Optional[str] = None, **kwargs ): """ - :keyword question: Required. User question to query against the given text records. + :keyword question: User question to query against the given text records. Required. :paramtype question: str - :keyword text_documents: Required. Text records to be searched for given question. + :keyword text_documents: Text records to be searched for given question. Required. :paramtype text_documents: list[~azure.ai.language.questionanswering.models.TextDocument] :keyword language: Language of the text records. This is BCP-47 representation of a language. For example, use "en" for English; "es" for Spanish etc. If not set, use "en" for English as default. :paramtype language: str """ - super(AnswersFromTextOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.question = question self.text_documents = text_documents self.language = language -class AnswersFromTextResult(msrest.serialization.Model): +class AnswersFromTextResult(_serialization.Model): """Represents the answer results. :ivar answers: Represents the answer results. @@ -79,11 +83,11 @@ def __init__(self, *, answers: Optional[List["_models.TextAnswer"]] = None, **kw :keyword answers: Represents the answer results. :paramtype answers: list[~azure.ai.language.questionanswering.models.TextAnswer] """ - super(AnswersFromTextResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.answers = answers -class AnswersOptions(msrest.serialization.Model): +class AnswersOptions(_serialization.Model): """Parameters to query a knowledge base. :ivar qna_id: Exact QnA ID to fetch from the knowledge base, this field takes priority over @@ -166,7 +170,7 @@ def __init__( Sources. :paramtype include_unstructured_sources: bool """ - super(AnswersOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.qna_id = qna_id self.question = question self.top = top @@ -179,7 +183,7 @@ def __init__( self.include_unstructured_sources = include_unstructured_sources -class AnswerSpan(msrest.serialization.Model): +class AnswerSpan(_serialization.Model): """Answer span object of QnA. :ivar text: Predicted text of answer span. @@ -222,14 +226,14 @@ def __init__( :keyword length: The length of the answer span. 
:paramtype length: int """ - super(AnswerSpan, self).__init__(**kwargs) + super().__init__(**kwargs) self.text = text self.confidence = confidence self.offset = offset self.length = length -class AnswersResult(msrest.serialization.Model): +class AnswersResult(_serialization.Model): """Represents List of Question Answers. :ivar answers: Represents Answer Result list. @@ -245,22 +249,22 @@ def __init__(self, *, answers: Optional[List["_models.KnowledgeBaseAnswer"]] = N :keyword answers: Represents Answer Result list. :paramtype answers: list[~azure.ai.language.questionanswering.models.KnowledgeBaseAnswer] """ - super(AnswersResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.answers = answers -class Error(msrest.serialization.Model): +class Error(_serialization.Model): """The error object. All required parameters must be populated in order to send to Azure. - :ivar code: Required. One of a server-defined set of error codes. Possible values include: + :ivar code: One of a server-defined set of error codes. Required. Known values are: "InvalidRequest", "InvalidArgument", "Unauthorized", "Forbidden", "NotFound", "ProjectNotFound", "OperationNotFound", "AzureCognitiveSearchNotFound", "AzureCognitiveSearchIndexNotFound", "TooManyRequests", "AzureCognitiveSearchThrottling", - "AzureCognitiveSearchIndexLimitReached", "InternalServerError", "ServiceUnavailable". + "AzureCognitiveSearchIndexLimitReached", "InternalServerError", and "ServiceUnavailable". :vartype code: str or ~azure.ai.language.questionanswering.models.ErrorCode - :ivar message: Required. A human-readable representation of the error. + :ivar message: A human-readable representation of the error. Required. :vartype message: str :ivar target: The target of the error. :vartype target: str @@ -295,13 +299,13 @@ def __init__( **kwargs ): """ - :keyword code: Required. One of a server-defined set of error codes. Possible values include: + :keyword code: One of a server-defined set of error codes. Required. Known values are: "InvalidRequest", "InvalidArgument", "Unauthorized", "Forbidden", "NotFound", "ProjectNotFound", "OperationNotFound", "AzureCognitiveSearchNotFound", "AzureCognitiveSearchIndexNotFound", "TooManyRequests", "AzureCognitiveSearchThrottling", - "AzureCognitiveSearchIndexLimitReached", "InternalServerError", "ServiceUnavailable". + "AzureCognitiveSearchIndexLimitReached", "InternalServerError", and "ServiceUnavailable". :paramtype code: str or ~azure.ai.language.questionanswering.models.ErrorCode - :keyword message: Required. A human-readable representation of the error. + :keyword message: A human-readable representation of the error. Required. :paramtype message: str :keyword target: The target of the error. :paramtype target: str @@ -311,7 +315,7 @@ def __init__( about the error. :paramtype innererror: ~azure.ai.language.questionanswering.models.InnerErrorModel """ - super(Error, self).__init__(**kwargs) + super().__init__(**kwargs) self.code = code self.message = message self.target = target @@ -319,7 +323,7 @@ def __init__( self.innererror = innererror -class ErrorResponse(msrest.serialization.Model): +class ErrorResponse(_serialization.Model): """Error response. :ivar error: The error object. @@ -335,20 +339,20 @@ def __init__(self, *, error: Optional["_models.Error"] = None, **kwargs): :keyword error: The error object. 
:paramtype error: ~azure.ai.language.questionanswering.models.Error """ - super(ErrorResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.error = error -class InnerErrorModel(msrest.serialization.Model): +class InnerErrorModel(_serialization.Model): """An object containing more specific information about the error. As per Microsoft One API guidelines - https://github.com/Microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses. All required parameters must be populated in order to send to Azure. - :ivar code: Required. One of a server-defined set of error codes. Possible values include: + :ivar code: One of a server-defined set of error codes. Required. Known values are: "InvalidRequest", "InvalidParameterValue", "KnowledgeBaseNotFound", - "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", "ExtractionFailure". + "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and "ExtractionFailure". :vartype code: str or ~azure.ai.language.questionanswering.models.InnerErrorCode - :ivar message: Required. Error message. + :ivar message: Error message. Required. :vartype message: str :ivar details: Error details. :vartype details: dict[str, str] @@ -383,11 +387,11 @@ def __init__( **kwargs ): """ - :keyword code: Required. One of a server-defined set of error codes. Possible values include: + :keyword code: One of a server-defined set of error codes. Required. Known values are: "InvalidRequest", "InvalidParameterValue", "KnowledgeBaseNotFound", - "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", "ExtractionFailure". + "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", and "ExtractionFailure". :paramtype code: str or ~azure.ai.language.questionanswering.models.InnerErrorCode - :keyword message: Required. Error message. + :keyword message: Error message. Required. :paramtype message: str :keyword details: Error details. :paramtype details: dict[str, str] @@ -397,7 +401,7 @@ def __init__( about the error. :paramtype innererror: ~azure.ai.language.questionanswering.models.InnerErrorModel """ - super(InnerErrorModel, self).__init__(**kwargs) + super().__init__(**kwargs) self.code = code self.message = message self.details = details @@ -405,7 +409,7 @@ def __init__( self.innererror = innererror -class KnowledgeBaseAnswer(msrest.serialization.Model): +class KnowledgeBaseAnswer(_serialization.Model): """Represents knowledge base answer. :ivar questions: List of questions associated with the answer. @@ -474,7 +478,7 @@ def __init__( :keyword short_answer: Answer span object of QnA with respect to user's question. :paramtype short_answer: ~azure.ai.language.questionanswering.models.AnswerSpan """ - super(KnowledgeBaseAnswer, self).__init__(**kwargs) + super().__init__(**kwargs) self.questions = questions self.answer = answer self.confidence = confidence @@ -485,12 +489,12 @@ def __init__( self.short_answer = short_answer -class KnowledgeBaseAnswerContext(msrest.serialization.Model): +class KnowledgeBaseAnswerContext(_serialization.Model): """Context object with previous QnA's information. All required parameters must be populated in order to send to Azure. - :ivar previous_qna_id: Required. Previous turn top answer result QnA ID. + :ivar previous_qna_id: Previous turn top answer result QnA ID. Required. :vartype previous_qna_id: int :ivar previous_question: Previous user query. 
:vartype previous_question: str @@ -507,17 +511,17 @@ class KnowledgeBaseAnswerContext(msrest.serialization.Model): def __init__(self, *, previous_qna_id: int, previous_question: Optional[str] = None, **kwargs): """ - :keyword previous_qna_id: Required. Previous turn top answer result QnA ID. + :keyword previous_qna_id: Previous turn top answer result QnA ID. Required. :paramtype previous_qna_id: int :keyword previous_question: Previous user query. :paramtype previous_question: str """ - super(KnowledgeBaseAnswerContext, self).__init__(**kwargs) + super().__init__(**kwargs) self.previous_qna_id = previous_qna_id self.previous_question = previous_question -class KnowledgeBaseAnswerDialog(msrest.serialization.Model): +class KnowledgeBaseAnswerDialog(_serialization.Model): """Dialog associated with Answer. :ivar is_context_only: To mark if a prompt is relevant only with a previous question or not. If @@ -552,12 +556,12 @@ def __init__( :keyword prompts: List of prompts associated with the answer. :paramtype prompts: list[~azure.ai.language.questionanswering.models.KnowledgeBaseAnswerPrompt] """ - super(KnowledgeBaseAnswerDialog, self).__init__(**kwargs) + super().__init__(**kwargs) self.is_context_only = is_context_only self.prompts = prompts -class KnowledgeBaseAnswerPrompt(msrest.serialization.Model): +class KnowledgeBaseAnswerPrompt(_serialization.Model): """Prompt for an answer. :ivar display_order: Index of the prompt - used in ordering of the prompts. @@ -569,7 +573,7 @@ class KnowledgeBaseAnswerPrompt(msrest.serialization.Model): """ _validation = { - "display_text": {"max_length": 200, "min_length": 0}, + "display_text": {"max_length": 200}, } _attribute_map = { @@ -594,17 +598,17 @@ def __init__( :keyword display_text: Text displayed to represent a follow up question prompt. :paramtype display_text: str """ - super(KnowledgeBaseAnswerPrompt, self).__init__(**kwargs) + super().__init__(**kwargs) self.display_order = display_order self.qna_id = qna_id self.display_text = display_text -class MetadataFilter(msrest.serialization.Model): +class MetadataFilter(_serialization.Model): """Find QnAs that are associated with the given list of metadata. :ivar metadata: - :vartype metadata: list[any] + :vartype metadata: list[JSON] :ivar logical_operation: Operation used to join metadata filters. :vartype logical_operation: str """ @@ -614,19 +618,19 @@ class MetadataFilter(msrest.serialization.Model): "logical_operation": {"key": "logicalOperation", "type": "str"}, } - def __init__(self, *, metadata: Optional[List[Any]] = None, logical_operation: Optional[str] = None, **kwargs): + def __init__(self, *, metadata: Optional[List[JSON]] = None, logical_operation: Optional[str] = None, **kwargs): """ :keyword metadata: - :paramtype metadata: list[any] + :paramtype metadata: list[JSON] :keyword logical_operation: Operation used to join metadata filters. :paramtype logical_operation: str """ - super(MetadataFilter, self).__init__(**kwargs) + super().__init__(**kwargs) self.metadata = metadata self.logical_operation = logical_operation -class QueryFilters(msrest.serialization.Model): +class QueryFilters(_serialization.Model): """filters over knowledge base. :ivar metadata_filter: Find QnAs that are associated with the given list of metadata. @@ -661,20 +665,20 @@ def __init__( :keyword logical_operation: Logical operation used to join metadata filter with source filter. 
:paramtype logical_operation: str """ - super(QueryFilters, self).__init__(**kwargs) + super().__init__(**kwargs) self.metadata_filter = metadata_filter self.source_filter = source_filter self.logical_operation = logical_operation -class ShortAnswerOptions(msrest.serialization.Model): +class ShortAnswerOptions(_serialization.Model): """To configure Answer span prediction feature. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar enable: Enable or disable Answer Span prediction. Has constant value: True. + :ivar enable: Enable or disable Answer Span prediction. Required. Default value is True. :vartype enable: bool :ivar confidence_threshold: Minimum threshold score required to include an answer span, value ranges from 0 to 1. @@ -705,12 +709,12 @@ def __init__(self, *, confidence_threshold: Optional[float] = None, top: Optiona :keyword top: Number of Top answers to be considered for span prediction from 1 to 10. :paramtype top: int """ - super(ShortAnswerOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.confidence_threshold = confidence_threshold self.top = top -class TextAnswer(msrest.serialization.Model): +class TextAnswer(_serialization.Model): """Represents answer result. :ivar answer: Answer. @@ -745,7 +749,7 @@ def __init__( *, answer: Optional[str] = None, confidence: Optional[float] = None, - id: Optional[str] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin short_answer: Optional["_models.AnswerSpan"] = None, offset: Optional[int] = None, length: Optional[int] = None, @@ -765,7 +769,7 @@ def __init__( :keyword length: The length of the sentence. :paramtype length: int """ - super(TextAnswer, self).__init__(**kwargs) + super().__init__(**kwargs) self.answer = answer self.confidence = confidence self.id = id @@ -774,14 +778,14 @@ def __init__( self.length = length -class TextDocument(msrest.serialization.Model): +class TextDocument(_serialization.Model): """Represent input text record to be queried. All required parameters must be populated in order to send to Azure. - :ivar id: Required. Unique identifier for the text record. + :ivar id: Unique identifier for the text record. Required. :vartype id: str - :ivar text: Required. Text contents of the record. + :ivar text: Text contents of the record. Required. :vartype text: str """ @@ -795,13 +799,13 @@ class TextDocument(msrest.serialization.Model): "text": {"key": "text", "type": "str"}, } - def __init__(self, *, id: str, text: str, **kwargs): + def __init__(self, *, id: str, text: str, **kwargs): # pylint: disable=redefined-builtin """ - :keyword id: Required. Unique identifier for the text record. + :keyword id: Unique identifier for the text record. Required. :paramtype id: str - :keyword text: Required. Text contents of the record. + :keyword text: Text contents of the record. Required. 
:paramtype text: str """ - super(TextDocument, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.text = text diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_patch.py index e2c03178c549..203edbc55228 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_patch.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/models/_patch.py @@ -7,7 +7,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ from typing import List, Optional, Tuple, Union -from ._models_py3 import ( +from ._models import ( MetadataFilter as MetadataFilterGenerated, AnswersFromTextOptions as AnswersFromTextOptionsGenerated, TextDocument, diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_operations/_operations.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_operations/_operations.py deleted file mode 100644 index 5a8ce8c692bd..000000000000 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/_operations/_operations.py +++ /dev/null @@ -1,2535 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.polling.base_polling import LROBasePolling -from azure.core.rest import HttpRequest -from azure.core.tracing.decorator import distributed_trace - -from .._vendor import _format_url_section - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union - - T = TypeVar("T") - JSONType = Any - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False -# fmt: off - -def build_list_projects_request( - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[int] - maxpagesize = kwargs.pop('maxpagesize', None) # type: Optional[int] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects" - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if top is not None: - _query_parameters['top'] = _SERIALIZER.query("top", top, 'int') - if skip is not None: - _query_parameters['skip'] = _SERIALIZER.query("skip", skip, 'int') - if maxpagesize is not None: - _query_parameters['maxpagesize'] = _SERIALIZER.query("maxpagesize", maxpagesize, 'int') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_get_project_details_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_project_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_project_request_initial( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_export_request_initial( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - format = kwargs.pop('format', "json") # type: Optional[str] - asset_kind = kwargs.pop('asset_kind', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/:export" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if format is not None: - _query_parameters['format'] = _SERIALIZER.query("format", format, 'str') - if asset_kind is not None: - _query_parameters['assetKind'] = _SERIALIZER.query("asset_kind", asset_kind, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_import_assets_request_initial( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - format = kwargs.pop('format', "json") # type: Optional[str] - asset_kind = kwargs.pop('asset_kind', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/:import" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if format is not None: - _query_parameters['format'] = _SERIALIZER.query("format", format, 'str') - if asset_kind is not None: - _query_parameters['assetKind'] = _SERIALIZER.query("asset_kind", asset_kind, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_deploy_project_request_initial( - project_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_deployments_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[int] - maxpagesize = kwargs.pop('maxpagesize', None) # type: Optional[int] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/deployments" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if top is not None: - _query_parameters['top'] = _SERIALIZER.query("top", top, 'int') - if skip is not None: - _query_parameters['skip'] = _SERIALIZER.query("skip", skip, 'int') - if maxpagesize is not None: - _query_parameters['maxpagesize'] = _SERIALIZER.query("maxpagesize", maxpagesize, 'int') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_synonyms_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[int] - maxpagesize = kwargs.pop('maxpagesize', None) # type: Optional[int] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/synonyms" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if top is not None: - _query_parameters['top'] = _SERIALIZER.query("top", top, 'int') - if skip is not None: - _query_parameters['skip'] = _SERIALIZER.query("skip", skip, 'int') - if maxpagesize is not None: - _query_parameters['maxpagesize'] = _SERIALIZER.query("maxpagesize", maxpagesize, 'int') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_synonyms_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/synonyms" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_sources_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[int] - maxpagesize = kwargs.pop('maxpagesize', None) # type: Optional[int] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/sources" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if top is not None: - _query_parameters['top'] = _SERIALIZER.query("top", top, 'int') - if skip is not None: - _query_parameters['skip'] = _SERIALIZER.query("skip", skip, 'int') - if maxpagesize is not None: - _query_parameters['maxpagesize'] = _SERIALIZER.query("maxpagesize", maxpagesize, 'int') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_sources_request_initial( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/sources" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_qnas_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - source = kwargs.pop('source', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[int] - maxpagesize = kwargs.pop('maxpagesize', None) # type: Optional[int] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/qnas" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if source is not None: - _query_parameters['source'] = _SERIALIZER.query("source", source, 'str') - if top is not None: - _query_parameters['top'] = _SERIALIZER.query("top", top, 'int') - if skip is not None: - _query_parameters['skip'] = _SERIALIZER.query("skip", skip, 'int') - if maxpagesize is not None: - _query_parameters['maxpagesize'] = _SERIALIZER.query("maxpagesize", maxpagesize, 'int') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_qnas_request_initial( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/qnas" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_add_feedback_request( - project_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2021-10-01") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" - # Construct URL - _url = "/query-knowledgebases/projects/{projectName}/feedback" - path_format_arguments = { - "projectName": _SERIALIZER.url("project_name", project_name, 'str', max_length=100, min_length=0), - } - - _url = _format_url_section(_url, **path_format_arguments) - - # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - - # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class QuestionAnsweringProjectsClientOperationsMixin(object): # pylint: disable=too-many-public-methods - @distributed_trace - def list_projects( - self, **kwargs # type: Any - ): - # type: (...) -> Iterable[JSONType] - """Gets all projects for a user. - - Gets all projects for a user. - - :keyword top: The maximum number of resources to return from the collection. - :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. - :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "createdDateTime": "2020-02-20 00:00:00", # Optional. - Project creation date-time. - "description": "str", # Optional. Description of the - project. - "language": "str", # Optional. Language of the text records. - This is BCP-47 representation of a language. For example, use "en" for - English; "es" for Spanish etc. 
If not set, use "en" for English as - default. - "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. - Represents the project last deployment date-time. - "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. - Represents the project last modified date-time. - "multilingualResource": bool, # Optional. Resource enabled - for multiple languages across projects or not. - "projectName": "str", # Optional. Name of the project. - "settings": { - "defaultAnswer": "str" # Optional. Default Answer - response when no good match is found in the knowledge base. - } - } - ] - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - top = kwargs.pop("top", None) # type: Optional[int] - skip = kwargs.pop("skip", None) # type: Optional[int] - maxpagesize = kwargs.pop("maxpagesize", None) # type: Optional[int] - - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_projects_request( - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - else: - - request = build_list_projects_request( - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(next_link, **path_format_arguments) - - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.get("nextLink", None), iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def get_project_details( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> JSONType - """Get the requested project metadata. - - Get the requested project metadata. - - :param project_name: The name of the project to use. - :type project_name: str - :return: JSON object - :rtype: JSONType - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation - date-time. - "description": "str", # Optional. Description of the project. - "language": "str", # Optional. Language of the text records. This is BCP-47 - representation of a language. 
For example, use "en" for English; "es" for Spanish - etc. If not set, use "en" for English as default. - "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the - project last deployment date-time. - "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. Represents the - project last modified date-time. - "multilingualResource": bool, # Optional. Resource enabled for multiple - languages across projects or not. - "projectName": "str", # Optional. Name of the project. - "settings": { - "defaultAnswer": "str" # Optional. Default Answer response when no - good match is found in the knowledge base. - } - } - """ - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - - request = build_get_project_details_request( - project_name=project_name, - api_version=api_version, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - @distributed_trace - def create_project( - self, - project_name, # type: str - options, # type: JSONType - **kwargs # type: Any - ): - # type: (...) -> JSONType - """Create or update a project. - - Create or update a project. - - :param project_name: The name of the project to use. - :type project_name: str - :param options: Parameters needed to create the project. - :type options: JSONType - :return: JSON object - :rtype: JSONType - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - options = { - "description": "str", # Optional. Description of the project. - "language": "str", # Required. Language of the text records. This is BCP-47 - representation of a language. For example, use "en" for English; "es" for Spanish - etc. If not set, use "en" for English as default. - "multilingualResource": bool, # Optional. Set to true to enable creating - knowledgebases in different languages for the same resource. - "settings": { - "defaultAnswer": "str" # Optional. Default Answer response when no - good match is found in the knowledge base. - } - } - - # response body for status code(s): 200, 201 - response.json() == { - "createdDateTime": "2020-02-20 00:00:00", # Optional. Project creation - date-time. - "description": "str", # Optional. Description of the project. - "language": "str", # Optional. Language of the text records. This is BCP-47 - representation of a language. For example, use "en" for English; "es" for Spanish - etc. If not set, use "en" for English as default. - "lastDeployedDateTime": "2020-02-20 00:00:00", # Optional. Represents the - project last deployment date-time. - "lastModifiedDateTime": "2020-02-20 00:00:00", # Optional. 
Represents the - project last modified date-time. - "multilingualResource": bool, # Optional. Resource enabled for multiple - languages across projects or not. - "projectName": "str", # Optional. Name of the project. - "settings": { - "defaultAnswer": "str" # Optional. Default Answer response when no - good match is found in the knowledge base. - } - } - """ - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - - _json = options - - request = build_create_project_request( - project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.status_code == 200: - if response.content: - deserialized = response.json() - else: - deserialized = None - - if response.status_code == 201: - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - - def _delete_project_initial( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - - request = build_delete_project_request_initial( - project_name=project_name, - api_version=api_version, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) - - if cls: - return cls(pipeline_response, None, response_headers) - - @distributed_trace - def begin_delete_project( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Delete the project. - - Delete the project. - - :param project_name: The name of the project to use. - :type project_name: str - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns None - :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[None] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._delete_project_initial( - project_name=project_name, api_version=api_version, cls=lambda x, y, z: x, **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: - polling_method = NoPolling() - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - def _export_initial( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional[JSONType] - cls = kwargs.pop("cls", None) # type: ClsType[Optional[JSONType]] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - format = kwargs.pop("format", "json") # type: Optional[str] - asset_kind = kwargs.pop("asset_kind", None) # type: Optional[str] - - request = build_export_request_initial( - project_name=project_name, - api_version=api_version, - format=format, - asset_kind=asset_kind, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - deserialized = None - response_headers = {} - if response.status_code == 200: - if response.content: - deserialized = response.json() - else: - deserialized = None - - if response.status_code == 202: - response_headers["Operation-Location"] = self._deserialize( - "str", response.headers.get("Operation-Location") - ) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - - @distributed_trace - def begin_export( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[JSONType] - """Export project metadata and assets. - - Export project metadata and assets. - - :param project_name: The name of the project to use. - :type project_name: str - :keyword format: Knowledge base Import or Export format. Possible values are: "json", "tsv", - and "excel". - :paramtype format: str - :keyword asset_kind: Kind of the asset of the project. Possible values are: "qnas" or - "synonyms". - :paramtype asset_kind: str - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns JSON object - :rtype: ~azure.core.polling.LROPoller[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "createdDateTime": "2020-02-20 00:00:00", # Required. - "errors": [ - { - "code": "str", # Required. One of a server-defined set of - error codes. Possible values include: "InvalidRequest", - "InvalidArgument", "Unauthorized", "Forbidden", "NotFound", - "ProjectNotFound", "OperationNotFound", "AzureCognitiveSearchNotFound", - "AzureCognitiveSearchIndexNotFound", "TooManyRequests", - "AzureCognitiveSearchThrottling", - "AzureCognitiveSearchIndexLimitReached", "InternalServerError", - "ServiceUnavailable". - "details": [ - ... - ], - "innererror": { - "code": "str", # Required. One of a server-defined - set of error codes. 
Possible values include: "InvalidRequest", - "InvalidParameterValue", "KnowledgeBaseNotFound", - "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", - "ExtractionFailure". - "details": { - "str": "str" # Optional. Error details. - }, - "innererror": ..., - "message": "str", # Required. Error message. - "target": "str" # Optional. Error target. - }, - "message": "str", # Required. A human-readable - representation of the error. - "target": "str" # Optional. The target of the error. - } - ], - "expirationDateTime": "2020-02-20 00:00:00", # Optional. - "jobId": "str", # Required. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # Required. - "resultUrl": "str", # Required. URL to download the result of the Export - Job. - "status": "str" # Required. Job Status. Possible values include: - "notStarted", "running", "succeeded", "failed", "cancelled", "cancelling", - "partiallyCompleted". - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - format = kwargs.pop("format", "json") # type: Optional[str] - asset_kind = kwargs.pop("asset_kind", None) # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._export_initial( - project_name=project_name, - format=format, - asset_kind=asset_kind, - api_version=api_version, - cls=lambda x, y, z: x, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - if response.content: - deserialized = response.json() - else: - deserialized = None - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: - polling_method = NoPolling() - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - def _import_assets_initial( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - options=None, # type: JSONType - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - format = kwargs.pop("format", "json") # type: Optional[str] - asset_kind = kwargs.pop("asset_kind", None) # type: Optional[str] - - if options is not None: - _json = options - else: - _json = None - - request = build_import_assets_request_initial( - project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, - format=format, - asset_kind=asset_kind, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) - - if cls: - return cls(pipeline_response, None, response_headers) - - @distributed_trace - def begin_import_assets( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - options=None, # type: JSONType - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Import project assets. - - Import project assets. - - :param project_name: The name of the project to use. - :type project_name: str - :param options: Project assets the needs to be imported. - :type options: JSONType - :keyword format: Knowledge base Import or Export format. Possible values are: "json", "tsv", - and "excel". - :paramtype format: str - :keyword asset_kind: Kind of the asset of the project. Possible values are: "qnas" or - "synonyms". - :paramtype asset_kind: str - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns None - :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - options = { - "assets": { - "qnas": [ - { - "activeLearningSuggestions": [ - { - "clusterHead": "str", # Optional. - Question chosen as the head of suggested questions cluster by - Active Learning clustering algorithm. - "suggestedQuestions": [ - { - "autoSuggestedCount": - 0, # Optional. The number of times the question was - suggested automatically by the Active Learning - algorithm. - "question": "str", # - Optional. Question suggested by the Active Learning - feature. - "userSuggestedCount": - 0 # Optional. 
The number of times the question was - suggested explicitly by the user. - } - ] - } - ], - "answer": "str", # Optional. Answer text. - "dialog": { - "isContextOnly": bool, # Optional. To mark - if a prompt is relevant only with a previous question or not. If - true, do not include this QnA as answer for queries without - context; otherwise, ignores context and includes this QnA in - answers. - "prompts": [ - { - "displayOrder": 0, # - Optional. Index of the prompt. It is used for ordering of - the prompts. - "displayText": "str", # - Optional. Text displayed to represent a follow up - question prompt. - "qna": { - "activeLearningSuggestions": [ - { - "clusterHead": "str", # Optional. Question - chosen as the head of suggested questions - cluster by Active Learning clustering - algorithm. - "suggestedQuestions": [ - { - "autoSuggestedCount": 0, # Optional. - The number of times the question was - suggested automatically by the Active - Learning algorithm. - "question": "str", # Optional. - Question suggested by the Active - Learning feature. - "userSuggestedCount": 0 # Optional. - The number of times the question was - suggested explicitly by the user. - } - ] - } - ], - "answer": "str", # - Optional. Answer text. - "dialog": ..., - "id": 0, # Optional. - Unique ID for the QnA. - "metadata": { - "str": "str" - # Optional. Metadata associated with the answer, - useful to categorize or filter question answers. - }, - "questions": [ - "str" # - Optional. List of questions associated with the - answer. - ], - "source": "str" # - Optional. Source from which QnA was indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs - . - }, - "qnaId": 0 # Optional. ID of - the QnA corresponding to the prompt. - } - ] - }, - "id": 0, # Optional. Unique ID for the QnA. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # - Optional. Date-time when the QnA was last updated. - "metadata": { - "str": "str" # Optional. Metadata associated - with the answer, useful to categorize or filter question answers. - }, - "questions": [ - "str" # Optional. List of questions - associated with the answer. - ], - "source": "str", # Optional. Source from which QnA - was indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs - . - "sourceDisplayName": "str" # Optional. Friendly name - of the Source. - } - ], - "synonyms": [ - { - "alterations": [ - "str" # Required. Collection of word - alterations. - ] - } - ] - }, - "fileUri": "str", # Optional. Import data File URI. - "metadata": { - "description": "str", # Optional. Description of the project. - "language": "str", # Required. Language of the text records. This is - BCP-47 representation of a language. For example, use "en" for English; "es" - for Spanish etc. If not set, use "en" for English as default. - "multilingualResource": bool, # Optional. Set to true to enable - creating knowledgebases in different languages for the same resource. - "settings": { - "defaultAnswer": "str" # Optional. Default Answer response - when no good match is found in the knowledge base. 
- } - } - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - format = kwargs.pop("format", "json") # type: Optional[str] - asset_kind = kwargs.pop("asset_kind", None) # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[None] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._import_assets_initial( - project_name=project_name, - options=options, - format=format, - asset_kind=asset_kind, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: - polling_method = NoPolling() - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - def _deploy_project_initial( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - - request = build_deploy_project_request_initial( - project_name=project_name, - deployment_name=deployment_name, - api_version=api_version, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) - - if cls: - return cls(pipeline_response, None, response_headers) - - @distributed_trace - def begin_deploy_project( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Deploy project to production. - - Deploy project to production. - - :param project_name: The name of the project to use. - :type project_name: str - :param deployment_name: The name of the specific deployment of the project to use. 
- :type deployment_name: str - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns None - :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[None] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._deploy_project_initial( - project_name=project_name, - deployment_name=deployment_name, - api_version=api_version, - cls=lambda x, y, z: x, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: - polling_method = NoPolling() - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - @distributed_trace - def list_deployments( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable[JSONType] - """List all deployments of a project. - - List all deployments of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :keyword top: The maximum number of resources to return from the collection. - :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. - :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "deploymentName": "str", # Optional. Name of the deployment. - "lastDeployedDateTime": "2020-02-20 00:00:00" # Optional. - Represents the project last deployment date-time. 
- } - ] - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - top = kwargs.pop("top", None) # type: Optional[int] - skip = kwargs.pop("skip", None) # type: Optional[int] - maxpagesize = kwargs.pop("maxpagesize", None) # type: Optional[int] - - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_deployments_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - else: - - request = build_list_deployments_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(next_link, **path_format_arguments) - - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.get("nextLink", None), iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def list_synonyms( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable[JSONType] - """Gets all the synonyms of a project. - - Gets all the synonyms of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :keyword top: The maximum number of resources to return from the collection. - :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. - :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "alterations": [ - "str" # Required. Collection of word alterations. 
- ] - } - ] - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - top = kwargs.pop("top", None) # type: Optional[int] - skip = kwargs.pop("skip", None) # type: Optional[int] - maxpagesize = kwargs.pop("maxpagesize", None) # type: Optional[int] - - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_synonyms_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - else: - - request = build_list_synonyms_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(next_link, **path_format_arguments) - - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.get("nextLink", None), iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def update_synonyms( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - synonyms, # type: JSONType - **kwargs # type: Any - ): - # type: (...) -> None - """Updates all the synonyms of a project. - - Updates all the synonyms of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :param synonyms: All the synonyms of a project. - :type synonyms: JSONType - :return: None - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - synonyms = { - "nextLink": "str", # Optional. - "value": [ - { - "alterations": [ - "str" # Required. Collection of word alterations. 
- ] - } - ] - } - """ - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - - _json = synonyms - - request = build_update_synonyms_request( - project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if cls: - return cls(pipeline_response, None, {}) - - @distributed_trace - def list_sources( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable[JSONType] - """Gets all the sources of a project. - - Gets all the sources of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :keyword top: The maximum number of resources to return from the collection. - :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. - :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "contentStructureKind": "unstructured", # Optional. Default - value is "unstructured". Content structure type for sources. Possible - values include: "unstructured". - "displayName": "str", # Optional. Friendly name of the - Source. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. - Date-time when the QnA was last updated. - "source": "str", # Optional. Unique source identifier. Name - of the file if it's a 'file' source; otherwise, the complete URL if it's - a 'url' source. - "sourceKind": "str", # Required. Supported source types. - Possible values include: "file", "url". - "sourceUri": "str" # Required. URI location for the file or - url. 
- } - ] - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - top = kwargs.pop("top", None) # type: Optional[int] - skip = kwargs.pop("skip", None) # type: Optional[int] - maxpagesize = kwargs.pop("maxpagesize", None) # type: Optional[int] - - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_sources_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - else: - - request = build_list_sources_request( - project_name=project_name, - api_version=api_version, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(next_link, **path_format_arguments) - - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.get("nextLink", None), iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - def _update_sources_initial( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - sources, # type: List[JSONType] - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - - _json = sources - - request = build_update_sources_request_initial( - project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) - - if cls: - return cls(pipeline_response, None, response_headers) - - @distributed_trace - def begin_update_sources( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - sources, # type: List[JSONType] - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Updates the sources of a project. - - Updates the sources of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :param sources: Update sources parameters of a project. - :type sources: list[JSONType] - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns None - :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - sources = [ - { - "op": "str", # Required. Update operation type for assets. Possible - values include: "add", "delete", "replace". - "value": { - "contentStructureKind": "unstructured", # Optional. Default - value is "unstructured". Content structure type for sources. Possible - values include: "unstructured". - "displayName": "str", # Optional. Friendly name of the - Source. - "refresh": bool, # Optional. Boolean flag used to refresh - data from the Source. - "source": "str", # Optional. Unique source identifier. Name - of the file if it's a 'file' source; otherwise, the complete URL if it's - a 'url' source. - "sourceKind": "str", # Required. Supported source types. - Possible values include: "file", "url". - "sourceUri": "str" # Required. URI location for the file or - url. 
- } - } - ] - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[None] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._update_sources_initial( - project_name=project_name, - sources=sources, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: - polling_method = NoPolling() - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - @distributed_trace - def list_qnas( - self, - project_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable[JSONType] - """Gets all the QnAs of a project. - - Gets all the QnAs of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :keyword source: Source of the QnA. - :paramtype source: str - :keyword top: The maximum number of resources to return from the collection. - :paramtype top: int - :keyword skip: An offset into the collection of the first resource to be returned. - :paramtype skip: int - :keyword maxpagesize: The maximum number of resources to include in a single response. - :paramtype maxpagesize: int - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSONType] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response.json() == { - "nextLink": "str", # Optional. - "value": [ - { - "activeLearningSuggestions": [ - { - "clusterHead": "str", # Optional. Question - chosen as the head of suggested questions cluster by Active - Learning clustering algorithm. - "suggestedQuestions": [ - { - "autoSuggestedCount": 0, # - Optional. The number of times the question was suggested - automatically by the Active Learning algorithm. - "question": "str", # - Optional. Question suggested by the Active Learning - feature. - "userSuggestedCount": 0 # - Optional. The number of times the question was suggested - explicitly by the user. - } - ] - } - ], - "answer": "str", # Optional. Answer text. - "dialog": { - "isContextOnly": bool, # Optional. To mark if a - prompt is relevant only with a previous question or not. If true, do - not include this QnA as answer for queries without context; - otherwise, ignores context and includes this QnA in answers. - "prompts": [ - { - "displayOrder": 0, # Optional. Index - of the prompt. It is used for ordering of the prompts. - "displayText": "str", # Optional. - Text displayed to represent a follow up question prompt. 
- "qna": { - "activeLearningSuggestions": - [ - { - "clusterHead": "str", # Optional. Question - chosen as the head of suggested questions cluster - by Active Learning clustering algorithm. - "suggestedQuestions": [ - { - "autoSuggestedCount": 0, # Optional. The - number of times the question was - suggested automatically by the Active - Learning algorithm. - "question": "str", # Optional. Question - suggested by the Active Learning feature. - "userSuggestedCount": 0 # Optional. The - number of times the question was - suggested explicitly by the user. - } - ] - } - ], - "answer": "str", # Optional. - Answer text. - "dialog": ..., - "id": 0, # Optional. Unique - ID for the QnA. - "metadata": { - "str": "str" # - Optional. Metadata associated with the answer, useful - to categorize or filter question answers. - }, - "questions": [ - "str" # Optional. - List of questions associated with the answer. - ], - "source": "str" # Optional. - Source from which QnA was indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs - . - }, - "qnaId": 0 # Optional. ID of the QnA - corresponding to the prompt. - } - ] - }, - "id": 0, # Optional. Unique ID for the QnA. - "lastUpdatedDateTime": "2020-02-20 00:00:00", # Optional. - Date-time when the QnA was last updated. - "metadata": { - "str": "str" # Optional. Metadata associated with - the answer, useful to categorize or filter question answers. - }, - "questions": [ - "str" # Optional. List of questions associated with - the answer. - ], - "source": "str" # Optional. Source from which QnA was - indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs . - } - ] - } - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - source = kwargs.pop("source", None) # type: Optional[str] - top = kwargs.pop("top", None) # type: Optional[int] - skip = kwargs.pop("skip", None) # type: Optional[int] - maxpagesize = kwargs.pop("maxpagesize", None) # type: Optional[int] - - cls = kwargs.pop("cls", None) # type: ClsType[JSONType] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - def prepare_request(next_link=None): - if not next_link: - - request = build_list_qnas_request( - project_name=project_name, - api_version=api_version, - source=source, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - else: - - request = build_list_qnas_request( - project_name=project_name, - api_version=api_version, - source=source, - top=top, - skip=skip, - maxpagesize=maxpagesize, - ) - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.url = self._client.format_url(next_link, **path_format_arguments) - - path_format_arguments = { - "Endpoint": self._serialize.url( - "self._config.endpoint", self._config.endpoint, "str", skip_quote=True - ), - } - request.method = "GET" - return request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.get("nextLink", None), iter(list_of_elem) - - def get_next(next_link=None): - request = 
prepare_request(next_link) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - def _update_qnas_initial( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - qnas, # type: List[JSONType] - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - - _json = qnas - - request = build_update_qnas_request_initial( - project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - response_headers = {} - response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) - - if cls: - return cls(pipeline_response, None, response_headers) - - @distributed_trace - def begin_update_qnas( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - qnas, # type: List[JSONType] - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Updates the QnAs of a project. - - Updates the QnAs of a project. - - :param project_name: The name of the project to use. - :type project_name: str - :param qnas: Update QnAs parameters of a project. - :type qnas: list[JSONType] - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for - this operation to not poll, or pass in your own initialized polling object for a personal - polling strategy. - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no - Retry-After header is present. - :return: An instance of LROPoller that returns None - :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - qnas = [ - { - "op": "str", # Required. Update operation type for assets. Possible - values include: "add", "delete", "replace". - "value": { - "activeLearningSuggestions": [ - { - "clusterHead": "str", # Optional. Question - chosen as the head of suggested questions cluster by Active - Learning clustering algorithm. - "suggestedQuestions": [ - { - "autoSuggestedCount": 0, # - Optional. 
The number of times the question was suggested - automatically by the Active Learning algorithm. - "question": "str", # - Optional. Question suggested by the Active Learning - feature. - "userSuggestedCount": 0 # - Optional. The number of times the question was suggested - explicitly by the user. - } - ] - } - ], - "answer": "str", # Optional. Answer text. - "dialog": { - "isContextOnly": bool, # Optional. To mark if a - prompt is relevant only with a previous question or not. If true, do - not include this QnA as answer for queries without context; - otherwise, ignores context and includes this QnA in answers. - "prompts": [ - { - "displayOrder": 0, # Optional. Index - of the prompt. It is used for ordering of the prompts. - "displayText": "str", # Optional. - Text displayed to represent a follow up question prompt. - "qna": ..., - "qnaId": 0 # Optional. ID of the QnA - corresponding to the prompt. - } - ] - }, - "id": 0, # Optional. Unique ID for the QnA. - "metadata": { - "str": "str" # Optional. Metadata associated with - the answer, useful to categorize or filter question answers. - }, - "questions": [ - "str" # Optional. List of questions associated with - the answer. - ], - "source": "str" # Optional. Source from which QnA was - indexed e.g. - https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs . - } - } - ] - """ - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] - cls = kwargs.pop("cls", None) # type: ClsType[None] - lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) - cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] - if cont_token is None: - raw_result = self._update_qnas_initial( - project_name=project_name, - qnas=qnas, - api_version=api_version, - content_type=content_type, - cls=lambda x, y, z: x, - **kwargs - ) - kwargs.pop("error_map", None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - - if polling is True: - polling_method = LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: - polling_method = NoPolling() - else: - polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output, - ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - - @distributed_trace - def add_feedback( # pylint: disable=inconsistent-return-statements - self, - project_name, # type: str - feedback, # type: JSONType - **kwargs # type: Any - ): - # type: (...) -> None - """Update Active Learning feedback. - - Update Active Learning feedback. - - :param project_name: The name of the project to use. - :type project_name: str - :param feedback: Feedback for Active Learning. - :type feedback: JSONType - :return: None - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - feedback = { - "records": [ - { - "qnaId": 0, # Optional. Unique ID of the QnA. - "userId": "str", # Optional. 
Unique identifier of the user. - "userQuestion": "str" # Optional. User suggested question - for the QnA. - } - ] - } - """ - cls = kwargs.pop("cls", None) # type: ClsType[None] - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop("error_map", {})) - - api_version = kwargs.pop("api_version", "2021-10-01") # type: str - content_type = kwargs.pop("content_type", "application/json") # type: Optional[str] - - _json = feedback - - request = build_add_feedback_request( - project_name=project_name, - api_version=api_version, - content_type=content_type, - json=_json, - ) - path_format_arguments = { - "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - request.url = self._client.format_url(request.url, **path_format_arguments) - - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if cls: - return cls(pipeline_response, None, {}) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_create_and_deploy_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_create_and_deploy_project_async.py index 5b6b1a0f8d3a..d4d3b4c6dadc 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_create_and_deploy_project_async.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_create_and_deploy_project_async.py @@ -24,14 +24,14 @@ async def sample_create_and_deploy_project_async(): # [START create_and_deploy_project] import os from azure.core.credentials import AzureKeyCredential - from azure.ai.language.questionanswering.projects.aio import QuestionAnsweringProjectsClient + from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient # get service secrets endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client - client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) async with client: # create project diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_export_import_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_export_import_project_async.py index 2b093d72012d..e9226ccaee41 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_export_import_project_async.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_export_import_project_async.py @@ -24,14 +24,14 @@ async def sample_export_import_project_async(): # [START export_import_project] import os from azure.core.credentials import AzureKeyCredential - from azure.ai.language.questionanswering.projects.aio import QuestionAnsweringProjectsClient + from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient # get service secrets endpoint = 
os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client - client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) async with client: # create project diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_update_knowledge_sources_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_update_knowledge_sources_async.py index 847e8f9b66f1..f8a168fe6004 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_update_knowledge_sources_async.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/async_samples/sample_update_knowledge_sources_async.py @@ -24,14 +24,14 @@ async def sample_update_knowledge_sources_async(): # [START update_knowledge_sources] import os from azure.core.credentials import AzureKeyCredential - from azure.ai.language.questionanswering.projects.aio import QuestionAnsweringProjectsClient + from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient # get service secrets endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client - client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) async with client: # create project diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_create_and_deploy_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_create_and_deploy_project.py index 842f3b851764..fdb774da0790 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_create_and_deploy_project.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_create_and_deploy_project.py @@ -22,14 +22,14 @@ def sample_create_and_deploy_project(): # [START create_and_deploy_project] import os from azure.core.credentials import AzureKeyCredential - from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient + from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient # get service secrets endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client - client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) with client: # create project diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_export_import_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_export_import_project.py index 733cb3cc9ed1..94e94883c801 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_export_import_project.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_export_import_project.py @@ -25,14 +25,14 @@ def sample_export_import_project(): # [START export_import_project] import os from azure.core.credentials import AzureKeyCredential - from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient + from 
azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient # get service secrets endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client - client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) with client: # create project diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_update_knowledge_sources.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_update_knowledge_sources.py index 1f3b911155b1..5cee9ef4f4b6 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_update_knowledge_sources.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/samples/authoring/sample_update_knowledge_sources.py @@ -22,14 +22,14 @@ def sample_update_knowledge_sources(): # [START update_knowledge_sources] import os from azure.core.credentials import AzureKeyCredential - from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient + from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient # get service secrets endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] key = os.environ["AZURE_QUESTIONANSWERING_KEY"] # create client - client = QuestionAnsweringProjectsClient(endpoint, AzureKeyCredential(key)) + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) with client: # create project diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/README.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/README.md index 9b2c498f7937..94c88ad0b2ab 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/README.md +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/README.md @@ -20,25 +20,58 @@ autorest ### Settings ```yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/34a2c0723155d134311419fd997925ce96b85bec/specification/cognitiveservices/data-plane/Language/stable/2021-10-01/questionanswering.json -output-folder: ../azure/ai/language/questionanswering namespace: azure.ai.language.questionanswering package-name: azure-ai-language-questionanswering license-header: MICROSOFT_MIT_NO_VERSION clear-output-folder: true no-namespace-folders: true python: true -title: QuestionAnsweringClient version-tolerant: true -models-mode: msrest -package-version: 1.1.0b1 +package-version: 1.1.0b3 add-credential: true credential-default-policy-type: AzureKeyCredentialPolicy credential-key-header-name: Ocp-Apim-Subscription-Key black: true ``` -### Rename "QuestionAnsweringKnowledgeBase_Query" -> "GetAnswers" +## Batch Execution + +```yaml +batch: + - tag: release_runtime_1_1_preview + - tag: release_authoring_1_1_preview +``` + + +## Runtime + +These settings apply only when `--tag=release_runtime_1_1_preview` is specified on the command line. 
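+For example, regenerating only this client can be done with something like `autorest --tag=release_runtime_1_1_preview` from the `swagger` directory, while a plain `autorest` run would use the batch configuration above to generate both clients.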
+ +```yaml $(tag) == 'release_runtime_1_1_preview' +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/34a2c0723155d134311419fd997925ce96b85bec/specification/cognitiveservices/data-plane/Language/stable/2021-10-01/questionanswering.json +output-folder: ../azure/ai/language/questionanswering +models-mode: msrest +title: QuestionAnsweringClient +``` + +## Authoring + +These settings apply only when `--tag=release_authoring_1_1_preview` is specified on the command line. + +```yaml $(tag) == 'release_authoring_1_1_preview' +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/59ad2b7dd63e952822aa51e11a26a0af5724f996/specification/cognitiveservices/data-plane/Language/stable/2021-10-01/questionanswering-authoring.json +output-folder: ../azure/ai/language/questionanswering/authoring +title: QuestionAnsweringAuthoringClient +``` + + + +## Customizations + +### Runtime + + +#### Rename "QuestionAnsweringKnowledgeBase_Query" -> "GetAnswers" ```yaml directive: @@ -48,7 +81,7 @@ directive: $["operationId"] = "getAnswers"; ``` -### Rename "QuestionAnsweringText_Query" -> "GetAnswersFromText" +#### Rename "QuestionAnsweringText_Query" -> "GetAnswersFromText" ```yaml directive: @@ -58,7 +91,7 @@ directive: $["operationId"] = "getAnswersFromText"; ``` -### Rename `KnowledgeBasedQueryOptions` -> `Options` +#### Rename `KnowledgeBasedQueryOptions` -> `Options` ```yaml directive: @@ -68,7 +101,7 @@ directive: $["x-ms-client-name"] = "Options"; ``` -### Rename `TextQueryOptions` -> `Options` +#### Rename `TextQueryOptions` -> `Options` ```yaml directive: @@ -78,7 +111,7 @@ directive: $["x-ms-client-name"] = "Options"; ``` -### Delete `StringIndexType` +#### Delete `StringIndexType` ```yaml directive: @@ -88,7 +121,7 @@ directive: delete $.properties["stringIndexType"] ``` -### Delete `RankerKind` and `LogicalOperationKind` enums +#### Delete `RankerKind` and `LogicalOperationKind` enums ```yaml directive: @@ -101,7 +134,7 @@ directive: delete $["LogicalOperationKind"]["enum"]; ``` -### Make `MetadataFilter`'s `metadata` property a list of string +#### Make `MetadataFilter`'s `metadata` property a list of string ```yaml directive: @@ -111,4 +144,133 @@ directive: delete $["MetadataFilter"]["properties"]["metadata"]["items"]["$ref"]; $["MetadataFilter"]["properties"]["metadata"]["items"]["type"] = "object"; delete $["MetadataRecord"]; -``` \ No newline at end of file +``` + +### Authoring + + +#### Remove operation group + +```yaml +directive: + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects"]["get"] + transform: > + $["operationId"] = "listProjects"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["get"] + transform: > + $["operationId"] = "getProjectDetails"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["patch"] + transform: > + $["operationId"] = "createProject"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["delete"] + transform: > + $["operationId"] = "deleteProject"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/:export"]["post"] + transform: > + $["operationId"] = "export"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/:import"]["post"] + transform: > + $["operationId"] = "importAssets"; + - from: swagger-document + where: 
$["paths"]["/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}"]["put"] + transform: > + $["operationId"] = "deployProject"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/deployments"]["get"] + transform: > + $["operationId"] = "listDeployments"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/synonyms"]["get"] + transform: > + $["operationId"] = "listSynonyms"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/synonyms"]["put"] + transform: > + $["operationId"] = "updateSynonyms"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources"]["get"] + transform: > + $["operationId"] = "listSources"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources"]["patch"] + transform: > + $["operationId"] = "updateSources"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas"]["get"] + transform: > + $["operationId"] = "listQnas"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas"]["patch"] + transform: > + $["operationId"] = "updateQnas"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/feedback"]["post"] + transform: > + $["operationId"] = "addFeedback"; +``` + +#### Remove status operations + +```yaml +directive: + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/deletion-jobs/{jobId}"] + transform: > + delete $["get"]; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/export/jobs/{jobId}"] + transform: > + delete $["get"]; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/import/jobs/{jobId}"] + transform: > + delete $["get"]; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}/jobs/{jobId}"] + transform: > + delete $["get"]; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources/jobs/{jobId}"] + transform: > + delete $["get"]; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas/jobs/{jobId}"] + transform: > + delete $["get"]; +``` + +#### Rename body parameter + +```yaml +directive: + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/feedback"]["post"] + transform: > + $["parameters"][2]["x-ms-client-name"] = "feedback"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas"]["patch"] + transform: > + $["parameters"][2]["x-ms-client-name"] = "qnas"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources"]["patch"] + transform: > + $["parameters"][2]["x-ms-client-name"] = "sources"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/synonyms"]["put"] + transform: > + $["parameters"][2]["x-ms-client-name"] = "synonyms"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}/:import"]["post"] + transform: > + $["parameters"][2]["x-ms-client-name"] = "options"; + - from: swagger-document + where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["patch"] + transform: > + $["parameters"][1]["x-ms-client-name"] = "options"; +``` diff 
--git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/authoring/readme.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/authoring/readme.md deleted file mode 100644 index 992bb35b73c6..000000000000 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/swagger/authoring/readme.md +++ /dev/null @@ -1,166 +0,0 @@ -# Azure QnA for Python - -> see https://aka.ms/autorest - -### Setup - -Install Autorest v3 - -```ps -npm install -g autorest -``` - -### Generation - -```ps -cd /authoring -autorest -``` - -### Settings - -```yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/cognitiveservices/data-plane/Language/stable/2021-10-01/questionanswering-authoring.json -output-folder: ../../azure/ai/language/questionanswering/projects -namespace: azure.ai.language.questionanswering.projects -package-name: azure-ai-language-questionanswering -license-header: MICROSOFT_MIT_NO_VERSION -clear-output-folder: true -no-namespace-folders: true -python: true -python3-only: false -title: QuestionAnsweringProjectsClient -version-tolerant: true -keep-version-file: false -package-version: 1.1.0b1 -add-credential: true -credential-default-policy-type: AzureKeyCredentialPolicy -credential-key-header-name: Ocp-Apim-Subscription-Key -black: true -``` - -### Remove operation group - -```yaml -directive: - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects"]["get"] - transform: > - $["operationId"] = "listProjects"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["get"] - transform: > - $["operationId"] = "getProjectDetails"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["patch"] - transform: > - $["operationId"] = "createProject"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["delete"] - transform: > - $["operationId"] = "deleteProject"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/:export"]["post"] - transform: > - $["operationId"] = "export"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/:import"]["post"] - transform: > - $["operationId"] = "importAssets"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}"]["put"] - transform: > - $["operationId"] = "deployProject"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/deployments"]["get"] - transform: > - $["operationId"] = "listDeployments"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/synonyms"]["get"] - transform: > - $["operationId"] = "listSynonyms"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/synonyms"]["put"] - transform: > - $["operationId"] = "updateSynonyms"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources"]["get"] - transform: > - $["operationId"] = "listSources"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources"]["patch"] - transform: > - $["operationId"] = "updateSources"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas"]["get"] - transform: > - $["operationId"] = "listQnas"; - - from: swagger-document - where: 
$["paths"]["/query-knowledgebases/projects/{projectName}/qnas"]["patch"] - transform: > - $["operationId"] = "updateQnas"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/feedback"]["post"] - transform: > - $["operationId"] = "addFeedback"; -``` - -### Remove status operations - -```yaml -directive: - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/deletion-jobs/{jobId}"] - transform: > - delete $["get"]; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/export/jobs/{jobId}"] - transform: > - delete $["get"]; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/import/jobs/{jobId}"] - transform: > - delete $["get"]; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}/jobs/{jobId}"] - transform: > - delete $["get"]; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources/jobs/{jobId}"] - transform: > - delete $["get"]; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas/jobs/{jobId}"] - transform: > - delete $["get"]; -``` - -### Rename body parameter - -```yaml -directive: - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/feedback"]["post"] - transform: > - $["parameters"][2]["x-ms-client-name"] = "feedback"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/qnas"]["patch"] - transform: > - $["parameters"][2]["x-ms-client-name"] = "qnas"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/sources"]["patch"] - transform: > - $["parameters"][2]["x-ms-client-name"] = "sources"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/synonyms"]["put"] - transform: > - $["parameters"][2]["x-ms-client-name"] = "synonyms"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}/:import"]["post"] - transform: > - $["parameters"][2]["x-ms-client-name"] = "options"; - - from: swagger-document - where: $["paths"]["/query-knowledgebases/projects/{projectName}"]["patch"] - transform: > - $["parameters"][1]["x-ms-client-name"] = "options"; -``` \ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project.py index 3753acaf4508..45be245ff898 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project.py @@ -5,7 +5,7 @@ # ------------------------------------ import pytest -from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient from azure.core.credentials import AzureKeyCredential from helpers import QnaAuthoringHelper @@ -15,8 +15,8 @@ class TestCreateAndDeploy(QuestionAnsweringTestCase): def test_create_project_aad(self, recorded_test, qna_creds): - token = self.get_credential(QuestionAnsweringProjectsClient) - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], token) + token = self.get_credential(QuestionAnsweringAuthoringClient) + 
client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], token) # create project project_name = "IssacNewton" @@ -40,7 +40,7 @@ def test_create_project_aad(self, recorded_test, qna_creds): assert found def test_create_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -64,7 +64,7 @@ def test_create_project(self, recorded_test, qna_creds): assert found def test_deploy_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create deployable project project_name = "IssacNewton" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project_async.py index eb93506f4c80..6c0ef5f92296 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project_async.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_create_and_deploy_project_async.py @@ -5,7 +5,7 @@ # ------------------------------------ import pytest -from azure.ai.language.questionanswering.projects.aio import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient from azure.core.credentials import AzureKeyCredential from helpers import QnaAuthoringAsyncHelper @@ -16,8 +16,8 @@ class TestCreateAndDeployAsync(QuestionAnsweringTestCase): @pytest.mark.asyncio async def test_create_project_aad(self, recorded_test, qna_creds): - token = self.get_credential(QuestionAnsweringProjectsClient, is_async=True) - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], token) + token = self.get_credential(QuestionAnsweringAuthoringClient, is_async=True) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], token) # create project project_name = "IssacNewton" @@ -42,7 +42,7 @@ async def test_create_project_aad(self, recorded_test, qna_creds): @pytest.mark.asyncio async def test_create_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -67,7 +67,7 @@ async def test_create_project(self, recorded_test, qna_creds): @pytest.mark.asyncio async def test_deploy_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create deployable project project_name = "IssacNewton" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project.py index 89643eb0188b..cb350f324501 100644 --- 
a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project.py @@ -3,7 +3,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient from azure.core.credentials import AzureKeyCredential from helpers import QnaAuthoringHelper @@ -13,7 +13,7 @@ class TestExportAndImport(QuestionAnsweringTestCase): def test_export_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -30,7 +30,7 @@ def test_export_project(self, recorded_test, qna_creds): assert result["resultUrl"] is not None def test_import_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project_async.py index 7a751a84316e..508358ef23aa 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project_async.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_export_import_project_async.py @@ -5,7 +5,7 @@ # ------------------------------------ import pytest -from azure.ai.language.questionanswering.projects.aio import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient from azure.core.credentials import AzureKeyCredential from helpers import QnaAuthoringAsyncHelper @@ -16,7 +16,7 @@ class TestExportAndImportAsync(QuestionAnsweringTestCase): @pytest.mark.asyncio async def test_export_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -34,7 +34,7 @@ async def test_export_project(self, recorded_test, qna_creds): @pytest.mark.asyncio async def test_import_project(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources.py index c5a381bb8d66..8f36d0c37013 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources.py +++ 
b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources.py @@ -3,7 +3,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -from azure.ai.language.questionanswering.projects import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient from azure.core.credentials import AzureKeyCredential from helpers import QnaAuthoringHelper @@ -13,7 +13,7 @@ class TestSourcesQnasSynonyms(QuestionAnsweringTestCase): def test_add_source(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -49,7 +49,7 @@ def test_add_source(self, recorded_test, qna_creds): assert source_added def test_add_qna(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -84,7 +84,7 @@ def test_add_qna(self, recorded_test, qna_creds): assert qna_added def test_add_synonym(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources_async.py index 1e0538463915..7ee61597f4f9 100644 --- a/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources_async.py +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering/tests/test_update_knowledge_sources_async.py @@ -5,7 +5,7 @@ # ------------------------------------ import pytest -from azure.ai.language.questionanswering.projects.aio import QuestionAnsweringProjectsClient +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient from azure.core.credentials import AzureKeyCredential from helpers import QnaAuthoringAsyncHelper @@ -16,7 +16,7 @@ class TestSourcesQnasSynonymsAsync(QuestionAnsweringTestCase): @pytest.mark.asyncio async def test_add_source(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -53,7 +53,7 @@ async def test_add_source(self, recorded_test, qna_creds): @pytest.mark.asyncio async def test_add_qna(self, recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton" @@ -89,7 +89,7 @@ async def test_add_qna(self, recorded_test, qna_creds): @pytest.mark.asyncio async def test_add_synonym(self, 
recorded_test, qna_creds): - client = QuestionAnsweringProjectsClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) + client = QuestionAnsweringAuthoringClient(qna_creds["qna_endpoint"], AzureKeyCredential(qna_creds["qna_key"])) # create project project_name = "IssacNewton"