diff --git a/azure-cognitiveservices-language-textanalytics/MANIFEST.in b/azure-cognitiveservices-language-textanalytics/MANIFEST.in index 9ecaeb15de50..7f728a95d094 100644 --- a/azure-cognitiveservices-language-textanalytics/MANIFEST.in +++ b/azure-cognitiveservices-language-textanalytics/MANIFEST.in @@ -1,2 +1,5 @@ include *.rst -include azure_bdist_wheel.py \ No newline at end of file +include azure/__init__.py +include azure/cognitiveservices/__init__.py +include azure/cognitiveservices/language/__init__.py + diff --git a/azure-cognitiveservices-language-textanalytics/README.rst b/azure-cognitiveservices-language-textanalytics/README.rst index c26973cad3a0..45de4e6b43dd 100644 --- a/azure-cognitiveservices-language-textanalytics/README.rst +++ b/azure-cognitiveservices-language-textanalytics/README.rst @@ -1,12 +1,12 @@ Microsoft Azure SDK for Python ============================== -This is the Microsoft Azure Cognitive Services Text Analytics Client Library. +This is the Microsoft Azure Cognitive Services Text Analytics Client Library. Azure Resource Manager (ARM) is the next generation of management APIs that replace the old Azure Service Management (ASM). -This package has been tested with Python 2.7, 3.3, 3.4, 3.5 and 3.6. +This package has been tested with Python 2.7, 3.4, 3.5, 3.6 and 3.7. For the older Azure Service Management (ASM) libraries, see `azure-servicemanagement-legacy `__ library. @@ -33,6 +33,14 @@ If you see azure==0.11.0 (or any version below 1.0), uninstall it first: pip uninstall azure +Usage +===== + +For code examples, see `Cognitive Services Text Analytics +`__ +on docs.microsoft.com. 
+ + Provide Feedback ================ diff --git a/azure-cognitiveservices-language-textanalytics/azure/__init__.py b/azure-cognitiveservices-language-textanalytics/azure/__init__.py index 849489fca33c..0260537a02bb 100644 --- a/azure-cognitiveservices-language-textanalytics/azure/__init__.py +++ b/azure-cognitiveservices-language-textanalytics/azure/__init__.py @@ -1 +1 @@ -__import__('pkg_resources').declare_namespace(__name__) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file diff --git a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/__init__.py b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/__init__.py index 849489fca33c..0260537a02bb 100644 --- a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/__init__.py +++ b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/__init__.py @@ -1 +1 @@ -__import__('pkg_resources').declare_namespace(__name__) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file diff --git a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/__init__.py b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/__init__.py index 849489fca33c..0260537a02bb 100644 --- a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/__init__.py +++ b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/__init__.py @@ -1 +1 @@ -__import__('pkg_resources').declare_namespace(__name__) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file diff --git a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record.py b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record.py index e4751b5dde80..f32a151ef565 100644 --- 
a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record.py +++ b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record.py @@ -20,8 +20,8 @@ class EntityRecord(Model): :param name: Entity formal name. :type name: str - :ivar matches: List of instances this entity appears in the text. - :vartype matches: + :param matches: List of instances this entity appears in the text. + :type matches: list[~azure.cognitiveservices.language.textanalytics.models.MatchRecord] :param wikipedia_language: Wikipedia language for which the WikipediaId and WikipediaUrl refers to. @@ -34,10 +34,13 @@ class EntityRecord(Model): conjunction with the Bing Entity Search API to fetch additional relevant information. :type bing_id: str + :param type: Entity type from Named Entity Recognition model + :type type: str + :param sub_type: Entity sub type from Named Entity Recognition model + :type sub_type: str """ _validation = { - 'matches': {'readonly': True}, 'wikipedia_url': {'readonly': True}, } @@ -48,13 +51,17 @@ class EntityRecord(Model): 'wikipedia_id': {'key': 'wikipediaId', 'type': 'str'}, 'wikipedia_url': {'key': 'wikipediaUrl', 'type': 'str'}, 'bing_id': {'key': 'bingId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sub_type': {'key': 'subType', 'type': 'str'}, } def __init__(self, **kwargs): super(EntityRecord, self).__init__(**kwargs) self.name = kwargs.get('name', None) - self.matches = None + self.matches = kwargs.get('matches', None) self.wikipedia_language = kwargs.get('wikipedia_language', None) self.wikipedia_id = kwargs.get('wikipedia_id', None) self.wikipedia_url = None self.bing_id = kwargs.get('bing_id', None) + self.type = kwargs.get('type', None) + self.sub_type = kwargs.get('sub_type', None) diff --git a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record_py3.py 
b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record_py3.py index d0f25f93e96e..b0d834424758 100644 --- a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record_py3.py +++ b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/entity_record_py3.py @@ -20,8 +20,8 @@ class EntityRecord(Model): :param name: Entity formal name. :type name: str - :ivar matches: List of instances this entity appears in the text. - :vartype matches: + :param matches: List of instances this entity appears in the text. + :type matches: list[~azure.cognitiveservices.language.textanalytics.models.MatchRecord] :param wikipedia_language: Wikipedia language for which the WikipediaId and WikipediaUrl refers to. @@ -34,10 +34,13 @@ class EntityRecord(Model): conjunction with the Bing Entity Search API to fetch additional relevant information. :type bing_id: str + :param type: Entity type from Named Entity Recognition model + :type type: str + :param sub_type: Entity sub type from Named Entity Recognition model + :type sub_type: str """ _validation = { - 'matches': {'readonly': True}, 'wikipedia_url': {'readonly': True}, } @@ -48,13 +51,17 @@ class EntityRecord(Model): 'wikipedia_id': {'key': 'wikipediaId', 'type': 'str'}, 'wikipedia_url': {'key': 'wikipediaUrl', 'type': 'str'}, 'bing_id': {'key': 'bingId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sub_type': {'key': 'subType', 'type': 'str'}, } - def __init__(self, *, name: str=None, wikipedia_language: str=None, wikipedia_id: str=None, bing_id: str=None, **kwargs) -> None: + def __init__(self, *, name: str=None, matches=None, wikipedia_language: str=None, wikipedia_id: str=None, bing_id: str=None, type: str=None, sub_type: str=None, **kwargs) -> None: super(EntityRecord, self).__init__(**kwargs) self.name = name - self.matches = None + self.matches = matches 
self.wikipedia_language = wikipedia_language self.wikipedia_id = wikipedia_id self.wikipedia_url = None self.bing_id = bing_id + self.type = type + self.sub_type = sub_type diff --git a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/text_analytics_client.py b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/text_analytics_client.py index ee6526a34d06..57dc65fbec02 100644 --- a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/text_analytics_client.py +++ b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/text_analytics_client.py @@ -36,7 +36,7 @@ def __init__( raise ValueError("Parameter 'endpoint' must not be None.") if credentials is None: raise ValueError("Parameter 'credentials' must not be None.") - base_url = '{Endpoint}/text/analytics/v2.0' + base_url = '{Endpoint}/text/analytics/v2.1-preview' super(TextAnalyticsClientConfiguration, self).__init__(base_url) @@ -67,7 +67,7 @@ def __init__( super(TextAnalyticsClient, self).__init__(self.config.credentials, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self.api_version = 'v2.0' + self.api_version = 'v2.1-preview' self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) @@ -111,6 +111,7 @@ def key_phrases( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) @@ -119,9 +120,8 @@ def key_phrases( body_content = self._serialize.body(input, 'MultiLanguageBatchInput') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = 
self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) @@ -175,6 +175,7 @@ def detect_language( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) @@ -183,9 +184,8 @@ def detect_language( body_content = self._serialize.body(input, 'BatchInput') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) @@ -242,6 +242,7 @@ def sentiment( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) @@ -250,9 +251,8 @@ def sentiment( body_content = self._serialize.body(input, 'MultiLanguageBatchInput') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) @@ -273,9 +273,22 @@ def entities( self, documents=None, custom_headers=None, raw=False, **operation_config): 
"""The API returns a list of recognized entities in a given document. - To get even more information on each recognized entity we recommend - using the Bing Entity Search API by querying for the recognized - entities names. See the Supported + languages in Text Analytics API for the list of enabled + languages.The API returns a list of known entities and general named + entities ("Person", "Location", "Organization" etc) in a given + document. Known entities are returned with Wikipedia Id and Wikipedia + link, and also Bing Id which can be used in Bing Entity Search API. + General named entities are returned with entity types. If a general + named entity is also a known entity, then all information regarding it + (Wikipedia Id, Bing Id, entity type etc) will be returned. See the Supported + Entity Types in Text Analytics API for the list of supported Entity + Types. See the Supported languages in Text Analytics API for the list of enabled languages. @@ -308,6 +321,7 @@ def entities( # Construct headers header_parameters = {} + header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) @@ -316,9 +330,8 @@ def entities( body_content = self._serialize.body(input, 'MultiLanguageBatchInput') # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, stream=False, **operation_config) + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) diff --git a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/version.py 
b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/version.py index 2480717379b3..8140355c0985 100644 --- a/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/version.py +++ b/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/version.py @@ -9,5 +9,5 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "v2.0" +VERSION = "v2.1-preview" diff --git a/azure-cognitiveservices-language-textanalytics/azure_bdist_wheel.py b/azure-cognitiveservices-language-textanalytics/azure_bdist_wheel.py deleted file mode 100644 index 8a81d1b61775..000000000000 --- a/azure-cognitiveservices-language-textanalytics/azure_bdist_wheel.py +++ /dev/null @@ -1,54 +0,0 @@ -#------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -#-------------------------------------------------------------------------- - -from distutils import log as logger -import os.path - -from wheel.bdist_wheel import bdist_wheel -class azure_bdist_wheel(bdist_wheel): - """The purpose of this class is to build wheel a little differently than the sdist, - without requiring to build the wheel from the sdist (i.e. you can build the wheel - directly from source). 
- """ - - description = "Create an Azure wheel distribution" - - user_options = bdist_wheel.user_options + \ - [('azure-namespace-package=', None, - "Name of the deepest nspkg used")] - - def initialize_options(self): - bdist_wheel.initialize_options(self) - self.azure_namespace_package = None - - def finalize_options(self): - bdist_wheel.finalize_options(self) - if self.azure_namespace_package and not self.azure_namespace_package.endswith("-nspkg"): - raise ValueError("azure_namespace_package must finish by -nspkg") - - def run(self): - if not self.distribution.install_requires: - self.distribution.install_requires = [] - self.distribution.install_requires.append( - "{}>=2.0.0".format(self.azure_namespace_package)) - bdist_wheel.run(self) - - def write_record(self, bdist_dir, distinfo_dir): - if self.azure_namespace_package: - # Split and remove last part, assuming it's "nspkg" - subparts = self.azure_namespace_package.split('-')[0:-1] - folder_with_init = [os.path.join(*subparts[0:i+1]) for i in range(len(subparts))] - for azure_sub_package in folder_with_init: - init_file = os.path.join(bdist_dir, azure_sub_package, '__init__.py') - if os.path.isfile(init_file): - logger.info("manually remove {} while building the wheel".format(init_file)) - os.remove(init_file) - else: - raise ValueError("Unable to find {}. 
Are you sure of your namespace package?".format(init_file)) -            bdist_wheel.write_record(self, bdist_dir, distinfo_dir) -cmdclass = { -    'bdist_wheel': azure_bdist_wheel, -} diff --git a/azure-cognitiveservices-language-textanalytics/sdk_packaging.toml b/azure-cognitiveservices-language-textanalytics/sdk_packaging.toml new file mode 100644 index 000000000000..3420ae36a5b6 --- /dev/null +++ b/azure-cognitiveservices-language-textanalytics/sdk_packaging.toml @@ -0,0 +1,7 @@ +[packaging] +package_name = "azure-cognitiveservices-language-textanalytics" +package_nspkg = "azure-cognitiveservices-language-nspkg" +package_pprint_name = "Cognitive Services Text Analytics" +package_doc_id = "" +is_stable = false +is_arm = true diff --git a/azure-cognitiveservices-language-textanalytics/setup.cfg b/azure-cognitiveservices-language-textanalytics/setup.cfg index 2d986195ea2f..3c6e79cf31da 100644 --- a/azure-cognitiveservices-language-textanalytics/setup.cfg +++ b/azure-cognitiveservices-language-textanalytics/setup.cfg @@ -1,3 +1,2 @@ [bdist_wheel] universal=1 -azure-namespace-package=azure-cognitiveservices-language-nspkg \ No newline at end of file diff --git a/azure-cognitiveservices-language-textanalytics/setup.py b/azure-cognitiveservices-language-textanalytics/setup.py index d7e918673dfa..84eb13ea0bde 100644 --- a/azure-cognitiveservices-language-textanalytics/setup.py +++ b/azure-cognitiveservices-language-textanalytics/setup.py @@ -10,16 +10,10 @@ import os.path from io import open from setuptools import find_packages, setup -try: -    from azure_bdist_wheel import cmdclass -except ImportError: -    from distutils import log as logger -    logger.warn("Wheel is not available, disabling bdist_wheel hook") -    cmdclass = {} # Change the PACKAGE_NAME only to change folder and different name PACKAGE_NAME = "azure-cognitiveservices-language-textanalytics" -PACKAGE_PPRINT_NAME = "Cognitive Services Text Analytics" +PACKAGE_PPRINT_NAME = "Cognitive Services Text Analytics" # a-b-c => a/b/c package_folder_path = 
PACKAGE_NAME.replace('-', '/') @@ -72,13 +66,23 @@ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'License :: OSI Approved :: MIT License', ], zip_safe=False, - packages=find_packages(exclude=["tests"]), + packages=find_packages(exclude=[ + 'tests', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + 'azure.cognitiveservices', + 'azure.cognitiveservices.language', + ]), install_requires=[ - 'msrest>=0.4.24,<2.0.0', + 'msrest>=0.5.0', + 'msrestazure>=0.4.32,<2.0.0', 'azure-common~=1.1', ], - cmdclass=cmdclass + extras_require={ + ":python_version<'3.0'": ['azure-cognitiveservices-language-nspkg'], + } )