Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion azure-cognitiveservices-language-textanalytics/MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,2 +1,5 @@
include *.rst
include azure_bdist_wheel.py
include azure/__init__.py
include azure/cognitiveservices/__init__.py
include azure/cognitiveservices/language/__init__.py

12 changes: 10 additions & 2 deletions azure-cognitiveservices-language-textanalytics/README.rst
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
Microsoft Azure SDK for Python
==============================

This is the Microsoft Azure Cognitive Services Text Analytics Client Library.
This is the Microsoft Azure Cognitive Services Text Analytics Client Library.

Azure Resource Manager (ARM) is the next generation of management APIs that
replace the old Azure Service Management (ASM).

This package has been tested with Python 2.7, 3.3, 3.4, 3.5 and 3.6.
This package has been tested with Python 2.7, 3.4, 3.5, 3.6 and 3.7.

For the older Azure Service Management (ASM) libraries, see
`azure-servicemanagement-legacy <https://pypi.python.org/pypi/azure-servicemanagement-legacy>`__ library.
Expand All @@ -33,6 +33,14 @@ If you see azure==0.11.0 (or any version below 1.0), uninstall it first:
pip uninstall azure


Usage
=====

For code examples, see `Cognitive Services Text Analytics
<https://docs.microsoft.com/python/api/overview/azure/>`__
on docs.microsoft.com.


Provide Feedback
================

Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__import__('pkg_resources').declare_namespace(__name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__import__('pkg_resources').declare_namespace(__name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__import__('pkg_resources').declare_namespace(__name__)
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ class EntityRecord(Model):

:param name: Entity formal name.
:type name: str
:ivar matches: List of instances this entity appears in the text.
:vartype matches:
:param matches: List of instances this entity appears in the text.
:type matches:
list[~azure.cognitiveservices.language.textanalytics.models.MatchRecord]
:param wikipedia_language: Wikipedia language for which the WikipediaId
and WikipediaUrl refers to.
Expand All @@ -34,10 +34,13 @@ class EntityRecord(Model):
conjunction with the Bing Entity Search API to fetch additional relevant
information.
:type bing_id: str
:param type: Entity type from Named Entity Recognition model
:type type: str
:param sub_type: Entity sub type from Named Entity Recognition model
:type sub_type: str
"""

_validation = {
'matches': {'readonly': True},
'wikipedia_url': {'readonly': True},
}

Expand All @@ -48,13 +51,17 @@ class EntityRecord(Model):
'wikipedia_id': {'key': 'wikipediaId', 'type': 'str'},
'wikipedia_url': {'key': 'wikipediaUrl', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'sub_type': {'key': 'subType', 'type': 'str'},
}

def __init__(self, **kwargs):
super(EntityRecord, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.matches = None
self.matches = kwargs.get('matches', None)
self.wikipedia_language = kwargs.get('wikipedia_language', None)
self.wikipedia_id = kwargs.get('wikipedia_id', None)
self.wikipedia_url = None
self.bing_id = kwargs.get('bing_id', None)
self.type = kwargs.get('type', None)
self.sub_type = kwargs.get('sub_type', None)
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ class EntityRecord(Model):

:param name: Entity formal name.
:type name: str
:ivar matches: List of instances this entity appears in the text.
:vartype matches:
:param matches: List of instances this entity appears in the text.
:type matches:
list[~azure.cognitiveservices.language.textanalytics.models.MatchRecord]
:param wikipedia_language: Wikipedia language for which the WikipediaId
and WikipediaUrl refers to.
Expand All @@ -34,10 +34,13 @@ class EntityRecord(Model):
conjunction with the Bing Entity Search API to fetch additional relevant
information.
:type bing_id: str
:param type: Entity type from Named Entity Recognition model
:type type: str
:param sub_type: Entity sub type from Named Entity Recognition model
:type sub_type: str
"""

_validation = {
'matches': {'readonly': True},
'wikipedia_url': {'readonly': True},
}

Expand All @@ -48,13 +51,17 @@ class EntityRecord(Model):
'wikipedia_id': {'key': 'wikipediaId', 'type': 'str'},
'wikipedia_url': {'key': 'wikipediaUrl', 'type': 'str'},
'bing_id': {'key': 'bingId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'sub_type': {'key': 'subType', 'type': 'str'},
}

def __init__(self, *, name: str=None, wikipedia_language: str=None, wikipedia_id: str=None, bing_id: str=None, **kwargs) -> None:
def __init__(self, *, name: str=None, matches=None, wikipedia_language: str=None, wikipedia_id: str=None, bing_id: str=None, type: str=None, sub_type: str=None, **kwargs) -> None:
super(EntityRecord, self).__init__(**kwargs)
self.name = name
self.matches = None
self.matches = matches
self.wikipedia_language = wikipedia_language
self.wikipedia_id = wikipedia_id
self.wikipedia_url = None
self.bing_id = bing_id
self.type = type
self.sub_type = sub_type
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def __init__(
raise ValueError("Parameter 'endpoint' must not be None.")
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
base_url = '{Endpoint}/text/analytics/v2.0'
base_url = '{Endpoint}/text/analytics/v2.1-preview'

super(TextAnalyticsClientConfiguration, self).__init__(base_url)

Expand Down Expand Up @@ -67,7 +67,7 @@ def __init__(
super(TextAnalyticsClient, self).__init__(self.config.credentials, self.config)

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self.api_version = 'v2.0'
self.api_version = 'v2.1-preview'
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)

Expand Down Expand Up @@ -111,6 +111,7 @@ def key_phrases(

# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
Expand All @@ -119,9 +120,8 @@ def key_phrases(
body_content = self._serialize.body(input, 'MultiLanguageBatchInput')

# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)

if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
Expand Down Expand Up @@ -175,6 +175,7 @@ def detect_language(

# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
Expand All @@ -183,9 +184,8 @@ def detect_language(
body_content = self._serialize.body(input, 'BatchInput')

# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)

if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
Expand Down Expand Up @@ -242,6 +242,7 @@ def sentiment(

# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
Expand All @@ -250,9 +251,8 @@ def sentiment(
body_content = self._serialize.body(input, 'MultiLanguageBatchInput')

# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)

if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
Expand All @@ -273,9 +273,22 @@ def entities(
self, documents=None, custom_headers=None, raw=False, **operation_config):
"""The API returns a list of recognized entities in a given document.

To get even more information on each recognized entity we recommend
using the Bing Entity Search API by querying for the recognized
entities names. See the <a
The API returns a list of recognized entities in a given document. To
get even more information on each recognized entity we recommend using
the Bing Entity Search API by querying for the recognized entities
names. See the <a
href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/text-analytics-supported-languages">Supported
languages in Text Analytics API</a> for the list of enabled
languages.The API returns a list of known entities and general named
entities ("Person", "Location", "Organization" etc) in a given
document. Known entities are returned with Wikipedia Id and Wikipedia
link, and also Bing Id which can be used in Bing Entity Search API.
General named entities are returned with entity types. If a general
named entity is also a known entity, then all information regarding it
(Wikipedia Id, Bing Id, entity type etc) will be returned. See the <a
href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/how-tos/text-analytics-how-to-entity-linking#supported-types-for-named-entity-recognition">Supported
Entity Types in Text Analytics API</a> for the list of supported Entity
Types. See the <a
href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/text-analytics-supported-languages">Supported
languages in Text Analytics API</a> for the list of enabled languages.

Expand Down Expand Up @@ -308,6 +321,7 @@ def entities(

# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
Expand All @@ -316,9 +330,8 @@ def entities(
body_content = self._serialize.body(input, 'MultiLanguageBatchInput')

# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)

if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,5 @@
# regenerated.
# --------------------------------------------------------------------------

VERSION = "v2.0"
VERSION = "v2.1-preview"

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
[packaging]
package_name = "azure-cognitiveservices-language-textanalytics"
package_nspkg = "azure-cognitiveservices-language-nspkg"
package_pprint_name = "Cognitive Services Text Analytics"
package_doc_id = ""
is_stable = false
is_arm = true
1 change: 0 additions & 1 deletion azure-cognitiveservices-language-textanalytics/setup.cfg
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
[bdist_wheel]
universal=1
azure-namespace-package=azure-cognitiveservices-language-nspkg
24 changes: 14 additions & 10 deletions azure-cognitiveservices-language-textanalytics/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,10 @@
import os.path
from io import open
from setuptools import find_packages, setup
try:
from azure_bdist_wheel import cmdclass
except ImportError:
from distutils import log as logger
logger.warn("Wheel is not available, disabling bdist_wheel hook")
cmdclass = {}

# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-cognitiveservices-language-textanalytics"
PACKAGE_PPRINT_NAME = "Cognitive Services Text Analytics"
PACKAGE_PPRINT_NAME = "Cognitive Services Text Analytics"

# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
Expand Down Expand Up @@ -72,13 +66,23 @@
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=["tests"]),
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.cognitiveservices',
'azure.cognitiveservices.language',
]),
install_requires=[
'msrest>=0.4.24,<2.0.0',
'msrest>=0.5.0',
'msrestazure>=0.4.32,<2.0.0',
'azure-common~=1.1',
],
cmdclass=cmdclass
extras_require={
":python_version<'3.0'": ['azure-cognitiveservices-language-nspkg'],
}
)