Skip to content

Commit

Permalink
[textanalytics] remove dummy classes from samples (#19441)
Browse files Browse the repository at this point in the history
* just make samples module level functions

* update dedent for snippets
  • Loading branch information
kristapratico authored Jun 24, 2021
1 parent e1c0bc4 commit fbea032
Show file tree
Hide file tree
Showing 30 changed files with 1,573 additions and 1,641 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def cancel(self, **kwargs): # type: ignore
:start-after: [START analyze_healthcare_entities_with_cancellation]
:end-before: [END analyze_healthcare_entities_with_cancellation]
:language: python
:dedent: 8
:dedent: 4
:caption: Cancel an existing health operation.
"""
polling_interval = kwargs.pop("polling_interval", 5)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,14 +95,14 @@ class TextAnalyticsClient(TextAnalyticsClientBase):
:start-after: [START create_ta_client_with_key]
:end-before: [END create_ta_client_with_key]
:language: python
:dedent: 8
:dedent: 4
:caption: Creating the TextAnalyticsClient with endpoint and API key.
.. literalinclude:: ../samples/sample_authentication.py
:start-after: [START create_ta_client_with_aad]
:end-before: [END create_ta_client_with_aad]
:language: python
:dedent: 8
:dedent: 4
:caption: Creating the TextAnalyticsClient with endpoint and token credential from Azure Active Directory.
"""

Expand Down Expand Up @@ -174,7 +174,7 @@ def detect_language( # type: ignore
:start-after: [START detect_language]
:end-before: [END detect_language]
:language: python
:dedent: 8
:dedent: 4
:caption: Detecting language in a batch of documents.
"""
country_hint_arg = kwargs.pop("country_hint", None)
Expand Down Expand Up @@ -256,7 +256,7 @@ def recognize_entities( # type: ignore
:start-after: [START recognize_entities]
:end-before: [END recognize_entities]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -355,7 +355,7 @@ def recognize_pii_entities( # type: ignore
:start-after: [START recognize_pii_entities]
:end-before: [END recognize_pii_entities]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize personally identifiable information entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -457,7 +457,7 @@ def recognize_linked_entities( # type: ignore
:start-after: [START recognize_linked_entities]
:end-before: [END recognize_linked_entities]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize linked entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -559,7 +559,7 @@ def begin_analyze_healthcare_entities( # type: ignore
:start-after: [START analyze_healthcare_entities]
:end-before: [END analyze_healthcare_entities]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize healthcare entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -667,7 +667,7 @@ def extract_key_phrases( # type: ignore
:start-after: [START extract_key_phrases]
:end-before: [END extract_key_phrases]
:language: python
:dedent: 8
:dedent: 4
:caption: Extract the key phrases in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -759,7 +759,7 @@ def analyze_sentiment( # type: ignore
:start-after: [START analyze_sentiment]
:end-before: [END analyze_sentiment]
:language: python
:dedent: 8
:dedent: 4
:caption: Analyze sentiment in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -874,7 +874,7 @@ def begin_analyze_actions( # type: ignore
:start-after: [START analyze]
:end-before: [END analyze]
:language: python
:dedent: 8
:dedent: 4
:caption: Start a long-running operation to perform a variety of text analysis
actions over a batch of documents.
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ async def cancel( # type: ignore
:start-after: [START analyze_healthcare_entities_with_cancellation_async]
:end-before: [END analyze_healthcare_entities_with_cancellation_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Cancel an existing health operation.
"""
polling_interval = kwargs.pop("polling_interval", 5)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,14 +91,14 @@ class TextAnalyticsClient(AsyncTextAnalyticsClientBase):
:start-after: [START create_ta_client_with_key_async]
:end-before: [END create_ta_client_with_key_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Creating the TextAnalyticsClient with endpoint and API key.
.. literalinclude:: ../samples/async_samples/sample_authentication_async.py
:start-after: [START create_ta_client_with_aad_async]
:end-before: [END create_ta_client_with_aad_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Creating the TextAnalyticsClient with endpoint and token credential from Azure Active Directory.
"""

Expand Down Expand Up @@ -172,7 +172,7 @@ async def detect_language( # type: ignore
:start-after: [START detect_language_async]
:end-before: [END detect_language_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Detecting language in a batch of documents.
"""
country_hint_arg = kwargs.pop("country_hint", None)
Expand Down Expand Up @@ -252,7 +252,7 @@ async def recognize_entities( # type: ignore
:start-after: [START recognize_entities_async]
:end-before: [END recognize_entities_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -350,7 +350,7 @@ async def recognize_pii_entities( # type: ignore
:start-after: [START recognize_pii_entities]
:end-before: [END recognize_pii_entities]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize personally identifiable information entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -450,7 +450,7 @@ async def recognize_linked_entities( # type: ignore
:start-after: [START recognize_linked_entities_async]
:end-before: [END recognize_linked_entities_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Recognize linked entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -537,7 +537,7 @@ async def extract_key_phrases( # type: ignore
:start-after: [START extract_key_phrases_async]
:end-before: [END extract_key_phrases_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Extract the key phrases in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -625,7 +625,7 @@ async def analyze_sentiment( # type: ignore
:start-after: [START analyze_sentiment_async]
:end-before: [END analyze_sentiment_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Analyze sentiment in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -734,7 +734,7 @@ async def begin_analyze_healthcare_entities( # type: ignore
:start-after: [START analyze_healthcare_entities_async]
:end-before: [END analyze_healthcare_entities_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Analyze healthcare entities in a batch of documents.
"""
language_arg = kwargs.pop("language", None)
Expand Down Expand Up @@ -858,7 +858,7 @@ async def begin_analyze_actions( # type: ignore
:start-after: [START analyze_async]
:end-before: [END analyze_async]
:language: python
:dedent: 8
:dedent: 4
:caption: Start a long-running operation to perform a variety of text analysis actions over
a batch of documents.
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,41 +25,38 @@
import asyncio


class AlternativeDocumentInputSampleAsync(object):
    """Sample showing the alternative (dict) document input format for the
    async TextAnalyticsClient: each document is a dict carrying an ``id``,
    a per-document ``country_hint``, and the ``text`` itself.
    """

    async def alternative_document_input(self):
        """Detect the language of each dict-style document and print the
        primary language, ISO 639-1 name, and confidence score — or the
        per-document error when the service rejects a document.
        """
        from azure.core.credentials import AzureKeyCredential
        from azure.ai.textanalytics.aio import TextAnalyticsClient

        # Service endpoint and API key are read from the environment;
        # raises KeyError if either variable is unset.
        endpoint = os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"]
        key = os.environ["AZURE_TEXT_ANALYTICS_KEY"]

        text_analytics_client = TextAnalyticsClient(endpoint=endpoint, credential=AzureKeyCredential(key))

        # Dict form lets each document carry its own country_hint instead of
        # one hint applied to the whole batch.
        documents = [
            {"id": "0", "country_hint": "US", "text": "I had the best day of my life. I decided to go sky-diving and it made me appreciate my whole life so much more. I developed a deep-connection with my instructor as well."},
            {"id": "1", "country_hint": "GB",
                "text": "This was a waste of my time. The speaker put me to sleep."},
            {"id": "2", "country_hint": "MX", "text": "No tengo dinero ni nada que dar..."},
            {"id": "3", "country_hint": "FR",
                "text": "L'hôtel n'était pas très confortable. L'éclairage était trop sombre."}
        ]
        # async context manager closes the client's transport when done.
        async with text_analytics_client:
            result = await text_analytics_client.detect_language(documents)

        # result is ordered like the input, so idx maps back to documents.
        for idx, doc in enumerate(result):
            if not doc.is_error:
                print("Document text: {}".format(documents[idx]))
                print("Language detected: {}".format(doc.primary_language.name))
                print("ISO6391 name: {}".format(doc.primary_language.iso6391_name))
                print("Confidence score: {}\n".format(doc.primary_language.confidence_score))
            if doc.is_error:
                print(doc.id, doc.error)
async def sample_alternative_document_input():
    """Detect document language using the alternative (dict) input format.

    Each document is a dict with an ``id``, a per-document ``country_hint``,
    and the ``text`` itself; results (or per-document errors) are printed.
    """
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.textanalytics.aio import TextAnalyticsClient

    # Endpoint and key come from the environment (KeyError if unset).
    client = TextAnalyticsClient(
        endpoint=os.environ["AZURE_TEXT_ANALYTICS_ENDPOINT"],
        credential=AzureKeyCredential(os.environ["AZURE_TEXT_ANALYTICS_KEY"]),
    )

    # Dict-style documents let every entry carry its own country hint.
    docs = [
        {"id": "0", "country_hint": "US", "text": "I had the best day of my life. I decided to go sky-diving and it made me appreciate my whole life so much more. I developed a deep-connection with my instructor as well."},
        {"id": "1", "country_hint": "GB",
            "text": "This was a waste of my time. The speaker put me to sleep."},
        {"id": "2", "country_hint": "MX", "text": "No tengo dinero ni nada que dar..."},
        {"id": "3", "country_hint": "FR",
            "text": "L'hôtel n'était pas très confortable. L'éclairage était trop sombre."}
    ]
    # The async context manager closes the client's transport afterwards.
    async with client:
        results = await client.detect_language(docs)

    # Results come back in input order, so position maps back into docs.
    for position, outcome in enumerate(results):
        if outcome.is_error:
            print(outcome.id, outcome.error)
        else:
            print("Document text: {}".format(docs[position]))
            print("Language detected: {}".format(outcome.primary_language.name))
            print("ISO6391 name: {}".format(outcome.primary_language.iso6391_name))
            print("Confidence score: {}\n".format(outcome.primary_language.confidence_score))


async def main():
    """Entry point: run the module-level async sample.

    The commit replaced the dummy ``AlternativeDocumentInputSampleAsync``
    class with the module-level ``sample_alternative_document_input``
    coroutine, so only that function is awaited here; instantiating the
    removed class would raise NameError.
    """
    await sample_alternative_document_input()


if __name__ == '__main__':
Expand Down
Loading

0 comments on commit fbea032

Please sign in to comment.