@@ -0,0 +1,144 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.textanalytics;

import com.azure.ai.textanalytics.implementation.TextAnalyticsClientImpl;
import com.azure.ai.textanalytics.implementation.models.DocumentError;
import com.azure.ai.textanalytics.implementation.models.DocumentSentiment;
import com.azure.ai.textanalytics.implementation.models.MultiLanguageBatchInput;
import com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel;
import com.azure.ai.textanalytics.implementation.models.SentimentResponse;
import com.azure.ai.textanalytics.models.AnalyzeSentimentResult;
import com.azure.ai.textanalytics.models.DocumentResultCollection;
import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions;
import com.azure.ai.textanalytics.models.TextDocumentInput;
import com.azure.ai.textanalytics.models.TextSentiment;
import com.azure.ai.textanalytics.models.TextSentimentClass;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import reactor.core.publisher.Mono;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static com.azure.ai.textanalytics.Transforms.toTextAnalyticsError;
import static com.azure.ai.textanalytics.Transforms.toMultiLanguageInput;
import static com.azure.ai.textanalytics.Transforms.toTextDocumentStatistics;
import static com.azure.ai.textanalytics.Transforms.toBatchStatistics;
import static com.azure.ai.textanalytics.Transforms.mapByIndex;

class AnalyzeSentimentClient {
private final ClientLogger logger;
private final TextAnalyticsClientImpl service;

AnalyzeSentimentClient(TextAnalyticsClientImpl service, ClientLogger logger) {
this.logger = logger;
this.service = service;
}

Mono<Response<AnalyzeSentimentResult>> analyzeSentimentWithResponse(String text, String language, Context context) {
Objects.requireNonNull(text, "'text' cannot be null.");

return analyzeBatchSentimentWithResponse(
Collections.singletonList(new TextDocumentInput("0", text, language)), null, context)
.map(response -> new SimpleResponse<>(response, response.getValue().iterator().next()));
}

Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentWithResponse(
List<String> textInputs, String language, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

List<TextDocumentInput> documentInputs = mapByIndex(textInputs, (index, value) ->
new TextDocumentInput(index, value, language));
return analyzeBatchSentimentWithResponse(documentInputs, null, context);
}

Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeBatchSentimentWithResponse(
List<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput()
.setDocuments(toMultiLanguageInput(textInputs));
return service.sentimentWithRestResponseAsync(
batchInput,
options == null ? null : options.getModelVersion(),
options == null ? null : options.showStatistics(), context)
.doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString()))
.doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response))
.doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error))
.map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
}

/**
* Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}.
*
* @param sentimentResponse the {@link SentimentResponse} returned by the service.
*
* @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection(
final SentimentResponse sentimentResponse) {
List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) {
analyzeSentimentResults.add(convertToTextSentimentResult(documentSentiment));
}
for (DocumentError documentError : sentimentResponse.getErrors()) {
final com.azure.ai.textanalytics.models.TextAnalyticsError error =
toTextAnalyticsError(documentError.getError());
analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
error, null, null));
}
return new DocumentResultCollection<>(analyzeSentimentResults,
sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null
: toBatchStatistics(sentimentResponse.getStatistics()));
}

private AnalyzeSentimentResult convertToTextSentimentResult(final DocumentSentiment documentSentiment) {
// Document text sentiment
final TextSentimentClass documentSentimentClass =
    TextSentimentClass.fromString(documentSentiment.getSentiment().toString());
if (documentSentimentClass == null) {
// Do not throw an exception for an invalid sentiment type; we should not skip processing
// the other responses. It is a service issue.
logger.logExceptionAsWarning(
new RuntimeException(String.format("'%s' is not valid text sentiment.",
documentSentiment.getSentiment())));
}
final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores();

// Sentence text sentiment
final List<TextSentiment> sentenceSentimentTexts = documentSentiment.getSentences().stream()
.map(sentenceSentiment -> {
TextSentimentClass sentimentClass = TextSentimentClass.fromString(sentenceSentiment
.getSentiment().toString());
if (sentimentClass == null) {
// Do not throw an exception for an invalid sentiment type; we should not skip processing
// the other responses. It is a service issue.
logger.logExceptionAsWarning(
new RuntimeException(String.format("'%s' is not valid text sentiment.",
sentenceSentiment.getSentiment())));
}
SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores();

return new TextSentiment(sentimentClass, confidenceScorePerSentence.getNegative(),
confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive(),
sentenceSentiment.getLength(), sentenceSentiment.getOffset());

}).collect(Collectors.toList());

return new AnalyzeSentimentResult(documentSentiment.getId(),
documentSentiment.getStatistics() == null ? null
: toTextDocumentStatistics(documentSentiment.getStatistics()), null,
new TextSentiment(documentSentimentClass, confidenceScorePerLabel.getNegative(),
confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive(),
sentenceSentimentTexts.stream().mapToInt(TextSentiment::getLength).sum(), 0),
sentenceSentimentTexts);
}

}
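The List<String> overloads above rely on Transforms.mapByIndex to pair each raw string with its position, which becomes the document id. That helper is not part of this diff; the sketch below is only an inference from the call sites (the index is passed on as a String id to TextDocumentInput), not the actual Transforms implementation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;

final class MapByIndexSketch {
    // Hypothetical stand-in for Transforms.mapByIndex: pairs each element with its
    // zero-based index rendered as a String and applies the supplied mapping function.
    static <T, R> List<R> mapByIndex(List<T> items, BiFunction<String, T, R> mapper) {
        List<R> results = new ArrayList<>(items.size());
        for (int i = 0; i < items.size(); i++) {
            results.add(mapper.apply(String.valueOf(i), items.get(i)));
        }
        return results;
    }

    public static void main(String[] args) {
        // Mirrors how the client turns plain strings into id-tagged inputs.
        List<String> tagged = mapByIndex(Arrays.asList("great service", "long wait"),
            (index, text) -> index + ": " + text);
        System.out.println(tagged); // [0: great service, 1: long wait]
    }
}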
@@ -0,0 +1,116 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.textanalytics;

import com.azure.ai.textanalytics.implementation.TextAnalyticsClientImpl;
import com.azure.ai.textanalytics.implementation.models.DocumentError;
import com.azure.ai.textanalytics.implementation.models.DocumentLanguage;
import com.azure.ai.textanalytics.implementation.models.LanguageBatchInput;
import com.azure.ai.textanalytics.implementation.models.LanguageInput;
import com.azure.ai.textanalytics.implementation.models.LanguageResult;
import com.azure.ai.textanalytics.models.DetectLanguageInput;
import com.azure.ai.textanalytics.models.DetectLanguageResult;
import com.azure.ai.textanalytics.models.DetectedLanguage;
import com.azure.ai.textanalytics.models.DocumentResultCollection;
import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import reactor.core.publisher.Mono;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static com.azure.ai.textanalytics.Transforms.mapByIndex;

class DetectLanguageClient {
private final ClientLogger logger;
private final TextAnalyticsClientImpl service;

DetectLanguageClient(TextAnalyticsClientImpl service, ClientLogger logger) {
this.logger = logger;
this.service = service;
}

Mono<Response<DetectLanguageResult>> detectLanguageWithResponse(String text, String countryHint, Context context) {
Objects.requireNonNull(text, "'text' cannot be null.");
List<DetectLanguageInput> languageInputs = Collections.singletonList(new DetectLanguageInput("0",
text, countryHint));
return detectBatchLanguagesWithResponse(languageInputs, null, context).map(response ->
new SimpleResponse<>(response, response.getValue().iterator().next()));
}

Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguagesWithResponse(List<String> textInputs,
String countryHint, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");
List<DetectLanguageInput> detectLanguageInputs = mapByIndex(textInputs, (index, value) ->
new DetectLanguageInput(index, value, countryHint));

return detectBatchLanguagesWithResponse(detectLanguageInputs, null, context);
}

Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectBatchLanguagesWithResponse(
List<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

final LanguageBatchInput languageBatchInput = new LanguageBatchInput()
.setDocuments(textInputs.stream().map(detectLanguageInput -> new LanguageInput()
.setId(detectLanguageInput.getId()).setText(detectLanguageInput.getText())
.setCountryHint(detectLanguageInput.getCountryHint())).collect(Collectors.toList()));

return service.languagesWithRestResponseAsync(
languageBatchInput, options == null ? null : options.getModelVersion(),
options == null ? null : options.showStatistics(), context)
.doOnSubscribe(ignoredValue -> logger.info("A batch of language input - {}", textInputs.toString()))
.doOnSuccess(response -> logger.info("A batch of detected language output - {}", response.getValue()))
.doOnError(error -> logger.warning("Failed to detect languages - {}", error))
.map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
}

/**
* Helper method to convert the service response of {@link LanguageResult} to {@link DocumentResultCollection}.
*
* @param languageResult the {@link LanguageResult} returned by the service.
*
* @return the {@link DocumentResultCollection} of {@link DetectLanguageResult} to be returned by the SDK.
*/
private DocumentResultCollection<DetectLanguageResult> toDocumentResultCollection(
final LanguageResult languageResult) {

final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>();
for (DocumentLanguage documentLanguage : languageResult.getDocuments()) {
DetectedLanguage primaryLanguage = null;
if (documentLanguage.getDetectedLanguages().size() >= 1) {
com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguageResult =
    documentLanguage.getDetectedLanguages().get(0);

Member: It's confusing to have the same name for implementation and public model classes.

Contributor: Models in the implementation folder are auto-generated models. Some of the public model classes adopt the name, but they are different. We want to expose the same name, DetectedLanguage, and also keep the auto-generated code unchanged. That is the only way we found. Would love to see another option.

primaryLanguage = new DetectedLanguage(detectedLanguageResult.getName(),
detectedLanguageResult.getIso6391Name(), detectedLanguageResult.getScore());
}
detectLanguageResults.add(new DetectLanguageResult(documentLanguage.getId(),
documentLanguage.getStatistics() == null
? null : Transforms.toTextDocumentStatistics(documentLanguage.getStatistics()),
null,
primaryLanguage,
documentLanguage.getDetectedLanguages().stream().map(detectedLanguage ->
new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getScore())).collect(Collectors.toList())));
}

for (DocumentError documentError : languageResult.getErrors()) {
com.azure.ai.textanalytics.models.TextAnalyticsError error =
Transforms.toTextAnalyticsError(documentError.getError());
detectLanguageResults.add(
new DetectLanguageResult(documentError.getId(), null, error, null, null));
}

return new DocumentResultCollection<>(detectLanguageResults, languageResult.getModelVersion(),
languageResult.getStatistics() == null ? null
: Transforms.toBatchStatistics(languageResult.getStatistics()));
}

}
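The review thread above hinges on the auto-generated implementation model and the public model sharing the simple name DetectedLanguage. The client resolves the clash by importing only the public type and writing the generated one fully qualified. The following is a self-contained sketch of that mapping pattern; the nested stub classes are hypothetical stand-ins for the two real packages so the example compiles on its own.

final class DetectedLanguageNameClashSketch {
    // Stand-in for the auto-generated implementation.models package.
    static final class Generated {
        static final class DetectedLanguage {
            final String name;
            final String iso6391Name;
            final double score;
            DetectedLanguage(String name, String iso6391Name, double score) {
                this.name = name;
                this.iso6391Name = iso6391Name;
                this.score = score;
            }
        }
    }

    // Stand-in for the public models package that reuses the same simple name.
    static final class Models {
        static final class DetectedLanguage {
            final String name;
            final String iso6391Name;
            final double score;
            DetectedLanguage(String name, String iso6391Name, double score) {
                this.name = name;
                this.iso6391Name = iso6391Name;
                this.score = score;
            }
        }
    }

    // The conversion keeps the two same-named types apart by qualifying them
    // differently, mirroring toDocumentResultCollection above.
    static Models.DetectedLanguage toPublicModel(Generated.DetectedLanguage generated) {
        return new Models.DetectedLanguage(generated.name, generated.iso6391Name, generated.score);
    }

    public static void main(String[] args) {
        Models.DetectedLanguage publicModel =
            toPublicModel(new Generated.DetectedLanguage("English", "en", 1.0));
        System.out.println(publicModel.name + " (" + publicModel.iso6391Name + ", " + publicModel.score + ")");
    }
}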
@@ -0,0 +1,99 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.textanalytics;

import com.azure.ai.textanalytics.implementation.TextAnalyticsClientImpl;
import com.azure.ai.textanalytics.implementation.models.DocumentError;
import com.azure.ai.textanalytics.implementation.models.DocumentKeyPhrases;
import com.azure.ai.textanalytics.implementation.models.MultiLanguageBatchInput;
import com.azure.ai.textanalytics.models.DocumentResultCollection;
import com.azure.ai.textanalytics.models.ExtractKeyPhraseResult;
import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions;
import com.azure.ai.textanalytics.models.TextDocumentInput;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import reactor.core.publisher.Mono;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

import static com.azure.ai.textanalytics.Transforms.toTextAnalyticsError;
import static com.azure.ai.textanalytics.Transforms.toMultiLanguageInput;
import static com.azure.ai.textanalytics.Transforms.toTextDocumentStatistics;
import static com.azure.ai.textanalytics.Transforms.toBatchStatistics;
import static com.azure.ai.textanalytics.Transforms.mapByIndex;
import static com.azure.core.util.FluxUtil.monoError;

class ExtractKeyPhraseClient {
private final ClientLogger logger;
private final TextAnalyticsClientImpl service;

ExtractKeyPhraseClient(TextAnalyticsClientImpl service, ClientLogger logger) {
this.logger = logger;
this.service = service;
}

Mono<Response<ExtractKeyPhraseResult>> extractKeyPhrasesWithResponse(String text, String language,
Context context) {
Objects.requireNonNull(text, "'text' cannot be null.");

return extractBatchKeyPhrasesWithResponse(
Collections.singletonList(new TextDocumentInput("0", text, language)), null, context)
.map(response -> new SimpleResponse<>(response, response.getValue().iterator().next()));
}

Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesWithResponse(
List<String> textInputs, String language, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

List<TextDocumentInput> documentInputs = mapByIndex(textInputs, (index, value) ->
new TextDocumentInput(index, value, language));
try {
return extractBatchKeyPhrasesWithResponse(documentInputs, null, context);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}

Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractBatchKeyPhrasesWithResponse(
List<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput()
.setDocuments(toMultiLanguageInput(textInputs));
return service.keyPhrasesWithRestResponseAsync(
batchInput,
options == null ? null : options.getModelVersion(),
options == null ? null : options.showStatistics(), context)
.doOnSubscribe(ignoredValue -> logger.info("A batch of key phrases input - {}", textInputs.toString()))
.doOnSuccess(response -> logger.info("A batch of key phrases output - {}", response.getValue()))
.doOnError(error -> logger.warning("Failed to extract key phrases - {}", error))
.map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
}

/**
 * Helper method to convert the service response of
 * {@link com.azure.ai.textanalytics.implementation.models.KeyPhraseResult} to {@link DocumentResultCollection}.
 *
 * @param keyPhraseResult the KeyPhraseResult returned by the service.
 *
 * @return the {@link DocumentResultCollection} of {@link ExtractKeyPhraseResult} to be returned by the SDK.
 */
private DocumentResultCollection<ExtractKeyPhraseResult> toDocumentResultCollection(
    final com.azure.ai.textanalytics.implementation.models.KeyPhraseResult keyPhraseResult) {
List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>();
for (DocumentKeyPhrases documentKeyPhrases : keyPhraseResult.getDocuments()) {
keyPhraseResultList.add(new ExtractKeyPhraseResult(documentKeyPhrases.getId(),
documentKeyPhrases.getStatistics() == null ? null
: toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null,
documentKeyPhrases.getKeyPhrases()));
}

for (DocumentError documentError : keyPhraseResult.getErrors()) {
final com.azure.ai.textanalytics.models.TextAnalyticsError error =
toTextAnalyticsError(documentError.getError());
keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, error, null));
}

return new DocumentResultCollection<>(keyPhraseResultList,
keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null
: toBatchStatistics(keyPhraseResult.getStatistics()));
}
}
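Unlike the other two clients, the List<String> overload here wraps the batch call in a try/catch and routes synchronous failures through FluxUtil.monoError, so the caller receives an error signal on the returned Mono rather than a thrown exception. Below is a minimal sketch of that guard shape; the guarded helper is a local stand-in, not the real FluxUtil.monoError, which additionally routes the exception through the ClientLogger before wrapping it.

import reactor.core.publisher.Mono;

import java.util.concurrent.Callable;

final class MonoErrorGuardSketch {
    // Hypothetical helper approximating the try/catch-plus-monoError pattern above:
    // failures thrown while assembling the request become an error signal on the Mono.
    static <T> Mono<T> guarded(Callable<Mono<T>> call) {
        try {
            return call.call();
        } catch (Exception ex) {
            return Mono.error(ex);
        }
    }

    public static void main(String[] args) {
        Mono<String> result = guarded(() -> {
            throw new IllegalArgumentException("'textInputs' cannot be null.");
        });
        result.subscribe(
            value -> System.out.println("value: " + value),
            error -> System.out.println("error signal: " + error.getMessage()));
    }
}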