diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClient.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClient.java index a159344f130c..4247bcf95e7e 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClient.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClient.java @@ -4,11 +4,20 @@ package com.azure.ai.textanalytics; import com.azure.ai.textanalytics.implementation.TextAnalyticsClientImpl; +import com.azure.ai.textanalytics.implementation.models.DocumentEntities; import com.azure.ai.textanalytics.implementation.models.DocumentError; +import com.azure.ai.textanalytics.implementation.models.DocumentKeyPhrases; import com.azure.ai.textanalytics.implementation.models.DocumentLanguage; +import com.azure.ai.textanalytics.implementation.models.DocumentLinkedEntities; +import com.azure.ai.textanalytics.implementation.models.DocumentSentiment; +import com.azure.ai.textanalytics.implementation.models.EntitiesResult; +import com.azure.ai.textanalytics.implementation.models.EntityLinkingResult; import com.azure.ai.textanalytics.implementation.models.LanguageBatchInput; import com.azure.ai.textanalytics.implementation.models.LanguageResult; import com.azure.ai.textanalytics.implementation.models.MultiLanguageBatchInput; +import com.azure.ai.textanalytics.implementation.models.SentenceSentiment; +import com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel; +import com.azure.ai.textanalytics.implementation.models.SentimentResponse; import com.azure.ai.textanalytics.models.DetectLanguageInput; import com.azure.ai.textanalytics.models.DetectLanguageResult; import com.azure.ai.textanalytics.models.DocumentResultCollection; @@ -19,6 +28,8 @@ import com.azure.ai.textanalytics.models.TextAnalyticsClientOptions; 
import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions; import com.azure.ai.textanalytics.models.TextDocumentInput; +import com.azure.ai.textanalytics.models.TextSentiment; +import com.azure.ai.textanalytics.models.TextSentimentClass; import com.azure.ai.textanalytics.models.TextSentimentResult; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceClient; @@ -33,6 +44,7 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -56,7 +68,7 @@ public final class TextAnalyticsAsyncClient { this.serviceVersion = serviceVersion; this.clientOptions = clientOptions; } - + TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion) { this(service, serviceVersion, null); } @@ -75,8 +87,7 @@ public TextAnalyticsServiceVersion getServiceVersion() { * certainty that the identified language is true. * * @param text The text to be analyzed. - * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} has - * the {@link DetectLanguageResult detected language} of the text. + * @return A {@link Mono} containing the {@link DetectLanguageResult detected language} of the text. * @throws NullPointerException if {@code text} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) @@ -291,18 +302,33 @@ private static DetectLanguageResult convertToDetectLanguageResult(final Document documentLanguage.getDetectedLanguages().get(0), documentLanguage.getDetectedLanguages()); } - // (2) entities - // new user + // Named Entity + + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @return A {@link Mono} containing the {@link NamedEntityResult named entity} of the text. + * @throws NullPointerException if {@code text} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono recognizeEntities(String text) { return recognizeEntitiesWithResponse(text, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} has the + * {@link NamedEntityResult named entity} of the text. + * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeEntitiesWithResponse(String text, String language) { try { - return withContext( - context -> recognizeEntitiesWithResponse(text, language, context)); + return withContext(context -> recognizeEntitiesWithResponse(text, language, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -310,7 +336,7 @@ public Mono> recognizeEntitiesWithResponse(String te Mono> recognizeEntitiesWithResponse(String text, String language, Context context) { List documentInputs = new ArrayList<>(); - // TODO (savaity/shawn): update/validate inputs and id assigning + // TODO (shawn): update/validate inputs and id assigning documentInputs.add(new TextDocumentInput(Integer.toString(0), text, language)); return recognizeBatchEntitiesWithResponse(documentInputs, null, context).flatMap(response -> { Iterator responseItem = response.getValue().iterator(); @@ -321,11 +347,28 @@ Mono> recognizeEntitiesWithResponse(String text, Str }); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link NamedEntityResult named entity} of the text. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeEntities(List inputs) { return recognizeEntitiesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Response} of {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link NamedEntityResult named entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> recognizeEntitiesWithResponse( List inputs, String language) { @@ -343,18 +386,33 @@ Mono>> recognizeEntitiesWit return recognizeBatchEntitiesWithResponse(documentInputs, null, context); } - // advantage user + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link NamedEntityResult named entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeBatchEntities(List inputs) { return recognizeBatchEntitiesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @param options TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} contains the + * {@link DocumentResultCollection batch} of {@link NamedEntityResult named entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> recognizeBatchEntitiesWithResponse( List inputs, TextAnalyticsRequestOptions options) { try { - return withContext(context -> - recognizeBatchEntitiesWithResponse(inputs, options, context)); + return withContext(context -> recognizeBatchEntitiesWithResponse(inputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -362,44 +420,116 @@ public Mono>> recognizeBatc Mono>> recognizeBatchEntitiesWithResponse( List document, TextAnalyticsRequestOptions options, Context context) { + final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput().setDocuments(document); return service.entitiesRecognitionGeneralWithRestResponseAsync( - new MultiLanguageBatchInput().setDocuments(document), options == null ? null : options.getModelVersion(), + batchInput, + options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) - .map(response -> new SimpleResponse<>(response, null)); + .doOnSubscribe(ignoredValue -> logger.info("A batch of named entities input - {}", batchInput)) + .doOnSuccess(response -> logger.info("A batch of named entities output - {}", batchInput)) + .doOnError(error -> logger.warning("Failed to named entities - {}", batchInput)) + .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); + } + + private DocumentResultCollection toDocumentResultCollection( + final EntitiesResult entitiesResult) { + return new DocumentResultCollection<>(getDocumentNamedEntities(entitiesResult), + entitiesResult.getModelVersion(), entitiesResult.getStatistics()); + } + + private List getDocumentNamedEntities(final EntitiesResult entitiesResult) { + Stream validDocumentList = entitiesResult.getDocuments().stream() + .map(this::convertToNamedEntityResult); + Stream errorDocumentList = entitiesResult.getErrors().stream() + .map(this::convertToErrorNamedEntityResult); + + return 
Stream.concat(validDocumentList, errorDocumentList).collect(Collectors.toList()); + } + + private NamedEntityResult convertToNamedEntityResult(final DocumentEntities documentEntities) { + return new NamedEntityResult(documentEntities.getId(), documentEntities.getStatistics(), + documentEntities.getEntities()); + } + + private NamedEntityResult convertToErrorNamedEntityResult(final DocumentError documentError) { + final Error serviceError = documentError.getError(); + final Error error = new Error().setCode(serviceError.getCode()).setMessage(serviceError.getMessage()) + .setTarget(serviceError.getTarget()); + return new NamedEntityResult(documentError.getId(), error, true); } - // (3) PII entities - // new user + // PII Entity + + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @return A {@link Mono} containing the {@link NamedEntityResult PII entity} of the text. + * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono recognizePiiEntities(String text) { - return null; + return recognizePiiEntitiesWithResponse(text, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} has the + * {@link NamedEntityResult named entity} of the text. + * @throws NullPointerException if {@code text} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizePiiEntitiesWithResponse(String text, String language) { try { - return withContext(context -> - recognizePiiEntitiesWithResponse(text, language, context)); + return withContext(context -> recognizePiiEntitiesWithResponse(text, language, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono> recognizePiiEntitiesWithResponse(String text, String language, Context context) { - return null; + List documentInputs = new ArrayList<>(); + // TODO (shawn): update/validate inputs and id assigning + documentInputs.add(new TextDocumentInput(Integer.toString(0), text, language)); + return recognizeBatchPiiEntitiesWithResponse(documentInputs, null, context).flatMap(response -> { + Iterator responseItem = response.getValue().iterator(); + if (responseItem.hasNext()) { + return Mono.just(new SimpleResponse<>(response, responseItem.next())); + } + return monoError(logger, new RuntimeException("Unable to recognize PII entities for the provided text.")); + }); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link NamedEntityResult named entity} of the text. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizePiiEntities(List inputs) { return recognizePiiEntitiesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Response} of {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link NamedEntityResult named entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> recognizePiiEntitiesWithResponse( List inputs, String language) { try { - return withContext(context -> - recognizePiiEntitiesWithResponse(inputs, language, context)); + return withContext(context -> recognizePiiEntitiesWithResponse(inputs, language, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -412,43 +542,77 @@ Mono>> recognizePiiEntities return recognizeBatchPiiEntitiesWithResponse(documentInputs, null, context); } - // advantage user + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link NamedEntityResult named entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeBatchPiiEntities(List inputs) { return recognizeBatchPiiEntitiesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @param options TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} contains the + * {@link DocumentResultCollection batch} of {@link NamedEntityResult named entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> recognizeBatchPiiEntitiesWithResponse( List inputs, TextAnalyticsRequestOptions options) { try { - return withContext(context -> - recognizeBatchPiiEntitiesWithResponse(inputs, options, context)); + return withContext(context -> recognizeBatchPiiEntitiesWithResponse(inputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono>> recognizeBatchPiiEntitiesWithResponse( - List documents, TextAnalyticsRequestOptions options, Context context) { + List document, TextAnalyticsRequestOptions options, Context context) { + final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput().setDocuments(document); return service.entitiesRecognitionPiiWithRestResponseAsync( - new MultiLanguageBatchInput().setDocuments(documents), options == null ? null : options.getModelVersion(), + batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) - .map(response -> new SimpleResponse<>(response, null)); + .doOnSubscribe(ignoredValue -> logger.info("A batch of PII entities input - {}", batchInput)) + .doOnSuccess(response -> logger.info("A batch of PII entities output - {}", batchInput)) + .doOnError(error -> logger.warning("Failed to PII entities - {}", batchInput)) + .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } - // (4) Link entities - // new user + // Linked Entity + + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @return A {@link Mono} containing the {@link LinkedEntityResult linked entity} of the text. + * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono recognizeLinkedEntities(String text) { return recognizeLinkedEntitiesWithResponse(text, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. 
+ * @param language TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} has the + * {@link LinkedEntityResult named entity} of the text. + * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeLinkedEntitiesWithResponse(String text, String language) { try { - return withContext(context -> - recognizeLinkedEntitiesWithResponse(text, language, context)); + return withContext(context -> recognizeLinkedEntitiesWithResponse(text, language, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -457,7 +621,7 @@ public Mono> recognizeLinkedEntitiesWithResponse(St Mono> recognizeLinkedEntitiesWithResponse(String text, String language, Context context) { List documentInputs = new ArrayList<>(); - // TODO (savaity/shawn): update/validate inputs and id assigning + // TODO (shawn): update/validate inputs and id assigning documentInputs.add(new TextDocumentInput(Integer.toString(0), text, language)); return recognizeBatchLinkedEntitiesWithResponse(documentInputs, null, context).flatMap(response -> { Iterator responseItem = response.getValue().iterator(); @@ -469,17 +633,33 @@ Mono> recognizeLinkedEntitiesWithResponse(String te }); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link LinkedEntityResult linked entity} of the text. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeLinkedEntities(List inputs) { return recognizeLinkedEntitiesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. 
+ * @param language TODO (shawn): add doc + * @return A {@link Response} of {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link LinkedEntityResult linked entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> recognizeLinkedEntitiesWithResponse( List inputs, String language) { try { - return withContext(context -> - recognizeLinkedEntitiesWithResponse(inputs, language, context)); + return withContext(context -> recognizeLinkedEntitiesWithResponse(inputs, language, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -491,19 +671,34 @@ Mono>> recognizeLinkedEnti return recognizeBatchLinkedEntitiesWithResponse(documentInputs, null, context); } - // advantage user + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link LinkedEntityResult linked entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> recognizeBatchLinkedEntities( List inputs) { return recognizeBatchLinkedEntitiesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @param options TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} contains the + * {@link DocumentResultCollection batch} of {@link LinkedEntityResult linked entity}. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> recognizeBatchLinkedEntitiesWithResponse( List inputs, TextAnalyticsRequestOptions options) { try { - return withContext(context -> - recognizeBatchLinkedEntitiesWithResponse(inputs, options, context)); + return withContext(context -> recognizeBatchLinkedEntitiesWithResponse(inputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -511,18 +706,67 @@ public Mono>> recognizeBat Mono>> recognizeBatchLinkedEntitiesWithResponse( List inputs, TextAnalyticsRequestOptions options, Context context) { - return service.entitiesLinkingWithRestResponseAsync(new MultiLanguageBatchInput().setDocuments(inputs), - options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), - context).map(response -> new SimpleResponse<>(response, null)); + final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput().setDocuments(inputs); + return service.entitiesLinkingWithRestResponseAsync( + batchInput, + options == null ? null : options.getModelVersion(), + options == null ? 
null : options.showStatistics(), context) + .doOnSubscribe(ignoredValue -> logger.info("A batch of linked entities input - {}", batchInput)) + .doOnSuccess(response -> logger.info("A batch of linked entities output - {}", batchInput)) + .doOnError(error -> logger.warning("Failed to linked entities - {}", batchInput)) + .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); + } + + private DocumentResultCollection toDocumentResultCollection( + final EntityLinkingResult entityLinkingResult) { + return new DocumentResultCollection<>(getDocumentLinkedEntities(entityLinkingResult), + entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics()); + } + + private List getDocumentLinkedEntities(final EntityLinkingResult entitiesResult) { + Stream validDocumentList = entitiesResult.getDocuments().stream() + .map(this::convertToLinkedEntityResult); + Stream errorDocumentList = entitiesResult.getErrors().stream() + .map(this::convertToErrorLinkedEntityResult); + + return Stream.concat(validDocumentList, errorDocumentList).collect(Collectors.toList()); } - // (5) key phrase - // new user + private LinkedEntityResult convertToLinkedEntityResult(final DocumentLinkedEntities documentLinkedEntities) { + return new LinkedEntityResult(documentLinkedEntities.getId(), documentLinkedEntities.getStatistics(), + documentLinkedEntities.getEntities()); + } + + private LinkedEntityResult convertToErrorLinkedEntityResult(final DocumentError documentError) { + final Error serviceError = documentError.getError(); + final Error error = new Error().setCode(serviceError.getCode()).setMessage(serviceError.getMessage()) + .setTarget(serviceError.getTarget()); + return new LinkedEntityResult(documentError.getId(), error, true); + } + + // Key Phrases + + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @return A {@link Mono} containing the {@link KeyPhraseResult key phrases} of the text. 
+ * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono extractKeyPhrases(String text) { return extractKeyPhrasesWithResponse(text, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} has the + * {@link KeyPhraseResult key phrases} of the text. + * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> extractKeyPhrasesWithResponse(String text, String language) { try { @@ -532,11 +776,10 @@ public Mono> extractKeyPhrasesWithResponse(String text } } - Mono> extractKeyPhrasesWithResponse(String text, String language, - Context context) { + Mono> extractKeyPhrasesWithResponse(String text, String language, Context context) { List documentInputs = new ArrayList<>(); - documentInputs.add(new TextDocumentInput(Integer.toString(0), text, language)); // TODO (savaity): should this be a random number generator? + documentInputs.add(new TextDocumentInput(Integer.toString(0), text, language)); return extractBatchKeyPhrasesWithResponse(documentInputs, null, context).flatMap(response -> { Iterator responseItem = response.getValue().iterator(); if (responseItem.hasNext()) { @@ -546,12 +789,28 @@ Mono> extractKeyPhrasesWithResponse(String text, Strin }); } - // hackathon user + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link KeyPhraseResult key phrases} of the text. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> extractKeyPhrases(List inputs) { return extractKeyPhrasesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Response} of {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link KeyPhraseResult key phrases}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> extractKeyPhrasesWithResponse(List inputs, String language) { @@ -562,19 +821,34 @@ public Mono>> extractKeyPhras } } - Mono>> extractKeyPhrasesWithResponse(List inputs, - String language, - Context context) { + Mono>> extractKeyPhrasesWithResponse( + List inputs, String language, Context context) { List documentInputs = getDocumentInputList(inputs, language); return extractBatchKeyPhrasesWithResponse(documentInputs, null, context); } - // advantage user + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link KeyPhraseResult key phrases}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> extractBatchKeyPhrases(List inputs) { return extractBatchKeyPhrasesWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @param options TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} contains the + * {@link DocumentResultCollection batch} of {@link KeyPhraseResult key phrases}. + * @throws NullPointerException if {@code inputs} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> extractBatchKeyPhrasesWithResponse( List inputs, TextAnalyticsRequestOptions options) { @@ -587,23 +861,72 @@ public Mono>> extractBatchKey Mono>> extractBatchKeyPhrasesWithResponse( List document, TextAnalyticsRequestOptions options, Context context) { - return service.keyPhrasesWithRestResponseAsync(new MultiLanguageBatchInput().setDocuments(document), - options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), - context).map(response -> new SimpleResponse<>(response, null)); + final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput().setDocuments(document); + return service.keyPhrasesWithRestResponseAsync( + batchInput, + options == null ? null : options.getModelVersion(), + options == null ? null : options.showStatistics(), context) + .doOnSubscribe(ignoredValue -> logger.info("A batch of key phrases input - {}", batchInput)) + .doOnSuccess(response -> logger.info("A batch of key phrases output - {}", batchInput)) + .doOnError(error -> logger.warning("Failed to key phrases - {}", batchInput)) + .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); + } + + private DocumentResultCollection toDocumentResultCollection( + final com.azure.ai.textanalytics.implementation.models.KeyPhraseResult keyPhraseResult) { + return new DocumentResultCollection<>(getDocumentNamedEntities(keyPhraseResult), + keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics()); + } + + private List getDocumentNamedEntities( + final com.azure.ai.textanalytics.implementation.models.KeyPhraseResult keyPhraseResult) { + Stream validDocumentList = keyPhraseResult.getDocuments().stream() + .map(this::convertToKeyPhraseResult); + Stream errorDocumentList = keyPhraseResult.getErrors().stream() + .map(this::convertToErrorKeyPhraseResult); + + return Stream.concat(validDocumentList, 
errorDocumentList).collect(Collectors.toList()); + } + + private KeyPhraseResult convertToKeyPhraseResult(final DocumentKeyPhrases documentKeyPhrases) { + return new KeyPhraseResult(documentKeyPhrases.getId(), documentKeyPhrases.getStatistics(), + documentKeyPhrases.getKeyPhrases()); + } + + private KeyPhraseResult convertToErrorKeyPhraseResult(final DocumentError documentError) { + final Error serviceError = documentError.getError(); + final Error error = new Error().setCode(serviceError.getCode()).setMessage(serviceError.getMessage()) + .setTarget(serviceError.getTarget()); + return new KeyPhraseResult(documentError.getId(), error, true); } - // (6) sentiment - // new user, + // Sentiment + + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @return A {@link Mono} containing the {@link TextSentimentResult text sentiment} of the text. + * @throws NullPointerException if {@code text} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) - public Mono analyzeSentiment(String input) { - return analyzeSentimentWithResponse(input, null).flatMap(FluxUtil::toMono); + public Mono analyzeSentiment(String text) { + return analyzeSentimentWithResponse(text, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param text the text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} has the + * {@link TextSentimentResult text sentiment} of the text. + * @throws NullPointerException if {@code text} is {@code null}. 
+ */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> analyzeSentimentWithResponse(String text, String language) { try { - return withContext(context -> - analyzeSentimentWithResponse(text, language, context)); + return withContext(context -> analyzeSentimentWithResponse(text, language, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -622,11 +945,28 @@ Mono> analyzeSentimentWithResponse(String text, St }); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link TextSentimentResult text sentiment} of the text. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> analyzeSentiment(List inputs) { return analyzeSentimentWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of text to be analyzed. + * @param language TODO (shawn): add doc + * @return A {@link Response} of {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link TextSentimentResult text sentiment}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> analyzeSentimentWithResponse( List inputs, String language) { @@ -637,26 +977,39 @@ public Mono>> analyzeSent } } - Mono>> analyzeSentimentWithResponse(List inputs, - String language, - Context context) { - + Mono>> analyzeSentimentWithResponse( + List inputs, String language, Context context) { List documentInputs = getDocumentInputList(inputs, language); return analyzeBatchSentimentWithResponse(documentInputs, null, context); } - // advantage user + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. 
+ * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the + * {@link TextSentimentResult text sentiment}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono> analyzeBatchSentiment(List inputs) { return analyzeBatchSentimentWithResponse(inputs, null).flatMap(FluxUtil::toMono); } + /** + * TODO (shawn): add doc + * + * @param inputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. + * @param options TODO (shawn): add doc + * @return A {@link Mono} containing a {@link Response} whose {@link Response#getValue() value} contains the + * {@link DocumentResultCollection batch} of {@link TextSentimentResult text sentiment}. + * @throws NullPointerException if {@code inputs} is {@code null}. + */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono>> analyzeBatchSentimentWithResponse( List inputs, TextAnalyticsRequestOptions options) { try { - return withContext(context -> - analyzeBatchSentimentWithResponse(inputs, options, context)); + return withContext(context -> analyzeBatchSentimentWithResponse(inputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } @@ -664,10 +1017,110 @@ public Mono>> analyzeBatc Mono>> analyzeBatchSentimentWithResponse( List document, TextAnalyticsRequestOptions options, Context context) { + final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput().setDocuments(document); return service.sentimentWithRestResponseAsync( - new MultiLanguageBatchInput().setDocuments(document), options == null ? null : options.getModelVersion(), + batchInput, + options == null ? null : options.getModelVersion(), options == null ? 
null : options.showStatistics(), context) - .map(response -> new SimpleResponse<>(response, null)); + .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", batchInput)) + .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response.getValue())) + .doOnError(error -> logger.warning("Failed to text sentiment - {}", batchInput)) + .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); + } + + private DocumentResultCollection toDocumentResultCollection( + final SentimentResponse sentimentResponse) { + return new DocumentResultCollection(getDocumentTextSentiment(sentimentResponse), + sentimentResponse.getModelVersion(), sentimentResponse.getStatistics()); + } + + private List getDocumentTextSentiment(final SentimentResponse sentimentResponse) { + Stream validDocumentList = sentimentResponse.getDocuments().stream() + .map(this::convertToTextSentimentResult); + Stream errorDocumentList = sentimentResponse.getErrors().stream() + .map(this::convertToErrorTextSentimentResult); + + return Stream.concat(validDocumentList, errorDocumentList).collect(Collectors.toList()); + } + + private TextSentimentResult convertToTextSentimentResult(final DocumentSentiment documentSentiment) { + // Document text sentiment + final TextSentiment documentSentimentText = new TextSentiment(); + final TextSentimentClass documentSentimentClass = convertToTextSentimentClass(documentSentiment.getSentiment()); + if (documentSentimentClass == null) { + return null; + } + //TODO (shawn): calculate max length + documentSentimentText.setLength("MAX_LENGTH").setOffset(0).setTextSentimentClass(documentSentimentClass); + setTextSentimentScore(documentSentiment.getDocumentScores(), documentSentimentClass, documentSentimentText); + + // Sentence text sentiment + final List sentenceSentimentTexts = + convertToSentenceSentiments(documentSentiment.getSentences()); + + return new TextSentimentResult(documentSentiment.getId(), 
documentSentiment.getStatistics(), + documentSentimentText, sentenceSentimentTexts); + } + + private List convertToSentenceSentiments(final List sentenceSentiments) { + final List sentenceSentimentCollection = new ArrayList<>(); + sentenceSentiments.stream().forEach(sentenceSentiment -> { + final TextSentiment singleSentenceSentiment = new TextSentiment(); + singleSentenceSentiment.setLength(Integer.toString(sentenceSentiment.getLength())); + singleSentenceSentiment.setOffset(sentenceSentiment.getOffset()); + final TextSentimentClass sentimentClass = convertToTextSentimentClass(sentenceSentiment.getSentiment()); + setTextSentimentScore(sentenceSentiment.getSentenceScores(), sentimentClass, singleSentenceSentiment); + singleSentenceSentiment.setTextSentimentClass(sentimentClass); + + // TODO (Shawn): warnings are missing + // sentenceSentiment.getWarnings(); + sentenceSentimentCollection.add(singleSentenceSentiment); + }); + return sentenceSentimentCollection; + } + + private void setTextSentimentScore(final SentimentConfidenceScorePerLabel sentimentScore, + final TextSentimentClass textSentimentClass, final TextSentiment textSentimentResult) { + switch (textSentimentClass) { + case POSITIVE: + textSentimentResult.setPositiveScore(sentimentScore.getPositive()); + break; + case NEUTRAL: + textSentimentResult.setNeutralScore(sentimentScore.getNeutral()); + break; + case NEGATIVE: + textSentimentResult.setNegativeScore(sentimentScore.getNegative()); + break; + case MIXED: + textSentimentResult.setPositiveScore(sentimentScore.getPositive()); + textSentimentResult.setNeutralScore(sentimentScore.getNeutral()); + textSentimentResult.setNegativeScore(sentimentScore.getNegative()); + break; + default: + break; + } } + private TextSentimentClass convertToTextSentimentClass(final String sentiment) { + switch (sentiment.toLowerCase(Locale.ENGLISH)) { + case "positive": + return TextSentimentClass.POSITIVE; + case "neutral": + return TextSentimentClass.NEUTRAL; 
+ case "negative": + return TextSentimentClass.NEGATIVE; + case "mixed": + return TextSentimentClass.MIXED; + default: + throw logger.logExceptionAsWarning( + new RuntimeException(String.format("'%s' is not valid text sentiment.", sentiment))); + } + } + + private TextSentimentResult convertToErrorTextSentimentResult(final DocumentError documentError) { + final Error serviceError = documentError.getError(); + final Error error = new Error().setCode(serviceError.getCode()).setMessage(serviceError.getMessage()) + .setTarget(serviceError.getTarget()); + return new TextSentimentResult(documentError.getId(), error, true); + } } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/DocumentSentiment.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/DocumentSentiment.java index 2304841a7c20..a549d2ca507a 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/DocumentSentiment.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/DocumentSentiment.java @@ -39,7 +39,7 @@ public final class DocumentSentiment { * sentiment class. */ @JsonProperty(value = "documentScores", required = true) - private Object documentScores; + private SentimentConfidenceScorePerLabel documentScores; /* * Sentence level sentiment analysis. @@ -117,7 +117,7 @@ public DocumentSentiment setStatistics(TextDocumentStatistics statistics) { * * @return the documentScores value. */ - public Object getDocumentScores() { + public SentimentConfidenceScorePerLabel getDocumentScores() { return this.documentScores; } @@ -128,7 +128,7 @@ public Object getDocumentScores() { * @param documentScores the documentScores value to set. * @return the DocumentSentiment object itself. 
*/ - public DocumentSentiment setDocumentScores(Object documentScores) { + public DocumentSentiment setDocumentScores(SentimentConfidenceScorePerLabel documentScores) { this.documentScores = documentScores; return this; } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntitiesResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntitiesResult.java index 1fea312b07da..054133df1c9c 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntitiesResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntitiesResult.java @@ -4,8 +4,8 @@ package com.azure.ai.textanalytics.implementation.models; -import com.azure.core.annotation.Fluent; import com.azure.ai.textanalytics.models.TextBatchStatistics; +import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntityLinkingResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntityLinkingResult.java index 38caa542a5c3..771784f33d00 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntityLinkingResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/EntityLinkingResult.java @@ -4,8 +4,8 @@ package com.azure.ai.textanalytics.implementation.models; -import com.azure.core.annotation.Fluent; import com.azure.ai.textanalytics.models.TextBatchStatistics; +import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; diff --git 
a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/KeyPhraseResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/KeyPhraseResult.java index 0eef59e64956..2f5a2d5e8b8b 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/KeyPhraseResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/KeyPhraseResult.java @@ -4,8 +4,8 @@ package com.azure.ai.textanalytics.implementation.models; -import com.azure.core.annotation.Fluent; import com.azure.ai.textanalytics.models.TextBatchStatistics; +import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/LanguageResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/LanguageResult.java index e45d7cba139c..667a1d80fc26 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/LanguageResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/LanguageResult.java @@ -4,8 +4,8 @@ package com.azure.ai.textanalytics.implementation.models; -import com.azure.core.annotation.Fluent; import com.azure.ai.textanalytics.models.TextBatchStatistics; +import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/RequestStatistics.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/RequestStatistics.java deleted file mode 
100644 index b0934b055ab5..000000000000 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/RequestStatistics.java +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. -// Code generated by Microsoft (R) AutoRest Code Generator. - -package com.azure.ai.textanalytics.implementation.models; - -import com.azure.core.annotation.Fluent; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * if showStats=true was specified in the request this field will contain - * information about the request payload. - */ -@Fluent -public final class RequestStatistics { - /* - * Number of documents submitted in the request. - */ - @JsonProperty(value = "documentsCount", required = true) - private int documentsCount; - - /* - * Number of valid documents. This excludes empty, over-size limit or - * non-supported languages documents. - */ - @JsonProperty(value = "validDocumentsCount", required = true) - private int validDocumentsCount; - - /* - * Number of invalid documents. This includes empty, over-size limit or - * non-supported languages documents. - */ - @JsonProperty(value = "erroneousDocumentsCount", required = true) - private int erroneousDocumentsCount; - - /* - * Number of transactions for the request. - */ - @JsonProperty(value = "transactionsCount", required = true) - private long transactionsCount; - - /** - * Get the documentsCount property: Number of documents submitted in the - * request. - * - * @return the documentsCount value. - */ - public int getDocumentsCount() { - return this.documentsCount; - } - - /** - * Set the documentsCount property: Number of documents submitted in the - * request. - * - * @param documentsCount the documentsCount value to set. - * @return the RequestStatistics object itself. 
- */ - public RequestStatistics setDocumentsCount(int documentsCount) { - this.documentsCount = documentsCount; - return this; - } - - /** - * Get the validDocumentsCount property: Number of valid documents. This - * excludes empty, over-size limit or non-supported languages documents. - * - * @return the validDocumentsCount value. - */ - public int getValidDocumentsCount() { - return this.validDocumentsCount; - } - - /** - * Set the validDocumentsCount property: Number of valid documents. This - * excludes empty, over-size limit or non-supported languages documents. - * - * @param validDocumentsCount the validDocumentsCount value to set. - * @return the RequestStatistics object itself. - */ - public RequestStatistics setValidDocumentsCount(int validDocumentsCount) { - this.validDocumentsCount = validDocumentsCount; - return this; - } - - /** - * Get the erroneousDocumentsCount property: Number of invalid documents. - * This includes empty, over-size limit or non-supported languages - * documents. - * - * @return the erroneousDocumentsCount value. - */ - public int getErroneousDocumentsCount() { - return this.erroneousDocumentsCount; - } - - /** - * Set the erroneousDocumentsCount property: Number of invalid documents. - * This includes empty, over-size limit or non-supported languages - * documents. - * - * @param erroneousDocumentsCount the erroneousDocumentsCount value to set. - * @return the RequestStatistics object itself. - */ - public RequestStatistics setErroneousDocumentsCount(int erroneousDocumentsCount) { - this.erroneousDocumentsCount = erroneousDocumentsCount; - return this; - } - - /** - * Get the transactionsCount property: Number of transactions for the - * request. - * - * @return the transactionsCount value. - */ - public long getTransactionsCount() { - return this.transactionsCount; - } - - /** - * Set the transactionsCount property: Number of transactions for the - * request. - * - * @param transactionsCount the transactionsCount value to set. 
- * @return the RequestStatistics object itself. - */ - public RequestStatistics setTransactionsCount(long transactionsCount) { - this.transactionsCount = transactionsCount; - return this; - } -} diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentenceSentiment.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentenceSentiment.java index 8b5e445e82cf..280075f847a1 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentenceSentiment.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentenceSentiment.java @@ -25,7 +25,7 @@ public final class SentenceSentiment { * classes. */ @JsonProperty(value = "sentenceScores", required = true) - private Object sentenceScores; + private SentimentConfidenceScorePerLabel sentenceScores; /* * The sentence offset from the start of the document. @@ -73,7 +73,7 @@ public SentenceSentiment setSentiment(String sentiment) { * * @return the sentenceScores value. */ - public Object getSentenceScores() { + public SentimentConfidenceScorePerLabel getSentenceScores() { return this.sentenceScores; } @@ -84,7 +84,7 @@ public Object getSentenceScores() { * @param sentenceScores the sentenceScores value to set. * @return the SentenceSentiment object itself. 
*/ - public SentenceSentiment setSentenceScores(Object sentenceScores) { + public SentenceSentiment setSentenceScores(SentimentConfidenceScorePerLabel sentenceScores) { this.sentenceScores = sentenceScores; return this; } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentimentResponse.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentimentResponse.java index 95a683f7cbb8..6cca62fb063f 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentimentResponse.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/SentimentResponse.java @@ -4,6 +4,7 @@ package com.azure.ai.textanalytics.implementation.models; +import com.azure.ai.textanalytics.models.TextBatchStatistics; import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; @@ -29,7 +30,7 @@ public final class SentimentResponse { * The statistics property. */ @JsonProperty(value = "statistics") - private RequestStatistics statistics; + private TextBatchStatistics statistics; /* * This field indicates which model is used for scoring. @@ -82,7 +83,7 @@ public SentimentResponse setErrors(List errors) { * * @return the statistics value. */ - public RequestStatistics getStatistics() { + public TextBatchStatistics getStatistics() { return this.statistics; } @@ -92,7 +93,7 @@ public RequestStatistics getStatistics() { * @param statistics the statistics value to set. * @return the SentimentResponse object itself. 
*/ - public SentimentResponse setStatistics(RequestStatistics statistics) { + public SentimentResponse setStatistics(TextBatchStatistics statistics) { this.statistics = statistics; return this; } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/KeyPhraseResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/KeyPhraseResult.java index aa6537ee2d3d..468f3ee01f2b 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/KeyPhraseResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/KeyPhraseResult.java @@ -10,10 +10,17 @@ /** * The KeyPhraseResult model. */ +// TODO (shawn): Should be @Immutable, but will produce spotbug/checkstyle error @Fluent public final class KeyPhraseResult extends DocumentResult { private List keyPhrases; + // TODO(shawn): not public modifier + public KeyPhraseResult(String id, Error error, boolean isError) { + super(id, error, isError); + keyPhrases = null; + } + public KeyPhraseResult(String id, TextDocumentStatistics textDocumentStatistics, List keyPhrases) { super(id, textDocumentStatistics); this.keyPhrases = keyPhrases; diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/LinkedEntityResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/LinkedEntityResult.java index 5d03eb62c7a7..526111f14f93 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/LinkedEntityResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/LinkedEntityResult.java @@ -11,10 +11,19 @@ /** * The LinkedEntityResult model. 
*/ +// TODO (shawn): Should be @Immutable, but will produce spotbug/checkstyle error @Fluent public final class LinkedEntityResult extends DocumentResult { private final List linkedEntities; + // TODO(shawn): not public modifier + public LinkedEntityResult(String id, Error error, boolean isError) { + super(id, error, isError); + linkedEntities = null; + } + + // TODO(shawn): not public modifier + /** * LinkedEntityResult model constructor * diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntity.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntity.java index 277256c42aef..5a4dadf492ee 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntity.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntity.java @@ -63,7 +63,7 @@ public String getText() { * @param text the text value to set. * @return the NamedEntity object itself. */ - NamedEntity setText(String text) { + public NamedEntity setText(String text) { this.text = text; return this; } @@ -83,7 +83,7 @@ public String getType() { * @param type the type value to set. * @return the NamedEntity object itself. */ - NamedEntity setType(String type) { + public NamedEntity setType(String type) { this.type = type; return this; } @@ -105,7 +105,7 @@ public String getSubtype() { * @param subtype the subtype value to set. * @return the NamedEntity object itself. */ - NamedEntity setSubtype(String subtype) { + public NamedEntity setSubtype(String subtype) { this.subtype = subtype; return this; } @@ -127,7 +127,7 @@ public int getOffset() { * @param offset the offset value to set. * @return the NamedEntity object itself. 
*/ - NamedEntity setOffset(int offset) { + public NamedEntity setOffset(int offset) { this.offset = offset; return this; } @@ -149,7 +149,7 @@ public int getLength() { * @param length the length value to set. * @return the NamedEntity object itself. */ - NamedEntity setLength(int length) { + public NamedEntity setLength(int length) { this.length = length; return this; } @@ -171,7 +171,7 @@ public double getScore() { * @param score the score value to set. * @return the NamedEntity object itself. */ - NamedEntity setScore(double score) { + public NamedEntity setScore(double score) { this.score = score; return this; } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntityResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntityResult.java index d08507cb256c..a2b7c1280a28 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntityResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/NamedEntityResult.java @@ -10,10 +10,18 @@ /** * The NamedEntityResult model. 
*/ +// TODO (shawn): Should be @Immutable, but will produce spotbug/checkstyle error @Fluent public final class NamedEntityResult extends DocumentResult { - private List namedEntities; + private final List namedEntities; + // TODO(shawn): not public modifier + public NamedEntityResult(String id, Error error, boolean isError) { + super(id, error, isError); + namedEntities = null; + } + + // TODO(shawn): not public modifier public NamedEntityResult(String id, TextDocumentStatistics textDocumentStatistics, List namedEntities) { super(id, textDocumentStatistics); @@ -23,9 +31,4 @@ public NamedEntityResult(String id, TextDocumentStatistics textDocumentStatistic public List getNamedEntities() { return namedEntities; } - - NamedEntityResult setNamedEntities(List namedEntities) { - this.namedEntities = namedEntities; - return this; - } } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextBatchStatistics.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextBatchStatistics.java index af99b4f6ff9b..c4155fd4ff1f 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextBatchStatistics.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextBatchStatistics.java @@ -40,24 +40,24 @@ public final class TextBatchStatistics { private long transactionCount; /** - * Get the documentsCount property: Number of documents submitted in the + * Get the documentCount property: Number of documents submitted in the * request. * - * @return the documentsCount value. + * @return the documentCount value. */ public int getDocumentCount() { return this.documentCount; } /** - * Set the documentsCount property: Number of documents submitted in the + * Set the documentCount property: Number of documents submitted in the * request. * - * @param documentsCount the documentsCount value to set. 
- * @return the DocumentBatchStatistics object itself. + * @param documentCount the documentCount value to set. + * @return the TextBatchStatistics object itself. */ - public TextBatchStatistics setDocumentCount(int documentsCount) { - this.documentCount = documentsCount; + public TextBatchStatistics setDocumentCount(int documentCount) { + this.documentCount = documentCount; return this; } @@ -76,7 +76,7 @@ public int getValidDocumentCount() { * excludes empty, over-size limit or non-supported languages documents. * * @param validDocumentCount the validDocumentCount value to set. - * @return the DocumentBatchStatistics object itself. + * @return the TextBatchStatistics object itself. */ public TextBatchStatistics setValidDocumentCount(int validDocumentCount) { this.validDocumentCount = validDocumentCount; @@ -100,7 +100,7 @@ public int getErroneousDocumentCount() { * documents. * * @param erroneousDocumentCount the erroneousDocumentCount value to set. - * @return the DocumentBatchStatistics object itself. + * @return the TextBatchStatistics object itself. */ public TextBatchStatistics setErroneousDocumentCount(int erroneousDocumentCount) { this.erroneousDocumentCount = erroneousDocumentCount; @@ -108,21 +108,21 @@ public TextBatchStatistics setErroneousDocumentCount(int erroneousDocumentCount) } /** - * Get the transactionsCount property: Number of transactions for the + * Get the transactionCount property: Number of transactions for the * request. * - * @return the transactionsCount value. + * @return the transactionCount value. */ public long getTransactionCount() { return this.transactionCount; } /** - * Set the transactionsCount property: Number of transactions for the + * Set the transactionCount property: Number of transactions for the * request. * - * @param transactionCount the transactionsCount value to set. - * @return the DocumentBatchStatistics object itself. + * @param transactionCount the transactionCount value to set. 
+ * @return the TextBatchStatistics object itself. */ public TextBatchStatistics setTransactionCount(long transactionCount) { this.transactionCount = transactionCount; diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentiment.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentiment.java index 6322202d4c4a..1977fe697d1c 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentiment.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentiment.java @@ -30,7 +30,7 @@ public String getLength() { return length; } - TextSentiment setLength(String length) { + public TextSentiment setLength(String length) { this.length = length; return this; } @@ -39,7 +39,7 @@ public double getNegativeScore() { return negativeScore; } - TextSentiment setNegativeScore(double negativeScore) { + public TextSentiment setNegativeScore(double negativeScore) { this.negativeScore = negativeScore; return this; } @@ -48,7 +48,7 @@ public double getNeutralScore() { return neutralScore; } - TextSentiment setNeutralScore(double neutralScore) { + public TextSentiment setNeutralScore(double neutralScore) { this.neutralScore = neutralScore; return this; } @@ -57,7 +57,7 @@ public double getPositiveScore() { return positiveScore; } - TextSentiment setPositiveScore(double positiveScore) { + public TextSentiment setPositiveScore(double positiveScore) { this.positiveScore = positiveScore; return this; } @@ -66,7 +66,7 @@ public int getOffset() { return offset; } - TextSentiment setOffset(int offset) { + public TextSentiment setOffset(int offset) { this.offset = offset; return this; } @@ -75,7 +75,7 @@ public TextSentimentClass getTextSentimentClass() { return textSentimentClass; } - TextSentiment setTextSentimentClass(TextSentimentClass textSentimentClass) { + public TextSentiment 
setTextSentimentClass(TextSentimentClass textSentimentClass) { this.textSentimentClass = textSentimentClass; return this; } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentClass.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentClass.java index f9c65de58541..079cc159eb46 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentClass.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentClass.java @@ -10,5 +10,5 @@ public enum TextSentimentClass { POSITIVE, NEGATIVE, NEUTRAL, - MIXED; + MIXED } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentResult.java b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentResult.java index 9552011ffcdd..c0e2c3189dd9 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentResult.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/models/TextSentimentResult.java @@ -3,6 +3,7 @@ package com.azure.ai.textanalytics.models; + import com.azure.core.annotation.Fluent; import java.util.List; @@ -10,13 +11,21 @@ /** * The TextSentimentResult model. 
*/ +// TODO (shawn): Should be @Immutable, but will produce spotbug/checkstyle error @Fluent public final class TextSentimentResult extends DocumentResult { - private TextSentiment documentSentiment; - private List sentenceSentiments; + private final TextSentiment documentSentiment; + private final List sentenceSentiments; + + // TODO(shawn): not public modifier + public TextSentimentResult(String id, Error error, boolean isError) { + super(id, error, isError); + documentSentiment = null; + sentenceSentiments = null; + } public TextSentimentResult(String id, TextDocumentStatistics textDocumentStatistics, - TextSentiment documentSentiment, List sentenceSentiments) { + TextSentiment documentSentiment, List sentenceSentiments) { super(id, textDocumentStatistics); this.documentSentiment = documentSentiment; this.sentenceSentiments = sentenceSentiments; @@ -29,14 +38,4 @@ public TextSentiment getDocumentSentiment() { public List getSentenceSentiments() { return sentenceSentiments; } - - TextSentimentResult setDocumentSentiment(TextSentiment documentSentiment) { - this.documentSentiment = documentSentiment; - return this; - } - - TextSentimentResult setSentenceSentiments(List sentenceSentiments) { - this.sentenceSentiments = sentenceSentiments; - return this; - } } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/AnalyzeSentiment.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/AnalyzeSentiment.java index 77d7d27b6940..9cabab3a3f9e 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/AnalyzeSentiment.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/AnalyzeSentiment.java @@ -5,6 +5,7 @@ import com.azure.ai.textanalytics.models.TextSentiment; import com.azure.ai.textanalytics.models.TextSentimentResult; +import com.azure.core.util.Configuration; import java.util.List; @@ -13,8 +14,10 @@ 
public class AnalyzeSentiment { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The text that need be analysed. diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/HelloWorld.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/HelloWorld.java index 09085d4224c3..837371e52941 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/HelloWorld.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/HelloWorld.java @@ -5,6 +5,7 @@ import com.azure.ai.textanalytics.models.DetectLanguageResult; import com.azure.ai.textanalytics.models.DetectedLanguage; +import com.azure.core.util.Configuration; import java.util.List; @@ -13,8 +14,10 @@ public class HelloWorld { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The text that need be analysed. 
diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeEntities.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeEntities.java index db5ad5ef1b2d..f5da78da03b7 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeEntities.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeEntities.java @@ -3,13 +3,17 @@ package com.azure.ai.textanalytics; +import com.azure.core.util.Configuration; + public class RecognizeEntities { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The text that need be analysed. diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeKeyPhrases.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeKeyPhrases.java index a752133c8f14..d05441365454 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeKeyPhrases.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeKeyPhrases.java @@ -3,13 +3,17 @@ package com.azure.ai.textanalytics; +import com.azure.core.util.Configuration; + public class RecognizeKeyPhrases { public static void main(String[] args) { // Instantiate a client that will be used to call the service. 
TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The text that need be analysed. diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeLinkedEntities.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeLinkedEntities.java index 67e604fd6a57..4ece6b0c7a78 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeLinkedEntities.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizeLinkedEntities.java @@ -3,13 +3,17 @@ package com.azure.ai.textanalytics; +import com.azure.core.util.Configuration; + public class RecognizeLinkedEntities { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The text that need be analysed. 
diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizePII.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizePII.java index 6099c32cc4a0..d25f8d2a0cd7 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizePII.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/RecognizePII.java @@ -3,13 +3,17 @@ package com.azure.ai.textanalytics; +import com.azure.core.util.Configuration; + public class RecognizePII { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The text that need be analysed. 
diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/AnalyzeSentimentBatchDocuments.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/AnalyzeSentimentBatchDocuments.java index a38e767f60a7..439aca396912 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/AnalyzeSentimentBatchDocuments.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/AnalyzeSentimentBatchDocuments.java @@ -11,6 +11,7 @@ import com.azure.ai.textanalytics.models.TextDocumentInput; import com.azure.ai.textanalytics.models.TextSentiment; import com.azure.ai.textanalytics.models.TextSentimentResult; +import com.azure.core.util.Configuration; import com.azure.core.util.Context; import java.util.Arrays; @@ -21,17 +22,19 @@ public class AnalyzeSentimentBatchDocuments { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The texts that need be analysed. 
List inputs = Arrays.asList( - new TextDocumentInput("1", "The hotel was dark and unclean.", "US"), - new TextDocumentInput("2", "The restaurant had amazing gnocci.", "US") + new TextDocumentInput("1", "The hotel was dark and unclean.", "en"), + new TextDocumentInput("2", "The restaurant had amazing gnocchi.", "en") ); - final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true).setModelVersion("1.0"); + final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection detectedBatchResult = client.analyzeBatchSentimentWithResponse(inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s", detectedBatchResult.getModelVersion()); diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/DetectLanguageBatchDocuments.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/DetectLanguageBatchDocuments.java index 9899e8741041..18b285de94b0 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/DetectLanguageBatchDocuments.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/DetectLanguageBatchDocuments.java @@ -27,7 +27,7 @@ public static void main(String[] args) { // The texts that need be analysed. 
List inputs = Arrays.asList( - new DetectLanguageInput("1", "This is written in English", "US"), + new DetectLanguageInput("1", "This is written in English", "en"), new DetectLanguageInput("2", "Este es un document escrito en Español.", "es") ); diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeEntitiesBatchDocuments.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeEntitiesBatchDocuments.java index 29bcbb1b31fa..433e00f56342 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeEntitiesBatchDocuments.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeEntitiesBatchDocuments.java @@ -10,6 +10,7 @@ import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions; import com.azure.ai.textanalytics.models.TextBatchStatistics; import com.azure.ai.textanalytics.models.TextDocumentInput; +import com.azure.core.util.Configuration; import com.azure.core.util.Context; import java.util.Arrays; @@ -19,18 +20,21 @@ public class RecognizeEntitiesBatchDocuments { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The texts that need be analysed. 
List inputs = Arrays.asList( - new TextDocumentInput("1", "Satya Nadella is the CEO of Microsoft", "US"), - new TextDocumentInput("2", "Elon Musk is the CEO of SpaceX and Tesla.", "US") + new TextDocumentInput("1", "Satya Nadella is the CEO of Microsoft", "en"), + new TextDocumentInput("2", "Elon Musk is the CEO of SpaceX and Tesla.", "en") ); - final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true).setModelVersion("1.0"); - final DocumentResultCollection detectedBatchResult = client.recognizeBatchEntitiesWithResponse(inputs, requestOptions, Context.NONE).getValue(); + final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); + final DocumentResultCollection detectedBatchResult = + client.recognizeBatchEntitiesWithResponse(inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s", detectedBatchResult.getModelVersion()); final TextBatchStatistics batchStatistics = detectedBatchResult.getStatistics(); diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeKeyPhrasesBatchDocuments.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeKeyPhrasesBatchDocuments.java index 7d60eafa8975..57061d5506f9 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeKeyPhrasesBatchDocuments.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeKeyPhrasesBatchDocuments.java @@ -10,6 +10,7 @@ import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions; import com.azure.ai.textanalytics.models.TextBatchStatistics; import com.azure.ai.textanalytics.models.TextDocumentInput; +import com.azure.core.util.Configuration; import com.azure.core.util.Context; import java.util.Arrays; @@ -20,17 +21,19 @@ public class 
RecognizeKeyPhrasesBatchDocuments { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The texts that need be analysed. List inputs = Arrays.asList( - new TextDocumentInput("1", "My cat might need to see a veterinarian", "US"), - new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "US") + new TextDocumentInput("1", "My cat might need to see a veterinarian", "en"), + new TextDocumentInput("2", "The pitot tube is used to measure airspeed.", "en") ); - final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true).setModelVersion("1.0"); + final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); final DocumentResultCollection detectedBatchResult = client.extractBatchKeyPhrasesWithResponse(inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s", detectedBatchResult.getModelVersion()); diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeLinkedEntitiesBatchDocuments.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeLinkedEntitiesBatchDocuments.java index d0e71c41bfed..c0d2feeb9741 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeLinkedEntitiesBatchDocuments.java +++ 
b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizeLinkedEntitiesBatchDocuments.java @@ -10,6 +10,7 @@ import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions; import com.azure.ai.textanalytics.models.TextBatchStatistics; import com.azure.ai.textanalytics.models.TextDocumentInput; +import com.azure.core.util.Configuration; import com.azure.core.util.Context; import java.util.Arrays; @@ -20,17 +21,19 @@ public class RecognizeLinkedEntitiesBatchDocuments { public static void main(String[] args) { // Instantiate a client that will be used to call the service. TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The texts that need be analysed. 
List inputs = Arrays.asList( - new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "US"), - new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "US") + new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.", "en"), + new TextDocumentInput("2", "Mount Shasta has lenticular clouds.", "en") ); - final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true).setModelVersion("1.0"); + final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); final DocumentResultCollection detectedBatchResult = client.recognizeBatchLinkedEntitiesWithResponse(inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s", detectedBatchResult.getModelVersion()); diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizePIIBatchDocuments.java b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizePIIBatchDocuments.java index 1e64d94c4f91..cd32b62acf70 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizePIIBatchDocuments.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/samples/java/com/azure/ai/textanalytics/batch/RecognizePIIBatchDocuments.java @@ -10,6 +10,7 @@ import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions; import com.azure.ai.textanalytics.models.TextBatchStatistics; import com.azure.ai.textanalytics.models.TextDocumentInput; +import com.azure.core.util.Configuration; import com.azure.core.util.Context; import java.util.Arrays; @@ -20,17 +21,19 @@ public class RecognizePIIBatchDocuments { public static void main(String[] args) { // Instantiate a client that will be used to call the service. 
TextAnalyticsClient client = new TextAnalyticsClientBuilder() - .subscriptionKey("subscription-key") - .endpoint("https://servicename.cognitiveservices.azure.com/") +// .subscriptionKey("subscription-key") +// .endpoint("https://servicename.cognitiveservices.azure.com/") + .subscriptionKey(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SUBSCRIPTION_KEY")) + .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); // The texts that need be analysed. List inputs = Arrays.asList( - new TextDocumentInput("1", "My SSN is 555-55-5555", "US"), - new TextDocumentInput("2", "Visa card 4147999933330000", "US") + new TextDocumentInput("1", "My SSN is 555-55-5555", "en"), + new TextDocumentInput("2", "Visa card 4147999933330000", "en") ); - final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true).setModelVersion("1.0"); + final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); final DocumentResultCollection detectedBatchResult = client.recognizeBatchPiiEntitiesWithResponse(inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s", detectedBatchResult.getModelVersion()); diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClientTest.java b/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClientTest.java index 62cf3271ab89..51d5587d00bb 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClientTest.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsAsyncClientTest.java @@ -131,4 +131,45 @@ public void detectLanguageDuplicateIdInput() { .verifyErrorSatisfies(ex -> assertRestException(ex, HttpResponseException.class, 400)); }); } + + @Test + public void 
recognizeEntitiesForSimpleInput() { + DetectedLanguage primaryLanguage = new DetectedLanguage().setName("English").setIso6391Name("en").setScore(1.0); + + } + + @Test + public void recognizeEntitiesForEmptyText() { + + } + + @Test + public void recognizeEntitiesForFaultyText() { + + } + + @Test + public void recognizeEntitiesForBatchInput() { + + } + + @Test + public void recognizeEntitiesForBatchInputShowStatistics() { +// recognizeEntitiesShowStatisticsRunner((inputs, options) -> { +// StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) +// .assertNext(response -> validateBatchResult(response.getValue(), getExpectedBatchNamedEntityResult(), +// "Named Entity")) +// .verifyComplete(); +// }); + } + + @Test + public void recognizeEntitiesForBatchStringInput() { + + } + + @Test + public void recognizeEntitiesForBatchListCountryHint() { + + } } diff --git a/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTest.java b/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTest.java index cd7dcab8d977..7226b9e360e2 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTest.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTest.java @@ -129,4 +129,37 @@ public void detectLanguageDuplicateIdInput() { HttpResponseException.class, 400); }); } + + @Test + public void recognizeEntitiesForSimpleInput() { + } + + @Test + public void recognizeEntitiesForEmptyText() { + + } + + @Test + public void recognizeEntitiesForFaultyText() { + + } + + @Test + public void recognizeEntitiesForBatchInput() { + + } + + @Test + public void recognizeEntitiesForBatchInputShowStatistics() { + } + + @Test + public void recognizeEntitiesForBatchStringInput() { + + } + + @Test + public void recognizeEntitiesForBatchListCountryHint() { + + } } diff --git 
a/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTestBase.java b/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTestBase.java index 8d069b6f541d..05717d84c4cd 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTestBase.java +++ b/sdk/textanalytics/azure-ai-textanalytics/src/test/java/com/azure/ai/textanalytics/TextAnalyticsClientTestBase.java @@ -8,8 +8,11 @@ import com.azure.ai.textanalytics.models.DetectedLanguage; import com.azure.ai.textanalytics.models.DocumentResultCollection; import com.azure.ai.textanalytics.models.Error; +import com.azure.ai.textanalytics.models.NamedEntity; +import com.azure.ai.textanalytics.models.NamedEntityResult; import com.azure.ai.textanalytics.models.TextAnalyticsRequestOptions; import com.azure.ai.textanalytics.models.TextBatchStatistics; +import com.azure.ai.textanalytics.models.TextDocumentInput; import com.azure.ai.textanalytics.models.TextDocumentStatistics; import com.azure.core.credential.TokenCredential; import com.azure.core.exception.HttpResponseException; @@ -104,6 +107,7 @@ T clientSetup(Function clientBuilder) { return Objects.requireNonNull(client); } + // Detect Language @Test public abstract void detectSingleTextLanguage(); @@ -138,7 +142,7 @@ void detectLanguageShowStatisticsRunner(BiConsumer, } void detectLanguageDuplicateIdRunner(BiConsumer, - TextAnalyticsRequestOptions> testRunner) { + TextAnalyticsRequestOptions> testRunner) { final List detectLanguageInputs = Arrays.asList( new DetectLanguageInput("0", "This is written in English", "US"), new DetectLanguageInput("0", "Este es un document escrito en Español.") @@ -147,6 +151,30 @@ void detectLanguageDuplicateIdRunner(BiConsumer, testRunner.accept(detectLanguageInputs, setTextAnalyticsRequestOptions()); } + static void detectLanguagesCountryHintRunner(BiConsumer, String> testRunner) { + final 
List inputs = new ArrayList<>(Arrays.asList( + "This is written in English", "Este es un document escrito en Español.", "~@!~:)")); + + testRunner.accept(inputs, "US"); + } + + static void detectLanguageStringInputRunner(Consumer> testRunner) { + final List inputs = new ArrayList<>(Arrays.asList( + "This is written in English", "Este es un document escrito en Español.", "~@!~:)")); + + testRunner.accept(inputs); + } + + static void detectLanguageRunner(Consumer> testRunner) { + final List detectLanguageInputs = Arrays.asList( + new DetectLanguageInput("0", "This is written in English", "US"), + new DetectLanguageInput("1", "Este es un document escrito en Español."), + new DetectLanguageInput("2", "~@!~:)", "US") + ); + + testRunner.accept(detectLanguageInputs); + } + static DocumentResultCollection getExpectedBatchDetectedLanguages() { DetectedLanguage detectedLanguage1 = new DetectedLanguage().setName("English").setIso6391Name("en") .setScore(1.0); @@ -172,18 +200,65 @@ static DocumentResultCollection getExpectedBatchDetectedLa return new DocumentResultCollection<>(detectLanguageResultList, "2019-10-01", textBatchStatistics); } - static void detectLanguagesCountryHintRunner(BiConsumer, String> testRunner) { - final List inputs = new ArrayList<>(Arrays.asList( - "This is written in English", "Este es un document escrito en Español.", "~@!~:)")); + // Named Entities + @Test + public abstract void recognizeEntitiesForSimpleInput(); - testRunner.accept(inputs, "US"); + @Test + public abstract void recognizeEntitiesForEmptyText(); + + @Test + public abstract void recognizeEntitiesForFaultyText(); + + @Test + public abstract void recognizeEntitiesForBatchInput(); + + @Test + public abstract void recognizeEntitiesForBatchInputShowStatistics(); + + void recognizeEntitiesShowStatisticsRunner(BiConsumer, + TextAnalyticsRequestOptions> testRunner) { + final List detectLanguageInputs = Arrays.asList( + new TextDocumentInput("1", "Satya Nadella is the CEO of Microsoft", "en"), 
+ new TextDocumentInput("2", "Elon Musk is the CEO of SpaceX and Tesla.", "en"), + new TextDocumentInput("3", "~@!~:)", "en") + // add error document => empty text + ); + + testRunner.accept(detectLanguageInputs, setTextAnalyticsRequestOptions()); } - static void detectLanguageStringInputRunner(Consumer> testRunner) { - final List inputs = new ArrayList<>(Arrays.asList( - "This is written in English", "Este es un document escrito en Español.", "~@!~:)")); + @Test + public abstract void recognizeEntitiesForBatchStringInput(); - testRunner.accept(inputs); + @Test + public abstract void recognizeEntitiesForBatchListCountryHint(); + + static DocumentResultCollection getExpectedBatchNamedEntityResult() { + NamedEntity namedEntity1 = new NamedEntity() + .setType("English").setText("Satya Nadella is the CEO of Microsoft").setSubtype("").setLength(1).setOffset(1).setScore(1.0); + NamedEntity namedEntity2 = new NamedEntity() + .setType("English").setText("").setSubtype("Elon Musk is the CEO of SpaceX and Tesla.").setLength(1).setOffset(1).setScore(1.0); + NamedEntity namedEntity3 = new NamedEntity() + .setType("English").setText("").setSubtype("").setLength(1).setOffset(1).setScore(1.0); + List namedEntityList1 = new ArrayList<>(Collections.singletonList(namedEntity1)); + List namedEntityList2 = new ArrayList<>(Collections.singletonList(namedEntity2)); + List namedEntityList3 = new ArrayList<>(Collections.singletonList(namedEntity3)); + + TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics().setCharacterCount(26).setTransactionCount(1); + TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics().setCharacterCount(39).setTransactionCount(1); + TextDocumentStatistics textDocumentStatistics3 = new TextDocumentStatistics().setCharacterCount(6).setTransactionCount(1); + + NamedEntityResult namedEntityResult1 = new NamedEntityResult("0", textDocumentStatistics1, namedEntityList1); + NamedEntityResult namedEntityResult2 = new 
NamedEntityResult("1", textDocumentStatistics2, namedEntityList2); + NamedEntityResult namedEntityResult3 = new NamedEntityResult("2", textDocumentStatistics3, namedEntityList3); + + TextBatchStatistics textBatchStatistics = new TextBatchStatistics().setDocumentCount(3) + .setErroneousDocumentCount(0).setTransactionCount(3).setValidDocumentCount(3); + List detectLanguageResultList = new ArrayList<>( + Arrays.asList(namedEntityResult1, namedEntityResult2, namedEntityResult3)); + + return new DocumentResultCollection<>(detectLanguageResultList, "2019-10-01", textBatchStatistics); } private TextAnalyticsRequestOptions setTextAnalyticsRequestOptions() { @@ -191,16 +266,6 @@ private TextAnalyticsRequestOptions setTextAnalyticsRequestOptions() { return new TextAnalyticsRequestOptions().setShowStatistics(true); } - static void detectLanguageRunner(Consumer> testRunner) { - final List detectLanguageInputs = Arrays.asList( - new DetectLanguageInput("0", "This is written in English", "US"), - new DetectLanguageInput("1", "Este es un document escrito en Español."), - new DetectLanguageInput("2", "~@!~:)", "US") - ); - - testRunner.accept(detectLanguageInputs); - } - String getEndPoint() { return interceptorManager.isPlaybackMode() ? "http://localhost:8080"