From 7b186254f11dbadec308d079c6fb3736df312c1e Mon Sep 17 00:00:00 2001 From: tg-msft Date: Mon, 8 Jun 2020 17:36:53 -0700 Subject: [PATCH 1/2] Add SearchModelFactory Fixes #10610 --- .../Azure.Search.Documents.netstandard2.0.cs | 34 ++ .../src/Models/SearchModelFactory.cs | 311 ++++++++++++++++++ .../src/Models/SearchResults{T}.cs | 44 ++- .../src/Models/SearchResult{T}.cs | 29 ++ .../src/Models/SearchSuggestion{T}.cs | 16 + .../src/Models/SuggestResults{T}.cs | 16 + 6 files changed, 448 insertions(+), 2 deletions(-) create mode 100644 sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs diff --git a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs index 9e712d977c32..43a52c60bda2 100644 --- a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs +++ b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs @@ -1975,6 +1975,40 @@ public enum SearchMode Any = 0, All = 1, } + public static partial class SearchModelFactory + { + public static Azure.Search.Documents.Indexes.Models.AnalyzedTokenInfo AnalyzedTokenInfo(string token, int startOffset, int endOffset, int position) { throw null; } + public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? coverage, System.Collections.Generic.IReadOnlyList results) { throw null; } + public static Azure.Search.Documents.Models.Autocompletion Autocompletion(string text, string queryPlusText) { throw null; } + public static Azure.Search.Documents.Indexes.Models.CharFilter CharFilter(string oDataType, string name) { throw null; } + public static Azure.Search.Documents.Indexes.Models.CognitiveServicesAccount CognitiveServicesAccount(string oDataType, string description) { throw null; } + public static Azure.Search.Documents.Indexes.Models.DataChangeDetectionPolicy DataChangeDetectionPolicy(string oDataType) { throw null; } + public static Azure.Search.Documents.Indexes.Models.DataDeletionDetectionPolicy DataDeletionDetectionPolicy(string oDataType) { throw null; } + public static Azure.Search.Documents.Models.FacetResult FacetResult(long? count, System.Collections.Generic.IReadOnlyDictionary additionalProperties) { throw null; } + public static Azure.Search.Documents.Models.IndexDocumentsResult IndexDocumentsResult(System.Collections.Generic.IEnumerable results) { throw null; } + public static Azure.Search.Documents.Indexes.Models.IndexerExecutionResult IndexerExecutionResult(Azure.Search.Documents.Indexes.Models.IndexerExecutionStatus status, string errorMessage, System.DateTimeOffset? startTime, System.DateTimeOffset? endTime, System.Collections.Generic.IReadOnlyList errors, System.Collections.Generic.IReadOnlyList warnings, int itemCount, int failedItemCount, string initialTrackingState, string finalTrackingState) { throw null; } + public static Azure.Search.Documents.Models.IndexingResult IndexingResult(string key, string errorMessage, bool succeeded, int status) { throw null; } + public static Azure.Search.Documents.Indexes.Models.LexicalAnalyzer LexicalAnalyzer(string oDataType, string name) { throw null; } + public static Azure.Search.Documents.Indexes.Models.LexicalTokenizer LexicalTokenizer(string oDataType, string name) { throw null; } + public static Azure.Search.Documents.Indexes.Models.ScoringFunction ScoringFunction(string type, string fieldName, double boost, Azure.Search.Documents.Indexes.Models.ScoringFunctionInterpolation? 
interpolation) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerError SearchIndexerError(string key, string errorMessage, int statusCode, string name, string details, string documentationLink) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerLimits SearchIndexerLimits(System.TimeSpan? maxRunTime, long? maxDocumentExtractionSize, long? maxDocumentContentCharactersToExtract) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerSkill SearchIndexerSkill(string oDataType, string name, string description, string context, System.Collections.Generic.IList inputs, System.Collections.Generic.IList outputs) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(Azure.Search.Documents.Indexes.Models.IndexerStatus status, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult, System.Collections.Generic.IReadOnlyList executionHistory, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerWarning SearchIndexerWarning(string key, string message, string name, string details, string documentationLink) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount, long storageSize) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchResourceCounter SearchResourceCounter(long usage, long? quota) { throw null; } + public static Azure.Search.Documents.Models.SearchResultsPage SearchResultsPage(Azure.Search.Documents.Models.SearchResults results) { throw null; } + public static Azure.Search.Documents.Models.SearchResults SearchResults(System.Collections.Generic.IList> values, long? totalCount, System.Collections.Generic.IDictionary> facets, double? coverage, Azure.Response rawResponse) { throw null; } + public static Azure.Search.Documents.Models.SearchResult SearchResult(T document, double? score, System.Collections.Generic.IDictionary> highlights) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchServiceCounters SearchServiceCounters(Azure.Search.Documents.Indexes.Models.SearchResourceCounter documentCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter indexCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter indexerCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter dataSourceCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter storageSizeCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter synonymMapCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter skillsetCounter) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchServiceLimits SearchServiceLimits(int? maxFieldsPerIndex, int? maxFieldNestingDepthPerIndex, int? maxComplexCollectionFieldsPerIndex, int? 
maxComplexObjectsInCollectionsPerDocument) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchServiceStatistics SearchServiceStatistics(Azure.Search.Documents.Indexes.Models.SearchServiceCounters counters, Azure.Search.Documents.Indexes.Models.SearchServiceLimits limits) { throw null; } + public static Azure.Search.Documents.Models.SearchSuggestion SearchSuggestion(T document, string text) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm SimilarityAlgorithm(string oDataType) { throw null; } + public static Azure.Search.Documents.Models.SuggestResults SuggestResults(System.Collections.Generic.IList> results, double? coverage) { throw null; } + public static Azure.Search.Documents.Indexes.Models.TokenFilter TokenFilter(string oDataType, string name) { throw null; } + } public enum SearchQueryType { Simple = 0, diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs new file mode 100644 index 000000000000..c197235dedfd --- /dev/null +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs @@ -0,0 +1,311 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using Azure.Search.Documents.Indexes.Models; + +namespace Azure.Search.Documents.Models +{ + /// + /// Helps mock the types in Azure.Search.Documents.Models. + /// + public static partial class SearchModelFactory + { + /// Initializes a new instance of AnalyzedTokenInfo. + /// The token returned by the analyzer. + /// The index of the first character of the token in the input text. + /// The index of the last character of the token in the input text. + /// The position of the token in the input text relative to other tokens. The first token in the input text has position 0, the next has position 1, and so on. Depending on the analyzer used, some tokens might have the same position, for example if they are synonyms of each other. + /// A new AnalyzedTokenInfo instance for mocking. + public static AnalyzedTokenInfo AnalyzedTokenInfo( + string token, + int startOffset, + int endOffset, + int position) => + new AnalyzedTokenInfo(token, startOffset, endOffset, position); + + /// Initializes a new instance of CharFilter. + /// Identifies the concrete type of the char filter. + /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. + /// A new CharFilter instance for mocking. + public static CharFilter CharFilter( + string oDataType, + string name) => + new CharFilter(oDataType, name); + + /// Initializes a new instance of CognitiveServicesAccount. + /// Identifies the concrete type of the cognitive service resource attached to a skillset. + /// Description of the cognitive service resource attached to a skillset. + /// A new CognitiveServicesAccount instance for mocking. + public static CognitiveServicesAccount CognitiveServicesAccount( + string oDataType, + string description) => + new CognitiveServicesAccount(oDataType, description); + + /// Initializes a new instance of DataChangeDetectionPolicy. + /// Identifies the concrete type of the data change detection policy. + /// A new DataChangeDetectionPolicy instance for mocking. 
+ public static DataChangeDetectionPolicy DataChangeDetectionPolicy( + string oDataType) => + new DataChangeDetectionPolicy(oDataType); + + /// Initializes a new instance of DataDeletionDetectionPolicy. + /// Identifies the concrete type of the data deletion detection policy. + /// A new DataDeletionDetectionPolicy instance for mocking. + public static DataDeletionDetectionPolicy DataDeletionDetectionPolicy( + string oDataType) => + new DataDeletionDetectionPolicy(oDataType); + + /// Initializes a new instance of IndexerExecutionResult. + /// The outcome of this indexer execution. + /// The error message indicating the top-level error, if any. + /// The start time of this indexer execution. + /// The end time of this indexer execution, if the execution has already completed. + /// The item-level indexing errors. + /// The item-level indexing warnings. + /// The number of items that were processed during this indexer execution. This includes both successfully processed items and items where indexing was attempted but failed. + /// The number of items that failed to be indexed during this indexer execution. + /// Change tracking state with which an indexer execution started. + /// Change tracking state with which an indexer execution finished. + /// A new IndexerExecutionResult instance for mocking. + public static IndexerExecutionResult IndexerExecutionResult( + IndexerExecutionStatus status, + string errorMessage, + DateTimeOffset? startTime, + DateTimeOffset? endTime, + IReadOnlyList errors, + IReadOnlyList warnings, + int itemCount, + int failedItemCount, + string initialTrackingState, + string finalTrackingState) => + new IndexerExecutionResult(status, errorMessage, startTime, endTime, errors, warnings, itemCount, failedItemCount, initialTrackingState, finalTrackingState); + + /// Initializes a new instance of LexicalAnalyzer. + /// Identifies the concrete type of the analyzer. + /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. + /// A new LexicalAnalyzer instance for mocking. + public static LexicalAnalyzer LexicalAnalyzer( + string oDataType, + string name) => + new LexicalAnalyzer(oDataType, name); + + /// Initializes a new instance of LexicalTokenizer. + /// Identifies the concrete type of the tokenizer. + /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. + /// A new LexicalTokenizer instance for mocking. + public static LexicalTokenizer LexicalTokenizer( + string oDataType, + string name) => + new LexicalTokenizer(oDataType, name); + + /// Initializes a new instance of ScoringFunction. + /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. + /// The name of the field used as input to the scoring function. + /// A multiplier for the raw score. Must be a positive number not equal to 1.0. + /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". + /// A new ScoringFunction instance for mocking. + public static ScoringFunction ScoringFunction( + string type, + string fieldName, + double boost, + ScoringFunctionInterpolation? interpolation) => + new ScoringFunction(type, fieldName, boost, interpolation); + + /// Initializes a new instance of SearchIndexerError. 
+ /// The key of the item for which indexing failed. + /// The message describing the error that occurred while processing the item. + /// The status code indicating why the indexing operation failed. Possible values include: 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy. + /// The name of the source at which the error originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. + /// Additional, verbose details about the error to assist in debugging the indexer. This may not be always available. + /// A link to a troubleshooting guide for these classes of errors. This may not be always available. + /// A new SearchIndexerError instance for mocking. + public static SearchIndexerError SearchIndexerError( + string key, + string errorMessage, + int statusCode, + string name, + string details, + string documentationLink) => + new SearchIndexerError(key, errorMessage, statusCode, name, details, documentationLink); + + /// Initializes a new instance of SearchIndexerLimits. + /// The maximum duration that the indexer is permitted to run for one execution. + /// The maximum size of a document, in bytes, which will be considered valid for indexing. + /// The maximum number of characters that will be extracted from a document picked up for indexing. + /// A new SearchIndexerLimits instance for mocking. + public static SearchIndexerLimits SearchIndexerLimits( + TimeSpan? maxRunTime, + long? maxDocumentExtractionSize, + long? maxDocumentContentCharactersToExtract) => + new SearchIndexerLimits(maxRunTime, maxDocumentExtractionSize, maxDocumentContentCharactersToExtract); + + /// Initializes a new instance of SearchIndexerSkill. + /// Identifies the concrete type of the skill. + /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. + /// The description of the skill which describes the inputs, outputs, and usage of the skill. + /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). The default is /document. + /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. + /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. + /// A new SearchIndexerSkill instance for mocking. + public static SearchIndexerSkill SearchIndexerSkill( + string oDataType, + string name, + string description, + string context, + IList inputs, + IList outputs) => + new SearchIndexerSkill(oDataType, name, description, context, inputs, outputs); + + /// Initializes a new instance of SearchIndexerStatus. + /// Overall indexer status. + /// The result of the most recent or an in-progress indexer execution. + /// History of the recent indexer executions, sorted in reverse chronological order. + /// The execution limits for the indexer. + /// A new SearchIndexerStatus instance for mocking. 
+ public static SearchIndexerStatus SearchIndexerStatus( + IndexerStatus status, + IndexerExecutionResult lastResult, + IReadOnlyList executionHistory, + SearchIndexerLimits limits) => + new SearchIndexerStatus(status, lastResult, executionHistory, limits); + + /// Initializes a new instance of SearchIndexerWarning. + /// The key of the item which generated a warning. + /// The message describing the warning that occurred while processing the item. + /// The name of the source at which the warning originated. For example, this could refer to a particular skill in the attached skillset. This may not be always available. + /// Additional, verbose details about the warning to assist in debugging the indexer. This may not be always available. + /// A link to a troubleshooting guide for these classes of warnings. This may not be always available. + /// A new SearchIndexerWarning instance for mocking. + public static SearchIndexerWarning SearchIndexerWarning( + string key, + string message, + string name, + string details, + string documentationLink) => + new SearchIndexerWarning(key, message, name, details, documentationLink); + + /// Initializes a new instance of SearchIndexStatistics. + /// The number of documents in the index. + /// The amount of storage in bytes consumed by the index. + /// A new SearchIndexStatistics instance for mocking. + public static SearchIndexStatistics SearchIndexStatistics( + long documentCount, + long storageSize) => + new SearchIndexStatistics(documentCount, storageSize); + + /// Initializes a new instance of SearchResourceCounter. + /// The resource usage amount. + /// The resource amount quota. + /// A new SearchResourceCounter instance for mocking. + public static SearchResourceCounter SearchResourceCounter( + long usage, + long? quota) => + new SearchResourceCounter(usage, quota); + + /// Initializes a new instance of SearchServiceCounters. + /// Total number of documents across all indexes in the service. + /// Total number of indexes. + /// Total number of indexers. + /// Total number of data sources. + /// Total size of used storage in bytes. + /// Total number of synonym maps. + /// Total number of skillsets. + /// A new SearchServiceCounters instance for mocking. + public static SearchServiceCounters SearchServiceCounters( + SearchResourceCounter documentCounter, + SearchResourceCounter indexCounter, + SearchResourceCounter indexerCounter, + SearchResourceCounter dataSourceCounter, + SearchResourceCounter storageSizeCounter, + SearchResourceCounter synonymMapCounter, + SearchResourceCounter skillsetCounter) => + new SearchServiceCounters(documentCounter, indexCounter, indexerCounter, dataSourceCounter, storageSizeCounter, synonymMapCounter, skillsetCounter); + + /// Initializes a new instance of SearchServiceLimits. + /// The maximum allowed fields per index. + /// The maximum depth which you can nest sub-fields in an index, including the top-level complex field. For example, a/b/c has a nesting depth of 3. + /// The maximum number of fields of type Collection(Edm.ComplexType) allowed in an index. + /// The maximum number of objects in complex collections allowed per document. + /// A new SearchServiceLimits instance for mocking. + public static SearchServiceLimits SearchServiceLimits( + int? maxFieldsPerIndex, + int? maxFieldNestingDepthPerIndex, + int? maxComplexCollectionFieldsPerIndex, + int? 
maxComplexObjectsInCollectionsPerDocument) => + new SearchServiceLimits(maxFieldsPerIndex, maxFieldNestingDepthPerIndex, maxComplexCollectionFieldsPerIndex, maxComplexObjectsInCollectionsPerDocument); + + /// Initializes a new instance of SearchServiceStatistics. + /// Service level resource counters. + /// Service level general limits. + /// A new SearchServiceStatistics instance for mocking. + public static SearchServiceStatistics SearchServiceStatistics( + SearchServiceCounters counters, + SearchServiceLimits limits) => + new SearchServiceStatistics(counters, limits); + + /// Initializes a new instance of SimilarityAlgorithm. + /// . + /// A new SimilarityAlgorithm instance for mocking. + public static SimilarityAlgorithm SimilarityAlgorithm( + string oDataType) => + new SimilarityAlgorithm(oDataType); + + /// Initializes a new instance of TokenFilter. + /// Identifies the concrete type of the token filter. + /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. + /// A new TokenFilter instance for mocking. + public static TokenFilter TokenFilter( + string oDataType, + string name) => + new TokenFilter(oDataType, name); + + /// Initializes a new instance of AutocompleteResults. + /// A value indicating the percentage of the index that was considered by the autocomplete request, or null if minimumCoverage was not specified in the request. + /// The list of returned Autocompleted items. + /// A new AutocompleteResults instance for mocking. + public static AutocompleteResults AutocompleteResults( + double? coverage, + IReadOnlyList results) => + new AutocompleteResults(coverage, results); + + /// Initializes a new instance of Autocompletion. + /// The completed term. + /// The query along with the completed term. + /// A new Autocompletion instance for mocking. + public static Autocompletion Autocompletion( + string text, + string queryPlusText) => + new Autocompletion(text, queryPlusText); + + /// Initializes a new instance of FacetResult. + /// The approximate count of documents falling within the bucket described by this facet. + /// . + /// A new FacetResult instance for mocking. + public static FacetResult FacetResult( + long? count, + IReadOnlyDictionary additionalProperties) => + new FacetResult(count, additionalProperties); + + /// Initializes a new instance of IndexDocumentsResult. + /// The list of status information for each document in the indexing request. + /// A new IndexDocumentsResult instance for mocking. + public static IndexDocumentsResult IndexDocumentsResult( + IEnumerable results) => + new IndexDocumentsResult(results); + + /// Initializes a new instance of IndexingResult. + /// The key of a document that was in the indexing request. + /// The error message explaining why the indexing operation failed for the document identified by the key; null if indexing succeeded. + /// A value indicating whether the indexing operation succeeded for the document identified by the key. + /// The status code of the indexing operation. Possible values include: 200 for a successful update or delete, 201 for successful document creation, 400 for a malformed input document, 404 for document not found, 409 for a version conflict, 422 when the index is temporarily unavailable, or 503 for when the service is too busy. + /// A new IndexingResult instance for mocking. 
+ public static IndexingResult IndexingResult( + string key, + string errorMessage, + bool succeeded, + int status) => + new IndexingResult(key, errorMessage, succeeded, status); + } +} diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs index f0fe44768a17..87d9013d8397 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs @@ -56,7 +56,7 @@ public class SearchResults /// /// Gets the first (server side) page of search result values. /// - internal IList> Values { get; } = new List>(); + internal IList> Values { get; set; } = new List>(); /// /// Gets or sets the fully constructed URI for the next page of @@ -74,7 +74,7 @@ public class SearchResults /// Gets the raw Response that obtained these results from the service. /// This is only used when paging. /// - internal Response RawResponse { get; private set; } + internal Response RawResponse { get; set; } /// /// The SearchClient used to fetch the next page of results. This is @@ -469,4 +469,44 @@ private static SearchOptions ReadNextPageOptions(ref Utf8JsonReader reader) return SearchOptions.DeserializeSearchOptions(doc.RootElement); } } + + public static partial class SearchModelFactory + { + /// Initializes a new instance of SearchResults. + /// + /// The .NET type that maps to the index schema. Instances of this type can + /// be retrieved as documents from the index. + /// + /// The search result values. + /// The total count of results found by the search operation. + /// The facet query results for the search operation. + /// A value indicating the percentage of the index that was included in the query + /// The raw Response that obtained these results from the service. + /// A new SearchResults instance for mocking. + public static SearchResults SearchResults( + IList> values, + long? totalCount, + IDictionary> facets, + double? coverage, + Response rawResponse) => + new SearchResults() + { + TotalCount = totalCount, + Coverage = coverage, + Facets = facets, + Values = values, + RawResponse = rawResponse + }; + + /// Initializes a new instance of SearchResultsPage. + /// + /// The .NET type that maps to the index schema. Instances of this type can + /// be retrieved as documents from the index. + /// + /// The search results for this page. + /// A new SearchResultsPage instance for mocking. + public static SearchResultsPage SearchResultsPage( + SearchResults results) => + new SearchResultsPage(results); + } } diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchResult{T}.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchResult{T}.cs index f92bed9aacd5..a24c0e0ca6db 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchResult{T}.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchResult{T}.cs @@ -163,4 +163,33 @@ private static void ReadHighlights(ref Utf8JsonReader reader, SearchResult re } } } + + public static partial class SearchModelFactory + { + /// Initializes a new instance of SearchResult. + /// + /// The .NET type that maps to the index schema. Instances of this type can + /// be retrieved as documents from the index. + /// + /// The document found by the search query. + /// + /// The relevance score of the document compared to other documents + /// returned by the query. 
+ /// + /// + /// Text fragments from the document that indicate the matching search + /// terms, organized by each applicable field; null if hit highlighting + /// was not enabled for the query. + /// + /// A new SearchResult instance for mocking. + public static SearchResult SearchResult( + T document, + double? score, + IDictionary> highlights) => + new SearchResult() + { + Score = score, + Highlights = highlights, + Document = document }; + } } diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchSuggestion{T}.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchSuggestion{T}.cs index e1c48c59791e..555b05e0caa1 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchSuggestion{T}.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchSuggestion{T}.cs @@ -108,4 +108,20 @@ public override SearchSuggestion Read( return suggestion; } } + + public static partial class SearchModelFactory + { + /// Initializes a new instance of SearchSuggestion. + /// + /// The .NET type that maps to the index schema. Instances of this type can + /// be retrieved as documents from the index. + /// + /// The document being suggested. + /// The text of the suggestion result. + /// A new SuggestResults instance for mocking. + public static SearchSuggestion SearchSuggestion( + T document, + string text) => + new SearchSuggestion() { Document = document, Text = text }; + } } diff --git a/sdk/search/Azure.Search.Documents/src/Models/SuggestResults{T}.cs b/sdk/search/Azure.Search.Documents/src/Models/SuggestResults{T}.cs index 6f52fc2bc662..941153fdbf84 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SuggestResults{T}.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SuggestResults{T}.cs @@ -133,4 +133,20 @@ public override SuggestResults Read( return suggestions; } } + + public static partial class SearchModelFactory + { + /// Initializes a new instance of SearchResult. + /// + /// The .NET type that maps to the index schema. Instances of this type can + /// be retrieved as documents from the index. + /// + /// + /// + /// A new SuggestResults instance for mocking. + public static SuggestResults SuggestResults( + IList> results, + double? coverage) => + new SuggestResults() { Coverage = coverage, Results = results }; + } } From a194ae9f7ac17eed68d0ff8a4b8b82f61c8205fe Mon Sep 17 00:00:00 2001 From: tg-msft Date: Tue, 9 Jun 2020 07:53:39 -0700 Subject: [PATCH 2/2] PR feedback --- .../Azure.Search.Documents/CHANGELOG.md | 5 + .../Azure.Search.Documents.netstandard2.0.cs | 16 +-- .../src/Models/SearchModelFactory.cs | 106 +----------------- .../src/Models/SearchResults{T}.cs | 13 ++- 4 files changed, 21 insertions(+), 119 deletions(-) diff --git a/sdk/search/Azure.Search.Documents/CHANGELOG.md b/sdk/search/Azure.Search.Documents/CHANGELOG.md index d38c738a1e35..d20d103a4705 100644 --- a/sdk/search/Azure.Search.Documents/CHANGELOG.md +++ b/sdk/search/Azure.Search.Documents/CHANGELOG.md @@ -2,6 +2,10 @@ ## 1.0.0-preview.4 (Unreleased) +### Added + +- Added `SearchModelFactory` to mock output model types. + ### Breaking Changes - Made collection- and dictionary-type properties read-only, i.e. has only get-accessors, based on [.NET Guidelines][net-guidelines-collection-properties]. @@ -12,6 +16,7 @@ - Renamed `AnalyzeTextOptions.Tokenizer` to `AnalyzeTextOptions.TokenizerName`. - Renamed `CustomAnalyzer.Tokenizer` to `CustomAnalyzer.TokenizerName`. - Renamed `SearchIndexerDataSource` to `SearchIndexerDataSourceConnection`. +- Renamed `Autocompletion` to `AutocompleteItem`. 
- Renamed methods on `SearchIndexerClient` matching "\*DataSource" to "\*DataSourceConnection". - Split `SearchServiceClient` into `SearchIndexClient` for managing indexes, and `SearchIndexerClient` for managing indexers, both of which are now in `Azure.Search.Documents.Indexes`. - `SearchClient.IndexDocuments` now throws an `AggregateException` wrapping all the `RequestFailedException`s in the batch. diff --git a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs index 43a52c60bda2..d76aa984fdc8 100644 --- a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs +++ b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs @@ -1978,36 +1978,26 @@ public enum SearchMode public static partial class SearchModelFactory { public static Azure.Search.Documents.Indexes.Models.AnalyzedTokenInfo AnalyzedTokenInfo(string token, int startOffset, int endOffset, int position) { throw null; } - public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? coverage, System.Collections.Generic.IReadOnlyList results) { throw null; } - public static Azure.Search.Documents.Models.Autocompletion Autocompletion(string text, string queryPlusText) { throw null; } - public static Azure.Search.Documents.Indexes.Models.CharFilter CharFilter(string oDataType, string name) { throw null; } - public static Azure.Search.Documents.Indexes.Models.CognitiveServicesAccount CognitiveServicesAccount(string oDataType, string description) { throw null; } - public static Azure.Search.Documents.Indexes.Models.DataChangeDetectionPolicy DataChangeDetectionPolicy(string oDataType) { throw null; } - public static Azure.Search.Documents.Indexes.Models.DataDeletionDetectionPolicy DataDeletionDetectionPolicy(string oDataType) { throw null; } + public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? coverage, System.Collections.Generic.IReadOnlyList results) { throw null; } + public static Azure.Search.Documents.Models.AutocompleteItem Autocompletion(string text, string queryPlusText) { throw null; } public static Azure.Search.Documents.Models.FacetResult FacetResult(long? count, System.Collections.Generic.IReadOnlyDictionary additionalProperties) { throw null; } public static Azure.Search.Documents.Models.IndexDocumentsResult IndexDocumentsResult(System.Collections.Generic.IEnumerable results) { throw null; } public static Azure.Search.Documents.Indexes.Models.IndexerExecutionResult IndexerExecutionResult(Azure.Search.Documents.Indexes.Models.IndexerExecutionStatus status, string errorMessage, System.DateTimeOffset? startTime, System.DateTimeOffset? 
endTime, System.Collections.Generic.IReadOnlyList errors, System.Collections.Generic.IReadOnlyList warnings, int itemCount, int failedItemCount, string initialTrackingState, string finalTrackingState) { throw null; } public static Azure.Search.Documents.Models.IndexingResult IndexingResult(string key, string errorMessage, bool succeeded, int status) { throw null; } - public static Azure.Search.Documents.Indexes.Models.LexicalAnalyzer LexicalAnalyzer(string oDataType, string name) { throw null; } - public static Azure.Search.Documents.Indexes.Models.LexicalTokenizer LexicalTokenizer(string oDataType, string name) { throw null; } - public static Azure.Search.Documents.Indexes.Models.ScoringFunction ScoringFunction(string type, string fieldName, double boost, Azure.Search.Documents.Indexes.Models.ScoringFunctionInterpolation? interpolation) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerError SearchIndexerError(string key, string errorMessage, int statusCode, string name, string details, string documentationLink) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerLimits SearchIndexerLimits(System.TimeSpan? maxRunTime, long? maxDocumentExtractionSize, long? maxDocumentContentCharactersToExtract) { throw null; } - public static Azure.Search.Documents.Indexes.Models.SearchIndexerSkill SearchIndexerSkill(string oDataType, string name, string description, string context, System.Collections.Generic.IList inputs, System.Collections.Generic.IList outputs) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(Azure.Search.Documents.Indexes.Models.IndexerStatus status, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult, System.Collections.Generic.IReadOnlyList executionHistory, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerWarning SearchIndexerWarning(string key, string message, string name, string details, string documentationLink) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount, long storageSize) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchResourceCounter SearchResourceCounter(long usage, long? quota) { throw null; } public static Azure.Search.Documents.Models.SearchResultsPage SearchResultsPage(Azure.Search.Documents.Models.SearchResults results) { throw null; } - public static Azure.Search.Documents.Models.SearchResults SearchResults(System.Collections.Generic.IList> values, long? totalCount, System.Collections.Generic.IDictionary> facets, double? coverage, Azure.Response rawResponse) { throw null; } + public static Azure.Search.Documents.Models.SearchResults SearchResults(System.Collections.Generic.IEnumerable> values, long? totalCount, System.Collections.Generic.IDictionary> facets, double? coverage, Azure.Response rawResponse) { throw null; } public static Azure.Search.Documents.Models.SearchResult SearchResult(T document, double? 
score, System.Collections.Generic.IDictionary> highlights) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchServiceCounters SearchServiceCounters(Azure.Search.Documents.Indexes.Models.SearchResourceCounter documentCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter indexCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter indexerCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter dataSourceCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter storageSizeCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter synonymMapCounter, Azure.Search.Documents.Indexes.Models.SearchResourceCounter skillsetCounter) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchServiceLimits SearchServiceLimits(int? maxFieldsPerIndex, int? maxFieldNestingDepthPerIndex, int? maxComplexCollectionFieldsPerIndex, int? maxComplexObjectsInCollectionsPerDocument) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchServiceStatistics SearchServiceStatistics(Azure.Search.Documents.Indexes.Models.SearchServiceCounters counters, Azure.Search.Documents.Indexes.Models.SearchServiceLimits limits) { throw null; } public static Azure.Search.Documents.Models.SearchSuggestion SearchSuggestion(T document, string text) { throw null; } - public static Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm SimilarityAlgorithm(string oDataType) { throw null; } public static Azure.Search.Documents.Models.SuggestResults SuggestResults(System.Collections.Generic.IList> results, double? coverage) { throw null; } - public static Azure.Search.Documents.Indexes.Models.TokenFilter TokenFilter(string oDataType, string name) { throw null; } } public enum SearchQueryType { diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs index c197235dedfd..ee9fcfbee8a1 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs @@ -25,38 +25,6 @@ public static AnalyzedTokenInfo AnalyzedTokenInfo( int position) => new AnalyzedTokenInfo(token, startOffset, endOffset, position); - /// Initializes a new instance of CharFilter. - /// Identifies the concrete type of the char filter. - /// The name of the char filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A new CharFilter instance for mocking. - public static CharFilter CharFilter( - string oDataType, - string name) => - new CharFilter(oDataType, name); - - /// Initializes a new instance of CognitiveServicesAccount. - /// Identifies the concrete type of the cognitive service resource attached to a skillset. - /// Description of the cognitive service resource attached to a skillset. - /// A new CognitiveServicesAccount instance for mocking. - public static CognitiveServicesAccount CognitiveServicesAccount( - string oDataType, - string description) => - new CognitiveServicesAccount(oDataType, description); - - /// Initializes a new instance of DataChangeDetectionPolicy. - /// Identifies the concrete type of the data change detection policy. - /// A new DataChangeDetectionPolicy instance for mocking. 
- public static DataChangeDetectionPolicy DataChangeDetectionPolicy( - string oDataType) => - new DataChangeDetectionPolicy(oDataType); - - /// Initializes a new instance of DataDeletionDetectionPolicy. - /// Identifies the concrete type of the data deletion detection policy. - /// A new DataDeletionDetectionPolicy instance for mocking. - public static DataDeletionDetectionPolicy DataDeletionDetectionPolicy( - string oDataType) => - new DataDeletionDetectionPolicy(oDataType); - /// Initializes a new instance of IndexerExecutionResult. /// The outcome of this indexer execution. /// The error message indicating the top-level error, if any. @@ -82,37 +50,6 @@ public static IndexerExecutionResult IndexerExecutionResult( string finalTrackingState) => new IndexerExecutionResult(status, errorMessage, startTime, endTime, errors, warnings, itemCount, failedItemCount, initialTrackingState, finalTrackingState); - /// Initializes a new instance of LexicalAnalyzer. - /// Identifies the concrete type of the analyzer. - /// The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A new LexicalAnalyzer instance for mocking. - public static LexicalAnalyzer LexicalAnalyzer( - string oDataType, - string name) => - new LexicalAnalyzer(oDataType, name); - - /// Initializes a new instance of LexicalTokenizer. - /// Identifies the concrete type of the tokenizer. - /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A new LexicalTokenizer instance for mocking. - public static LexicalTokenizer LexicalTokenizer( - string oDataType, - string name) => - new LexicalTokenizer(oDataType, name); - - /// Initializes a new instance of ScoringFunction. - /// Indicates the type of function to use. Valid values include magnitude, freshness, distance, and tag. The function type must be lower case. - /// The name of the field used as input to the scoring function. - /// A multiplier for the raw score. Must be a positive number not equal to 1.0. - /// A value indicating how boosting will be interpolated across document scores; defaults to "Linear". - /// A new ScoringFunction instance for mocking. - public static ScoringFunction ScoringFunction( - string type, - string fieldName, - double boost, - ScoringFunctionInterpolation? interpolation) => - new ScoringFunction(type, fieldName, boost, interpolation); - /// Initializes a new instance of SearchIndexerError. /// The key of the item for which indexing failed. /// The message describing the error that occurred while processing the item. @@ -141,23 +78,6 @@ public static SearchIndexerLimits SearchIndexerLimits( long? maxDocumentContentCharactersToExtract) => new SearchIndexerLimits(maxRunTime, maxDocumentExtractionSize, maxDocumentContentCharactersToExtract); - /// Initializes a new instance of SearchIndexerSkill. - /// Identifies the concrete type of the skill. - /// The name of the skill which uniquely identifies it within the skillset. A skill with no name defined will be given a default name of its 1-based index in the skills array, prefixed with the character '#'. - /// The description of the skill which describes the inputs, outputs, and usage of the skill. - /// Represents the level at which operations take place, such as the document root or document content (for example, /document or /document/content). 
The default is /document. - /// Inputs of the skills could be a column in the source data set, or the output of an upstream skill. - /// The output of a skill is either a field in a search index, or a value that can be consumed as an input by another skill. - /// A new SearchIndexerSkill instance for mocking. - public static SearchIndexerSkill SearchIndexerSkill( - string oDataType, - string name, - string description, - string context, - IList inputs, - IList outputs) => - new SearchIndexerSkill(oDataType, name, description, context, inputs, outputs); - /// Initializes a new instance of SearchIndexerStatus. /// Overall indexer status. /// The result of the most recent or an in-progress indexer execution. @@ -245,39 +165,23 @@ public static SearchServiceStatistics SearchServiceStatistics( SearchServiceLimits limits) => new SearchServiceStatistics(counters, limits); - /// Initializes a new instance of SimilarityAlgorithm. - /// . - /// A new SimilarityAlgorithm instance for mocking. - public static SimilarityAlgorithm SimilarityAlgorithm( - string oDataType) => - new SimilarityAlgorithm(oDataType); - - /// Initializes a new instance of TokenFilter. - /// Identifies the concrete type of the token filter. - /// The name of the token filter. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. - /// A new TokenFilter instance for mocking. - public static TokenFilter TokenFilter( - string oDataType, - string name) => - new TokenFilter(oDataType, name); - /// Initializes a new instance of AutocompleteResults. /// A value indicating the percentage of the index that was considered by the autocomplete request, or null if minimumCoverage was not specified in the request. /// The list of returned Autocompleted items. /// A new AutocompleteResults instance for mocking. public static AutocompleteResults AutocompleteResults( double? coverage, - IReadOnlyList results) => + IReadOnlyList results) => new AutocompleteResults(coverage, results); - /// Initializes a new instance of Autocompletion. + /// Initializes a new instance of AutocompleteItem. /// The completed term. /// The query along with the completed term. - /// A new Autocompletion instance for mocking. - public static Autocompletion Autocompletion( + /// A new AutocompleteItem instance for mocking. + public static AutocompleteItem Autocompletion( string text, string queryPlusText) => - new Autocompletion(text, queryPlusText); + new AutocompleteItem(text, queryPlusText); /// Initializes a new instance of FacetResult. /// The approximate count of documents falling within the bucket described by this facet. diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs index 87d9013d8397..8e2112d2a537 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchResults{T}.cs @@ -56,7 +56,7 @@ public class SearchResults /// /// Gets the first (server side) page of search result values. /// - internal IList> Values { get; set; } = new List>(); + internal List> Values { get; } = new List>(); /// /// Gets or sets the fully constructed URI for the next page of @@ -484,19 +484,22 @@ public static partial class SearchModelFactory /// The raw Response that obtained these results from the service. /// A new SearchResults instance for mocking. 
         public static SearchResults<T> SearchResults<T>(
-            IList<SearchResult<T>> values,
+            IEnumerable<SearchResult<T>> values,
             long? totalCount,
             IDictionary<string, IList<FacetResult>> facets,
             double? coverage,
-            Response rawResponse) =>
-            new SearchResults<T>()
+            Response rawResponse)
+        {
+            var results = new SearchResults<T>()
             {
                 TotalCount = totalCount,
                 Coverage = coverage,
                 Facets = facets,
-                Values = values,
                 RawResponse = rawResponse
             };
+            results.Values.AddRange(values);
+            return results;
+        }

         /// Initializes a new instance of SearchResultsPage.
         ///
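
For context, here is a rough sketch of how a test might consume the new factory once this merges. It is not taken from the diff above: it assumes Moq, a hypothetical Hotel document type, that SearchClient exposes a virtual Search<T>(string, SearchOptions, CancellationToken) method as other Azure SDK clients do, and that the generic parameters stripped from the API listing above are SearchResults<T>/SearchResult<T> as in the shipped package.

// Rough usage sketch (assumes Moq and a hypothetical Hotel document type).
using System.Collections.Generic;
using System.Threading;
using Azure;
using Azure.Search.Documents;
using Azure.Search.Documents.Models;
using Moq;

public class Hotel
{
    public string HotelName { get; set; }
}

public static class SearchClientMockSketch
{
    public static SearchClient CreateClientReturningCannedResults()
    {
        // Fabricate a result and a results page with the new factory
        // instead of round-tripping JSON through the serializer.
        SearchResult<Hotel> result = SearchModelFactory.SearchResult(
            new Hotel { HotelName = "Fancy Stay" },
            score: 0.99,
            highlights: new Dictionary<string, IList<string>>());

        SearchResults<Hotel> results = SearchModelFactory.SearchResults(
            values: new[] { result },
            totalCount: 1,
            facets: new Dictionary<string, IList<FacetResult>>(),
            coverage: 100,
            rawResponse: Mock.Of<Response>());

        // Hand the canned payload back from a mocked client; assumes
        // SearchClient.Search<T> is virtual like other Azure SDK client methods.
        var clientMock = new Mock<SearchClient>();
        clientMock
            .Setup(c => c.Search<Hotel>("fancy", It.IsAny<SearchOptions>(), It.IsAny<CancellationToken>()))
            .Returns(Response.FromValue(results, Mock.Of<Response>()));
        return clientMock.Object;
    }
}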