15 changes: 15 additions & 0 deletions sdk/search/azure-search-documents/CHANGELOG.md
@@ -8,6 +8,21 @@
- Split searchindex.json and searchservice.json models and operations into separate namespaces #11508
- Renamed `edm` to `SearchFieldDataType` #11511
- Search Synonym Map creation/update now returns a model #11514
- Renamed the following #11565 (usage sketch below):
  - `SearchIndexerDataSource` -> `SearchIndexerDataSourceConnection`
  - `SearchField.SynonymMaps` -> `SearchField.SynonymMapNames`
  - `SearchField.Analyzer` -> `SearchField.AnalyzerName`
  - `SearchField.IndexAnalyzer` -> `SearchField.IndexAnalyzerName`
  - `SearchField.SearchAnalyzer` -> `SearchField.SearchAnalyzerName`
  - `SearchableField.SynonymMaps` -> `SearchableField.SynonymMapNames`
  - `SearchableField.Analyzer` -> `SearchableField.AnalyzerName`
  - `SearchableField.IndexAnalyzer` -> `SearchableField.IndexAnalyzerName`
  - `SearchableField.SearchAnalyzer` -> `SearchableField.SearchAnalyzerName`
  - `Similarity` -> `SimilarityAlgorithm`
  - `Suggester` -> `SearchSuggester`
  - `PathHierarchyTokenizerV2` -> `PathHierarchyTokenizer`
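As a quick illustration of the renamed members, here is a minimal sketch of a field definition using the new names. It assumes the Python surface exposes them as the snake_case keyword arguments `analyzer_name` and `synonym_map_names` on `SearchField`, and that `SearchField`/`SearchFieldDataType` are importable from `azure.search.documents` at this point in the package's history; neither assumption comes from this PR.

```python
# Hypothetical sketch of the renamed properties in use; the import path and
# keyword names are assumptions, not taken from this PR.
from azure.search.documents import SearchField, SearchFieldDataType

description = SearchField(
    name="description",
    type=SearchFieldDataType.String,
    searchable=True,
    analyzer_name="en.lucene",              # previously `analyzer`
    synonym_map_names=["hotel-synonyms"],   # previously `synonym_maps`
)
```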



## 1.0.0b3 (2020-05-04)

@@ -4,6 +4,7 @@
# ------------------------------------
from ._index import ( # pylint: disable=unused-import
ComplexField,
SearchField,
SearchableField,
SimpleField,
)
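For context, the names re-exported here are the field helpers used to build an index definition, with `SearchField` newly added to the list. A minimal sketch of how they compose, assuming they are importable from `azure.search.documents` at this point in the package's history and accept the keyword arguments shown:

```python
# Hypothetical sketch; the import path and exact helper signatures are assumptions.
from azure.search.documents import (
    ComplexField,
    SearchableField,
    SearchFieldDataType,
    SimpleField,
)

fields = [
    SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True),
    SearchableField(name="description", analyzer_name="en.lucene"),
    ComplexField(
        name="address",
        fields=[
            SimpleField(name="city", type=SearchFieldDataType.String, filterable=True),
            SimpleField(name="country", type=SearchFieldDataType.String, facetable=True),
        ],
    ),
]
```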
Large diffs are not rendered by default.

@@ -12,9 +12,9 @@
from ._generated import SearchServiceClient as _SearchServiceClient
from ._generated.models import SynonymMap as _SynonymMap
from ._utils import (
- delistize_flags_for_index,
- listize_flags_for_index,
- listize_synonyms,
+ unpack_search_index,
+ pack_search_index,
+ unpack_synonyms,
pack_search_resource_encryption_key,
get_access_conditions,
normalize_endpoint,
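The renaming above establishes a pack/unpack convention: `pack_*` helpers convert a public model into the generated REST model before a request, and `unpack_*` helpers convert responses back. The sketch below is illustrative only, since the helper implementations are not part of this diff; the bodies are stand-ins, and only the round-trip calling pattern is taken from the hunks that follow.

```python
# Illustrative stand-ins for the private _utils helpers; only the calling
# pattern (pack on the way out, unpack on the way in) is taken from this diff.
from typing import Any


def pack_search_index(index: Any) -> Any:
    """Stand-in: translate the public index model into the generated REST model."""
    return index


def unpack_search_index(generated: Any) -> Any:
    """Stand-in: translate a generated REST model back into the public model."""
    return generated


def create_index(rest_client: Any, index: Any, **kwargs: Any) -> Any:
    """Mirrors the client pattern in this diff: pack the request, unpack the response."""
    packed = pack_search_index(index)
    result = rest_client.indexes.create(packed, **kwargs)
    return unpack_search_index(result)
```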
@@ -83,7 +83,7 @@ def list_indexes(self, **kwargs):
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))

- return self._client.indexes.list(cls=lambda objs: [listize_flags_for_index(x) for x in objs], **kwargs)
+ return self._client.indexes.list(cls=lambda objs: [unpack_search_index(x) for x in objs], **kwargs)
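The rename does not change how the client is called; only the conversion applied to results differs. A minimal usage sketch follows; the import path for `SearchServiceClient` and its constructor shape are assumptions for this point in the package's history.

```python
# Hypothetical usage sketch; import path and constructor signature are assumptions.
from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchServiceClient

client = SearchServiceClient(
    "https://<service-name>.search.windows.net",
    AzureKeyCredential("<api-key>"),
)

for index in client.list_indexes():
    # each item has been converted to the public model by unpack_search_index
    print(index.name)
```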

@distributed_trace
def get_index(self, index_name, **kwargs):
@@ -107,7 +107,7 @@ def get_index(self, index_name, **kwargs):
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = self._client.indexes.get(index_name, **kwargs)
- return listize_flags_for_index(result)
+ return unpack_search_index(result)

@distributed_trace
def get_index_statistics(self, index_name, **kwargs):
@@ -181,9 +181,9 @@ def create_index(self, index, **kwargs):
:caption: Creating a new index.
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
- patched_index = delistize_flags_for_index(index)
+ patched_index = pack_search_index(index)
result = self._client.indexes.create(patched_index, **kwargs)
- return result
+ return unpack_search_index(result)
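With this change, `create_index` returns the unpacked public model instead of the raw generated result. A usage sketch follows; the index model's name (`SearchIndex`), the import path, and the helper signatures are assumptions, and `client` is the `SearchServiceClient` built in the earlier sketch.

```python
# Hypothetical sketch; SearchIndex's name/import path and helper signatures are assumptions.
from azure.search.documents import SearchFieldDataType, SearchIndex, SearchableField, SimpleField

index = SearchIndex(
    name="hotels",
    fields=[
        SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True),
        SearchableField(name="description"),
    ],
)

created = client.create_index(index)  # now returns the public model, not the raw result
print(created.name)
```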

@distributed_trace
def create_or_update_index(
@@ -226,15 +226,15 @@ def create_or_update_index(
index, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
- patched_index = delistize_flags_for_index(index)
+ patched_index = pack_search_index(index)
result = self._client.indexes.create_or_update(
index_name=index_name,
index=patched_index,
allow_index_downtime=allow_index_downtime,
error_map=error_map,
**kwargs
)
- return result
+ return unpack_search_index(result)
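`create_or_update_index` gets the same treatment and already supports optimistic concurrency, as the `match_condition` handling in this hunk shows. A sketch of a conditional update follows; the exact call shape is inferred from the parameters visible here, and `client`, `SimpleField`, and `SearchFieldDataType` come from the earlier sketches.

```python
# Hypothetical sketch: only apply the update if the service-side index is unchanged.
from azure.core import MatchConditions

fetched = client.get_index("hotels")
fetched.fields.append(
    SimpleField(name="rating", type=SearchFieldDataType.Int32, filterable=True)
)

updated = client.create_or_update_index(
    index_name=fetched.name,
    index=fetched,
    match_condition=MatchConditions.IfNotModified,  # fail if the service copy has changed
)
```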

@distributed_trace
def analyze_text(self, index_name, analyze_request, **kwargs):
@@ -285,7 +285,7 @@ def get_synonym_maps(self, **kwargs):
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = self._client.synonym_maps.list(**kwargs)
- return [listize_synonyms(x) for x in result.synonym_maps]
+ return [unpack_synonyms(x) for x in result.synonym_maps]

@distributed_trace
def get_synonym_map(self, name, **kwargs):
@@ -310,7 +310,7 @@ def get_synonym_map(self, name, **kwargs):
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = self._client.synonym_maps.get(name, **kwargs)
- return listize_synonyms(result)
+ return unpack_synonyms(result)

@distributed_trace
def delete_synonym_map(self, synonym_map, **kwargs):
@@ -375,7 +375,7 @@ def create_synonym_map(self, name, synonyms, **kwargs):
solr_format_synonyms = "\n".join(synonyms)
synonym_map = _SynonymMap(name=name, synonyms=solr_format_synonyms)
result = self._client.synonym_maps.create(synonym_map, **kwargs)
- return listize_synonyms(result)
+ return unpack_synonyms(result)
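As this hunk shows, `create_synonym_map` takes the synonyms as a list, joins them into Solr format with newlines, and now returns the unpacked model. A usage sketch, reusing the `client` from the earlier sketch:

```python
# Hypothetical usage sketch; `client` is the SearchServiceClient built earlier.
synonym_map = client.create_synonym_map(
    "country-synonyms",
    [
        "USA, United States, United States of America",  # equivalence mapping
        "UK, United Kingdom => Britain",                  # explicit Solr mapping
    ],
)
```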

@distributed_trace
def create_or_update_synonym_map(self, synonym_map, synonyms=None, **kwargs):
@@ -413,7 +413,7 @@ def create_or_update_synonym_map(self, synonym_map, synonyms=None, **kwargs):
error_map=error_map,
**kwargs
)
- return listize_synonyms(result)
+ return unpack_synonyms(result)

@distributed_trace
def get_service_statistics(self, **kwargs):