diff --git a/sdk/search/azure-search-documents/CHANGELOG.md b/sdk/search/azure-search-documents/CHANGELOG.md index 5e2252734156..317ffb4b0d05 100644 --- a/sdk/search/azure-search-documents/CHANGELOG.md +++ b/sdk/search/azure-search-documents/CHANGELOG.md @@ -24,6 +24,7 @@ PathHierarchyTokenizerV2 -> PathHierarchyTokenizer - Renamed DataSource methods to DataSourceConnection #11693 - Autocomplete & suggest methods now take arguments search_text & suggester_name rather than query objects #11747 +- `create_or_update` methods no longer support partial updates ## 1.0.0b3 (2020-05-04) diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_index.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_index.py index 7f128cdfd2ca..7d555ab22d21 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_index.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_index.py @@ -32,13 +32,13 @@ class SearchField(msrest.serialization.Model): type Edm.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is false for simple fields and null for complex fields. :type key: bool - :param is_hidden: A value indicating whether the field can be returned in a search result. + :param hidden: A value indicating whether the field can be returned in a search result. You can enable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be False for key fields, and it must be null for complex fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is False for simple fields and null for complex fields. - :type is_hidden: bool + :type hidden: bool :param searchable: A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and @@ -161,7 +161,7 @@ class SearchField(msrest.serialization.Model): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'key': {'key': 'key', 'type': 'bool'}, - 'is_hidden': {'key': 'isHidden', 'type': 'bool'}, + 'hidden': {'key': 'hidden', 'type': 'bool'}, 'searchable': {'key': 'searchable', 'type': 'bool'}, 'filterable': {'key': 'filterable', 'type': 'bool'}, 'sortable': {'key': 'sortable', 'type': 'bool'}, @@ -181,7 +181,7 @@ def __init__( self.name = kwargs['name'] self.type = kwargs['type'] self.key = kwargs.get('key', None) - self.is_hidden = kwargs.get('is_hidden', None) + self.hidden = kwargs.get('hidden', None) self.searchable = kwargs.get('searchable', None) self.filterable = kwargs.get('filterable', None) self.sortable = kwargs.get('sortable', None) @@ -210,13 +210,13 @@ def SimpleField(**kw): type SearchFieldDataType.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is False :type key: bool - :param is_hidden: A value indicating whether the field can be returned in a search result. + :param hidden: A value indicating whether the field can be returned in a search result. 
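Reviewer note on the is_hidden -> hidden rename above: a minimal usage sketch, assuming SimpleField, SearchableField, and SearchFieldDataType are exported from azure.search.documents.indexes.models as elsewhere in this PR; the field names are illustrative only.

    from azure.search.documents.indexes.models import (
        SearchFieldDataType,
        SearchableField,
        SimpleField,
    )

    fields = [
        SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True),
        # "hidden" replaces the old "is_hidden" kwarg: the field can still drive
        # filtering and sorting, but is not returned in search results.
        SimpleField(name="margin", type=SearchFieldDataType.Double,
                    hidden=True, filterable=True, sortable=True),
        SearchableField(name="description", type=SearchFieldDataType.String),
    ]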
You can enable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be False for key fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is False. - :type is_hidden: bool + :type hidden: bool :param filterable: A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields of type SearchFieldDataType.String or Collection(SearchFieldDataType.String) that are filterable do @@ -246,7 +246,7 @@ def SimpleField(**kw): result["filterable"] = kw.get("filterable", False) result["facetable"] = kw.get("facetable", False) result["sortable"] = kw.get("sortable", False) - result["is_hidden"] = kw.get("is_hidden", False) + result["hidden"] = kw.get("hidden", False) return SearchField(**result) @@ -264,13 +264,13 @@ def SearchableField(**kw): type SearchFieldDataType.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is False :type key: bool - :param is_hidden: A value indicating whether the field can be returned in a search result. + :param hidden: A value indicating whether the field can be returned in a search result. You can enable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be False for key fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is False. - :type is_hidden: bool + :type hidden: bool :param searchable: A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and @@ -375,7 +375,7 @@ def SearchableField(**kw): result["filterable"] = kw.get("filterable", False) result["facetable"] = kw.get("facetable", False) result["sortable"] = kw.get("sortable", False) - result["is_hidden"] = kw.get("is_hidden", False) + result["hidden"] = kw.get("hidden", False) if "analyzer_name" in kw: result["analyzer_name"] = kw["analyzer_name"] if "search_analyzer_name" in kw: diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_index_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_index_client.py index 9a5bd5e783af..176c5778096e 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_index_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_index_client.py @@ -10,12 +10,11 @@ from azure.core.paging import ItemPaged from ._generated import SearchServiceClient as _SearchServiceClient -from ._generated.models import SynonymMap as _SynonymMap from ._utils import ( unpack_search_index, pack_search_index, - unpack_synonyms, - pack_search_resource_encryption_key, + unpack_synonym_map, + pack_synonym_map, get_access_conditions, normalize_endpoint, ) @@ -55,7 +54,7 @@ def __exit__(self, *args): def close(self): # type: () -> None - """Close the :class:`~azure.search.documents.SearchIndexClient` session. 
+ """Close the :class:`~azure.search.documents.indexes.SearchIndexClient` session. """ return self._client.close() @@ -77,7 +76,7 @@ def list_indexes(self, **kwargs): """List the indexes in an Azure Search service. :return: List of indexes - :rtype: list[~azure.search.documents.SearchIndex] + :rtype: list[~azure.search.documents.indexes.models.SearchIndex] :raises: ~azure.core.exceptions.HttpResponseError """ @@ -86,14 +85,14 @@ def list_indexes(self, **kwargs): return self._client.indexes.list(cls=lambda objs: [unpack_search_index(x) for x in objs], **kwargs) @distributed_trace - def get_index(self, index_name, **kwargs): + def get_index(self, name, **kwargs): # type: (str, **Any) -> SearchIndex """ - :param index_name: The name of the index to retrieve. - :type index_name: str + :param name: The name of the index to retrieve. + :type name: str :return: SearchIndex object - :rtype: ~azure.search.documents.SearchIndex + :rtype: ~azure.search.documents.indexes.models.SearchIndex :raises: ~azure.core.exceptions.HttpResponseError .. admonition:: Example: @@ -106,7 +105,7 @@ def get_index(self, index_name, **kwargs): :caption: Get an index. """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - result = self._client.indexes.get(index_name, **kwargs) + result = self._client.indexes.get(name, **kwargs) return unpack_search_index(result) @distributed_trace @@ -118,7 +117,7 @@ def get_index_statistics(self, index_name, **kwargs): :param index_name: The name of the index to retrieve. :type index_name: str :return: Statistics for the given index, including a document count and storage usage. - :rtype: ~azure.search.documents.GetIndexStatisticsResult + :rtype: dict :raises: ~azure.core.exceptions.HttpResponseError """ @@ -133,7 +132,7 @@ def delete_index(self, index, **kwargs): provided instead of the name to use the access conditions. :param index: The index to retrieve. - :type index: str or ~search.models.SearchIndex + :type index: str or ~azure.search.documents.indexes.models.SearchIndex :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :raises: ~azure.core.exceptions.HttpResponseError @@ -166,9 +165,9 @@ def create_index(self, index, **kwargs): """Creates a new search index. :param index: The index object. - :type index: ~azure.search.documents.SearchIndex + :type index: ~azure.search.documents.indexes.models.SearchIndex :return: The index created - :rtype: ~azure.search.documents.SearchIndex + :rtype: ~azure.search.documents.indexes.models.SearchIndex :raises: ~azure.core.exceptions.HttpResponseError .. admonition:: Example: @@ -187,15 +186,13 @@ def create_index(self, index, **kwargs): @distributed_trace def create_or_update_index( - self, index_name, index, allow_index_downtime=None, **kwargs + self, index, allow_index_downtime=None, **kwargs ): - # type: (str, SearchIndex, bool, **Any) -> SearchIndex + # type: (SearchIndex, bool, **Any) -> SearchIndex """Creates a new search index or updates an index if it already exists. - :param index_name: The name of the index. - :type index_name: str :param index: The index object. - :type index: ~azure.search.documents.SearchIndex + :type index: ~azure.search.documents.indexes.models.SearchIndex :param allow_index_downtime: Allows new analyzers, tokenizers, token filters, or char filters to be added to an index by taking the index offline for at least a few seconds. This temporarily causes indexing and query requests to fail. 
Performance and write availability of @@ -205,7 +202,7 @@ def create_or_update_index( :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The index created or updated - :rtype: :class:`~azure.search.documents.SearchIndex` + :rtype: :class:`~azure.search.documents.indexes.models.SearchIndex` :raises: :class:`~azure.core.exceptions.ResourceNotFoundError`, \ :class:`~azure.core.exceptions.ResourceModifiedError`, \ :class:`~azure.core.exceptions.ResourceNotModifiedError`, \ @@ -228,7 +225,7 @@ def create_or_update_index( kwargs.update(access_condition) patched_index = pack_search_index(index) result = self._client.indexes.create_or_update( - index_name=index_name, + index_name=index.name, index=patched_index, allow_index_downtime=allow_index_downtime, error_map=error_map, @@ -246,7 +243,7 @@ def analyze_text(self, index_name, analyze_request, **kwargs): :param analyze_request: The text and analyzer or analysis components to test. :type analyze_request: ~azure.search.documents.AnalyzeRequest :return: AnalyzeResult - :rtype: ~azure.search.documents.AnalyzeResult + :rtype: ~azure.search.documents.indexes.models.AnalyzeResult :raises: ~azure.core.exceptions.HttpResponseError .. admonition:: Example: @@ -285,7 +282,7 @@ def get_synonym_maps(self, **kwargs): """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) result = self._client.synonym_maps.list(**kwargs) - return [unpack_synonyms(x) for x in result.synonym_maps] + return [unpack_synonym_map(x) for x in result.synonym_maps] @distributed_trace def get_synonym_map_names(self, **kwargs): @@ -324,7 +321,7 @@ def get_synonym_map(self, name, **kwargs): """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) result = self._client.synonym_maps.get(name, **kwargs) - return unpack_synonyms(result) + return unpack_synonym_map(result) @distributed_trace def delete_synonym_map(self, synonym_map, **kwargs): @@ -334,7 +331,7 @@ def delete_synonym_map(self, synonym_map, **kwargs): the name of the synonym map to delete unconditionally. 
:param name: The Synonym Map to delete - :type name: str or ~search.models.SynonymMap + :type name: str or ~azure.search.documents.indexes.models.SynonymMap :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: None @@ -364,14 +361,12 @@ def delete_synonym_map(self, synonym_map, **kwargs): ) @distributed_trace - def create_synonym_map(self, name, synonyms, **kwargs): - # type: (str, Sequence[str], **Any) -> SynonymMap + def create_synonym_map(self, synonym_map, **kwargs): + # type: (SynonymMap, **Any) -> SynonymMap """Create a new Synonym Map in an Azure Search service - :param name: The name of the Synonym Map to create - :type name: str - :param synonyms: The list of synonyms in SOLR format - :type synonyms: List[str] + :param synonym_map: The Synonym Map object + :type synonym_map: ~azure.search.documents.indexes.models.SynonymMap :return: The created Synonym Map :rtype: ~azure.search.documents.indexes.models.SynonymMap @@ -386,21 +381,18 @@ def create_synonym_map(self, name, synonyms, **kwargs): """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - solr_format_synonyms = "\n".join(synonyms) - synonym_map = _SynonymMap(name=name, synonyms=solr_format_synonyms) - result = self._client.synonym_maps.create(synonym_map, **kwargs) - return unpack_synonyms(result) + patched_synonym_map = pack_synonym_map(synonym_map) + result = self._client.synonym_maps.create(patched_synonym_map, **kwargs) + return unpack_synonym_map(result) @distributed_trace - def create_or_update_synonym_map(self, synonym_map, synonyms=None, **kwargs): - # type: (Union[str, SynonymMap], Optional[Sequence[str]], **Any) -> SynonymMap + def create_or_update_synonym_map(self, synonym_map, **kwargs): + # type: (SynonymMap, **Any) -> SynonymMap """Create a new Synonym Map in an Azure Search service, or update an existing one. 
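Reviewer note: both synonym map methods now take the SynonymMap model instead of a (name, synonyms) pair, with pack_synonym_map handling the SOLR packing and the encryption key. A sketch mirroring the updated samples in this PR; the endpoint and key are placeholders.

    from azure.core.credentials import AzureKeyCredential
    from azure.search.documents.indexes import SearchIndexClient
    from azure.search.documents.indexes.models import SynonymMap

    client = SearchIndexClient("<service endpoint>", AzureKeyCredential("<api key>"))
    solr_format_synonyms = "\n".join([
        "USA, United States, United States of America",
        "Washington, Wash. => WA",
    ])
    synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms)
    # The synonym map name is read from synonym_map.name.
    result = client.create_or_update_synonym_map(synonym_map)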
- :param synonym_map: The name of the Synonym Map to create or update - :type synonym_map: str or ~azure.search.documents.SynonymMap - :param synonyms: A list of synonyms in SOLR format - :type synonyms: List[str] + :param synonym_map: The Synonym Map object + :type synonym_map: ~azure.search.documents.indexes.models.SynonymMap :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The created or updated Synonym Map :rtype: ~azure.search.documents.indexes.models.SynonymMap """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) @@ -412,22 +404,14 @@ def create_or_update_synonym_map(self, synonym_map, synonyms=None, **kwargs): synonym_map, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) - try: - name = synonym_map.name - if synonyms: - synonym_map.synonyms = "\n".join(synonyms) - synonym_map.encryption_key = pack_search_resource_encryption_key(synonym_map.encryption_key) - except AttributeError: - name = synonym_map - solr_format_synonyms = "\n".join(synonyms) - synonym_map = _SynonymMap(name=name, synonyms=solr_format_synonyms) + patched_synonym_map = pack_synonym_map(synonym_map) result = self._client.synonym_maps.create_or_update( - synonym_map_name=name, - synonym_map=synonym_map, + synonym_map_name=synonym_map.name, + synonym_map=patched_synonym_map, error_map=error_map, **kwargs ) - return unpack_synonyms(result) + return unpack_synonym_map(result) @distributed_trace def get_service_statistics(self, **kwargs): diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_indexer_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_indexer_client.py index 0e5c69c55521..41c73f023fa7 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_indexer_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_search_indexer_client.py @@ -7,7 +7,6 @@ from azure.core import MatchConditions from azure.core.tracing.decorator import distributed_trace -from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError from ._generated import SearchServiceClient as _SearchServiceClient from ._generated.models import SearchIndexerSkillset @@ -54,7 +53,7 @@ def __exit__(self, *args): def close(self): # type: () -> None - """Close the :class:`~azure.search.documents.SearchIndexerClient` session. + """Close the :class:`~azure.search.documents.indexes.SearchIndexerClient` session. """ return self._client.close() @@ -65,9 +64,9 @@ def create_indexer(self, indexer, **kwargs): """Creates a new SearchIndexer. :param indexer: The definition of the indexer to create. - :type indexer: ~~azure.search.documents.SearchIndexer + :type indexer: ~azure.search.documents.indexes.models.SearchIndexer :return: The created SearchIndexer - :rtype: ~azure.search.documents.SearchIndexer + :rtype: ~azure.search.documents.indexes.models.SearchIndexer .. admonition:: Example: @@ -83,24 +82,21 @@ def create_indexer(self, indexer, **kwargs): return result @distributed_trace - def create_or_update_indexer(self, indexer, name=None, **kwargs): - # type: (SearchIndexer, Optional[str], **Any) -> SearchIndexer + def create_or_update_indexer(self, indexer, **kwargs): + # type: (SearchIndexer, **Any) -> SearchIndexer """Creates a new indexer or updates an indexer if it already exists. - :param name: The name of the indexer to create or update. - :type name: str :param indexer: The definition of the indexer to create or update. 
- :type indexer: ~azure.search.documents.SearchIndexer + :type indexer: ~azure.search.documents.indexes.models.SearchIndexer :return: The created SearchIndexer - :rtype: ~azure.search.documents.SearchIndexer + :rtype: ~azure.search.documents.indexes.models.SearchIndexer """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) error_map, access_condition = get_access_conditions( indexer, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) - if not name: - name = indexer.name + name = indexer.name result = self._client.indexers.create_or_update( indexer_name=name, indexer=indexer, error_map=error_map, **kwargs ) @@ -179,7 +175,7 @@ def delete_indexer(self, indexer, **kwargs): the name of the indexer to delete unconditionally. :param indexer: The indexer to delete. - :type indexer: str or ~azure.search.documents.SearchIndexer + :type indexer: str or ~azure.search.documents.indexes.models.SearchIndexer :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions @@ -261,7 +257,7 @@ def get_indexer_status(self, name, **kwargs): :type name: str :return: SearchIndexerStatus - :rtype: SearchIndexerStatus + :rtype: ~azure.search.documents.indexes.models.SearchIndexerStatus .. admonition:: Example: @@ -281,9 +277,9 @@ def create_data_source_connection(self, data_source_connection, **kwargs): """Creates a new data source connection. :param data_source_connection: The definition of the data source connection to create. - :type data_source_connection: ~search.models.SearchIndexerDataSourceConnection + :type data_source_connection: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection :return: The created SearchIndexerDataSourceConnection - :rtype: ~search.models.SearchIndexerDataSourceConnection + :rtype: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection .. admonition:: Example: @@ -300,25 +296,22 @@ def create_data_source_connection(self, data_source_connection, **kwargs): return unpack_search_indexer_data_source(result) @distributed_trace - def create_or_update_data_source_connection(self, data_source_connection, name=None, **kwargs): - # type: (SearchIndexerDataSourceConnection, Optional[str], **Any) -> SearchIndexerDataSourceConnection + def create_or_update_data_source_connection(self, data_source_connection, **kwargs): + # type: (SearchIndexerDataSourceConnection, **Any) -> SearchIndexerDataSourceConnection """Creates a new data source connection or updates a data source connection if it already exists. - :param name: The name of the data source connection to create or update. - :type name: str :param data_source_connection: The definition of the data source connection to create or update. 
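Reviewer note: the same pattern applies to data source connections — no separate name argument. A sketch assuming the SearchIndexerDataSourceConnection constructor used in this repo's indexer sample; the connection string and container name are placeholders.

    from azure.core.credentials import AzureKeyCredential
    from azure.search.documents.indexes import SearchIndexerClient
    from azure.search.documents.indexes.models import (
        SearchIndexerDataContainer,
        SearchIndexerDataSourceConnection,
    )

    indexer_client = SearchIndexerClient("<service endpoint>", AzureKeyCredential("<api key>"))
    data_source_connection = SearchIndexerDataSourceConnection(
        name="hotel-datasource",
        type="azureblob",
        connection_string="<azure storage connection string>",
        container=SearchIndexerDataContainer(name="searchcontainer"),
    )
    # The connection name is read from data_source_connection.name.
    result = indexer_client.create_or_update_data_source_connection(data_source_connection)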
- :type data_source_connection: ~search.models.SearchIndexerDataSourceConnection + :type data_source_connection: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The created SearchIndexerDataSourceConnection - :rtype: ~search.models.SearchIndexerDataSourceConnection + :rtype: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) error_map, access_condition = get_access_conditions( data_source_connection, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) - if not name: - name = data_source_connection.name + name = data_source_connection.name packed_data_source = pack_search_indexer_data_source(data_source_connection) result = self._client.data_sources.create_or_update( data_source_name=name, @@ -336,7 +329,7 @@ def get_data_source_connection(self, name, **kwargs): :param name: The name of the data source connection to retrieve. :type name: str :return: The SearchIndexerDataSourceConnection that is fetched. - :rtype: ~search.models.SearchIndexerDataSourceConnection + :rtype: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection .. admonition:: Example: @@ -357,7 +350,7 @@ def get_data_source_connections(self, **kwargs): """Lists all data source connections available for a search service. :return: List of all the data source connections. - :rtype: `list[~search.models.SearchIndexerDataSourceConnection]` + :rtype: `list[~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection]` .. admonition:: Example: @@ -393,7 +386,7 @@ def delete_data_source_connection(self, data_source_connection, **kwargs): to delete unconditionally :param data_source_connection: The data source connection to delete. 
- :type data_source_connection: str or ~search.models.SearchIndexerDataSourceConnection + :type data_source_connection: str or ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: None @@ -490,7 +483,7 @@ def delete_skillset(self, skillset, **kwargs): the name of the skillset to delete unconditionally :param name: The SearchIndexerSkillset to delete - :type name: str or ~search.models.SearchIndexerSkillset + :type name: str or ~azure.search.documents.indexes.models.SearchIndexerSkillset :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions @@ -516,16 +509,12 @@ def delete_skillset(self, skillset, **kwargs): self._client.skillsets.delete(name, error_map=error_map, **kwargs) @distributed_trace - def create_skillset(self, name, skills, description, **kwargs): - # type: (str, Sequence[SearchIndexerSkill], str, **Any) -> SearchIndexerSkillset + def create_skillset(self, skillset, **kwargs): + # type: (SearchIndexerSkillset, **Any) -> SearchIndexerSkillset """Create a new SearchIndexerSkillset in an Azure Search service - :param name: The name of the SearchIndexerSkillset to create - :type name: str - :param skills: A list of Skill objects to include in the SearchIndexerSkillset - :type skills: List[SearchIndexerSkill]] - :param description: A description for the SearchIndexerSkillset - :type description: Optional[str] + :param skillset: The SearchIndexerSkillset object to create + :type skillset: ~azure.search.documents.indexes.models.SearchIndexerSkillset :return: The created SearchIndexerSkillset :rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset @@ -541,59 +530,28 @@ def create_skillset(self, name, skills, description, **kwargs): """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - skillset = SearchIndexerSkillset( - name=name, skills=list(skills), description=description - ) - return self._client.skillsets.create(skillset, **kwargs) @distributed_trace - def create_or_update_skillset(self, name, **kwargs): - # type: (str, **Any) -> SearchIndexerSkillset + def create_or_update_skillset(self, skillset, **kwargs): + # type: (SearchIndexerSkillset, **Any) -> SearchIndexerSkillset """Create a new SearchIndexerSkillset in an Azure Search service, or update an - existing one. The skillset param must be provided to perform the - operation with access conditions. + existing one. - :param name: The name of the SearchIndexerSkillset to create or update - :type name: str - :keyword skills: A list of Skill objects to include in the SearchIndexerSkillset - :type skills: List[SearchIndexerSkill] - :keyword description: A description for the SearchIndexerSkillset - :type description: Optional[str] - :keyword skillset: A SearchIndexerSkillset to create or update. - :type skillset: :class:`~azure.search.documents.SearchIndexerSkillset` + :param skillset: The SearchIndexerSkillset object to create or update + :type skillset: ~azure.search.documents.indexes.models.SearchIndexerSkillset :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The created or updated SearchIndexerSkillset :rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset - If a `skillset` is passed in, any optional `skills`, or - `description` parameter values will override it. 
- """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError} - access_condition = None - - if "skillset" in kwargs: - skillset = kwargs.pop("skillset") - error_map, access_condition = get_access_conditions( - skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally) - ) - kwargs.update(access_condition) - skillset = SearchIndexerSkillset.deserialize(skillset.serialize()) - skillset.name = name - for param in ("description", "skills"): - if param in kwargs: - setattr(skillset, param, kwargs.pop(param)) - else: - - skillset = SearchIndexerSkillset( - name=name, - description=kwargs.pop("description", None), - skills=kwargs.pop("skills", None), - ) + error_map, access_condition = get_access_conditions( + skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally) + ) + kwargs.update(access_condition) return self._client.skillsets.create_or_update( - skillset_name=name, skillset=skillset, error_map=error_map, **kwargs + skillset_name=skillset.name, skillset=skillset, error_map=error_map, **kwargs ) diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_utils.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_utils.py index e29bcbcc9eae..8e7021b8c915 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_utils.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/_utils.py @@ -217,7 +217,17 @@ def unpack_search_index(search_index): ) -def unpack_synonyms(synonym_map): +def pack_synonym_map(synonym_map): + # type: (SynonymMap) -> _SynonymMap + return _SynonymMap( + name=synonym_map.name, + synonyms="\n".join(synonym_map.synonyms), + encryption_key=pack_search_resource_encryption_key(synonym_map.encryption_key), + e_tag=synonym_map.e_tag + ) + + +def unpack_synonym_map(synonym_map): # type: (_SynonymMap) -> SynonymMap return SynonymMap( name=synonym_map.name, @@ -347,7 +357,7 @@ def pack_search_field(search_field): name = search_field.get("name") field_type = search_field.get("type") key = search_field.get("key") - is_hidden = search_field.get("is_hidden") + hidden = search_field.get("hidden") searchable = search_field.get("searchable") filterable = search_field.get("filterable") sortable = search_field.get("sortable") @@ -362,7 +372,7 @@ def pack_search_field(search_field): name=name, type=field_type, key=key, - retrievable=not is_hidden, + retrievable=not hidden, searchable=searchable, filterable=filterable, sortable=sortable, @@ -379,7 +389,7 @@ def pack_search_field(search_field): name=search_field.name, type=search_field.type, key=search_field.key, - retrievable=not search_field.is_hidden, + retrievable=not search_field.hidden, searchable=search_field.searchable, filterable=search_field.filterable, sortable=search_field.sortable, @@ -402,7 +412,7 @@ def unpack_search_field(search_field): name=search_field.name, type=search_field.type, key=search_field.key, - is_hidden=search_field.retrievable, + hidden=not search_field.retrievable, searchable=search_field.searchable, filterable=search_field.filterable, sortable=search_field.sortable, diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_index_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_index_client.py index 0f5537d8ffe4..6ea84468765d 100644 --- 
a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_index_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_index_client.py @@ -10,13 +10,12 @@ from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.async_paging import AsyncItemPaged from .._generated.aio import SearchServiceClient as _SearchServiceClient -from .._generated.models import SynonymMap from ....aio import SearchClient from .._utils import ( pack_search_index, unpack_search_index, - unpack_synonyms, - pack_search_resource_encryption_key, + unpack_synonym_map, + pack_synonym_map, get_access_conditions, normalize_endpoint, ) @@ -60,7 +59,7 @@ async def __aexit__(self, *args): async def close(self): # type: () -> None - """Close the :class:`~azure.search.documents.aio.SearchIndexClient` session. + """Close the :class:`~azure.search.documents.indexes.aio.SearchIndexClient` session. """ return await self._client.close() @@ -81,7 +80,7 @@ def list_indexes(self, **kwargs): """List the indexes in an Azure Search service. :return: List of indexes - :rtype: list[~azure.search.documents.SearchIndex] + :rtype: list[:class:`~azure.search.documents.indexes.models.SearchIndex`] :raises: ~azure.core.exceptions.HttpResponseError """ @@ -90,14 +89,14 @@ def list_indexes(self, **kwargs): return self._client.indexes.list(cls=lambda objs: [unpack_search_index(x) for x in objs], **kwargs) @distributed_trace_async - async def get_index(self, index_name, **kwargs): + async def get_index(self, name, **kwargs): # type: (str, **Any) -> SearchIndex """ - :param index_name: The name of the index to retrieve. - :type index_name: str + :param name: The name of the index to retrieve. + :type name: str :return: SearchIndex object - :rtype: ~azure.search.documents.SearchIndex + :rtype: :class:`~azure.search.documents.indexes.models.SearchIndex` :raises: ~azure.core.exceptions.HttpResponseError .. admonition:: Example: @@ -110,7 +109,7 @@ async def get_index(self, index_name, **kwargs): :caption: Get an index. """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - result = await self._client.indexes.get(index_name, **kwargs) + result = await self._client.indexes.get(name, **kwargs) return unpack_search_index(result) @distributed_trace_async @@ -122,7 +121,7 @@ async def get_index_statistics(self, index_name, **kwargs): :param index_name: The name of the index to retrieve. :type index_name: str :return: Statistics for the given index, including a document count and storage usage. - :rtype: ~azure.search.documents.GetIndexStatisticsResult + :rtype: dict :raises: ~azure.core.exceptions.HttpResponseError """ @@ -137,7 +136,7 @@ async def delete_index(self, index, **kwargs): provided instead of the name to use the access conditions :param index: The index to retrieve. - :type index: str or ~search.models.SearchIndex + :type index: str or :class:`~azure.search.documents.indexes.models.SearchIndex` :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :raises: ~azure.core.exceptions.HttpResponseError @@ -170,9 +169,9 @@ async def create_index(self, index, **kwargs): """Creates a new search index. :param index: The index object. 
- :type index: ~azure.search.documents.SearchIndex + :type index: :class:`~azure.search.documents.indexes.models.SearchIndex` :return: The index created - :rtype: ~azure.search.documents.SearchIndex + :rtype: :class:`~azure.search.documents.indexes.models.SearchIndex` :raises: ~azure.core.exceptions.HttpResponseError .. admonition:: Example: @@ -191,15 +190,13 @@ async def create_index(self, index, **kwargs): @distributed_trace_async async def create_or_update_index( - self, index_name, index, allow_index_downtime=None, **kwargs + self, index, allow_index_downtime=None, **kwargs ): - # type: (str, SearchIndex, bool, MatchConditions, **Any) -> SearchIndex + # type: (SearchIndex, bool, **Any) -> SearchIndex """Creates a new search index or updates an index if it already exists. - :param index_name: The name of the index. - :type index_name: str :param index: The index object. - :type index: ~azure.search.documents.SearchIndex + :type index: :class:`~azure.search.documents.indexes.models.SearchIndex` :param allow_index_downtime: Allows new analyzers, tokenizers, token filters, or char filters to be added to an index by taking the index offline for at least a few seconds. This temporarily causes indexing and query requests to fail. Performance and write availability of @@ -209,7 +206,7 @@ async def create_or_update_index( :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The index created or updated - :rtype: :class:`~azure.search.documents.SearchIndex` + :rtype: :class:`~azure.search.documents.indexes.models.SearchIndex` :raises: :class:`~azure.core.exceptions.ResourceNotFoundError`, \ :class:`~azure.core.exceptions.ResourceModifiedError`, \ :class:`~azure.core.exceptions.ResourceNotModifiedError`, \ @@ -232,7 +229,7 @@ async def create_or_update_index( kwargs.update(access_condition) patched_index = pack_search_index(index) result = await self._client.indexes.create_or_update( - index_name=index_name, + index_name=index.name, index=patched_index, allow_index_downtime=allow_index_downtime, error_map=error_map, @@ -248,9 +245,9 @@ async def analyze_text(self, index_name, analyze_request, **kwargs): :param index_name: The name of the index for which to test an analyzer. :type index_name: str :param analyze_request: The text and analyzer or analysis components to test. - :type analyze_request: ~azure.search.documents.AnalyzeRequest + :type analyze_request: :class:`~azure.search.documents.indexes.models.AnalyzeRequest` :return: AnalyzeResult - :rtype: ~azure.search.documents.AnalyzeResult + :rtype: :class:`~azure.search.documents.indexes.models.AnalyzeResult` :raises: ~azure.core.exceptions.HttpResponseError .. admonition:: Example: @@ -289,7 +286,7 @@ async def get_synonym_maps(self, **kwargs): """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) result = await self._client.synonym_maps.list(**kwargs) - return [unpack_synonyms(x) for x in result.synonym_maps] + return [unpack_synonym_map(x) for x in result.synonym_maps] @distributed_trace_async async def get_synonym_map_names(self, **kwargs): @@ -313,7 +310,7 @@ async def get_synonym_map(self, name, **kwargs): :param name: The name of the Synonym Map to get :type name: str :return: The retrieved Synonym Map - :rtype: ~azure.search.documents.indexes.models.SynonymMap + :rtype: :class:`~azure.search.documents.indexes.models.SynonymMap` :raises: :class:`~azure.core.exceptions.ResourceNotFoundError` .. 
admonition:: Example: @@ -328,7 +325,7 @@ """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) result = await self._client.synonym_maps.get(name, **kwargs) - return unpack_synonyms(result) + return unpack_synonym_map(result) @distributed_trace_async async def delete_synonym_map(self, synonym_map, **kwargs): @@ -338,7 +335,7 @@ async def delete_synonym_map(self, synonym_map, **kwargs): the name of the synonym map to delete unconditionally. :param name: The Synonym Map to delete - :type name: str or ~search.models.SynonymMap + :type name: str or ~azure.search.documents.indexes.models.SynonymMap :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: None @@ -369,16 +366,14 @@ async def delete_synonym_map(self, synonym_map, **kwargs): ) @distributed_trace_async - async def create_synonym_map(self, name, synonyms, **kwargs): - # type: (str, Sequence[str], **Any) -> SynonymMap + async def create_synonym_map(self, synonym_map, **kwargs): - # type: (SynonymMap, **Any) -> SynonymMap """Create a new Synonym Map in an Azure Search service - :param name: The name of the Synonym Map to create - :type name: str - :param synonyms: A list of synonyms in SOLR format - :type synonyms: List[str] + :param synonym_map: The Synonym Map object + :type synonym_map: :class:`~azure.search.documents.indexes.models.SynonymMap` :return: The created Synonym Map - :rtype: ~azure.search.documents.indexes.models.SynonymMap + :rtype: :class:`~azure.search.documents.indexes.models.SynonymMap` .. admonition:: Example: @@ -391,25 +386,22 @@ async def create_synonym_map(self, name, synonyms, **kwargs): """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - solr_format_synonyms = "\n".join(synonyms) - synonym_map = SynonymMap(name=name, synonyms=solr_format_synonyms) - result = await self._client.synonym_maps.create(synonym_map, **kwargs) - return unpack_synonyms(result) + patched_synonym_map = pack_synonym_map(synonym_map) + result = await self._client.synonym_maps.create(patched_synonym_map, **kwargs) + return unpack_synonym_map(result) @distributed_trace_async - async def create_or_update_synonym_map(self, synonym_map, synonyms=None, **kwargs): - # type: (Union[str, SynonymMap], Optional[Sequence[str]], **Any) -> SynonymMap + async def create_or_update_synonym_map(self, synonym_map, **kwargs): + # type: (SynonymMap, **Any) -> SynonymMap """Create a new Synonym Map in an Azure Search service, or update an existing one. 
- :param synonym_map: The name of the Synonym Map to create or update - :type synonym_map: str or ~azure.search.documents.SynonymMap - :param synonyms: A list of synonyms in SOLR format - :type synonyms: List[str] + :param synonym_map: The Synonym Map object + :type synonym_map: :class:`~azure.search.documents.indexes.models.SynonymMap` :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The created or updated Synonym Map - :rtype: ~azure.search.documents.indexes.models.SynonymMap + :rtype: :class:`~azure.search.documents.indexes.models.SynonymMap` """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) @@ -417,22 +409,14 @@ async def create_or_update_synonym_map(self, synonym_map, synonyms=None, **kwarg synonym_map, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) - try: - name = synonym_map.name - if synonyms: - synonym_map.synonyms = "\n".join(synonyms) - synonym_map.encryption_key = pack_search_resource_encryption_key(synonym_map.encryption_key) - except AttributeError: - name = synonym_map - solr_format_synonyms = "\n".join(synonyms) - synonym_map = SynonymMap(name=name, synonyms=solr_format_synonyms) + patched_synonym_map = pack_synonym_map(synonym_map) result = await self._client.synonym_maps.create_or_update( - synonym_map_name=name, - synonym_map=synonym_map, + synonym_map_name=synonym_map.name, + synonym_map=patched_synonym_map, error_map=error_map, **kwargs ) - return unpack_synonyms(result) + return unpack_synonym_map(result) @distributed_trace_async async def get_service_statistics(self, **kwargs): diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py index 1f80b8c91dbf..88101e0c3632 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py @@ -7,7 +7,6 @@ from azure.core import MatchConditions from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError from .._generated.aio import SearchServiceClient as _SearchServiceClient from .._generated.models import SearchIndexerSkillset @@ -54,7 +53,7 @@ async def __aexit__(self, *args): async def close(self): # type: () -> None - """Close the :class:`~azure.search.documents.aio.SearchIndexerClient` session. + """Close the :class:`~azure.search.documents.indexes.aio.SearchIndexerClient` session. """ return await self._client.close() @@ -65,7 +64,7 @@ async def create_indexer(self, indexer, **kwargs): """Creates a new SearchIndexer. :param indexer: The definition of the indexer to create. 
- :type indexer: ~azure.search.documents.SearchIndexer + :type indexer: ~azure.search.documents.indexes.models.SearchIndexer :return: The created SearchIndexer :rtype: ~azure.search.documents.indexes.models.SearchIndexer @@ -83,14 +82,12 @@ async def create_indexer(self, indexer, **kwargs): return result @distributed_trace_async - async def create_or_update_indexer(self, indexer, name=None, **kwargs): - # type: (SearchIndexer, Optional[str], **Any) -> SearchIndexer + async def create_or_update_indexer(self, indexer, **kwargs): + # type: (SearchIndexer, **Any) -> SearchIndexer """Creates a new indexer or updates an indexer if it already exists. - :param name: The name of the indexer to create or update. - :type name: str :param indexer: The definition of the indexer to create or update. - :type indexer: ~azure.search.documents.SearchIndexer + :type indexer: ~azure.search.documents.indexes.models.SearchIndexer :return: The created SearchIndexer :rtype: ~azure.search.documents.indexes.models.SearchIndexer """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) error_map, access_condition = get_access_conditions( indexer, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) - if not name: - name = indexer.name + name = indexer.name result = await self._client.indexers.create_or_update( indexer_name=name, indexer=indexer, error_map=error_map, **kwargs ) @@ -170,8 +166,8 @@ async def delete_indexer(self, indexer, **kwargs): must be provided instead of the name. It is enough to provide the name of the indexer to delete unconditionally. - :param name: The name of the indexer to delete. - :type name: str + :param indexer: The name or the indexer object to delete. + :type indexer: str or ~azure.search.documents.indexes.models.SearchIndexer :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions @@ -253,7 +249,7 @@ async def get_indexer_status(self, name, **kwargs): :type name: str :return: SearchIndexerStatus - :rtype: SearchIndexerStatus + :rtype: ~azure.search.documents.indexes.models.SearchIndexerStatus .. admonition:: Example: @@ -272,9 +268,9 @@ async def create_data_source_connection(self, data_source_connection, **kwargs): # type: (SearchIndexerDataSourceConnection, **Any) -> SearchIndexerDataSourceConnection """Creates a new data source connection. :param data_source_connection: The definition of the data source connection to create. - :type data_source_connection: ~search.models.SearchIndexerDataSourceConnection + :type data_source_connection: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection :return: The created SearchIndexerDataSourceConnection - :rtype: ~search.models.SearchIndexerDataSourceConnection + :rtype: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection .. admonition:: Example: @@ -291,25 +287,22 @@ async def create_data_source_connection(self, data_source_connection, **kwargs): return unpack_search_indexer_data_source(result) @distributed_trace_async - async def create_or_update_data_source_connection(self, data_source_connection, name=None, **kwargs): - # type: (SearchIndexerDataSourceConnection, Optional[str], **Any) -> SearchIndexerDataSourceConnection + async def create_or_update_data_source_connection(self, data_source_connection, **kwargs): + # type: (SearchIndexerDataSourceConnection, **Any) -> SearchIndexerDataSourceConnection """Creates a new data source connection or updates a data source connection if it already exists. 
- :param name: The name of the data source connection to create or update. - :type name: str :param data_source_connection: The definition of the data source connection to create or update. - :type data_source_connection: ~search.models.SearchIndexerDataSourceConnection + :type data_source_connection: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The created SearchIndexerDataSourceConnection - :rtype: ~search.models.SearchIndexerDataSourceConnection + :rtype: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) error_map, access_condition = get_access_conditions( data_source_connection, kwargs.pop("match_condition", MatchConditions.Unconditionally) ) kwargs.update(access_condition) - if not name: - name = data_source_connection.name + name = data_source_connection.name packed_data_source = pack_search_indexer_data_source(data_source_connection) result = await self._client.data_sources.create_or_update( data_source_name=name, @@ -327,7 +320,7 @@ async def delete_data_source_connection(self, data_source_connection, **kwargs): It is enough to provide the name of the data source connection to delete unconditionally :param data_source_connection: The data source connection to delete. - :type data_source_connection: str or ~search.models.SearchIndexerDataSourceConnection + :type data_source_connection: str or ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: None @@ -363,7 +356,7 @@ async def get_data_source_connection(self, name, **kwargs): :param name: The name of the data source connection to retrieve. :type name: str :return: The SearchIndexerDataSourceConnection that is fetched. - :rtype: ~search.models.SearchIndexerDataSourceConnection + :rtype: ~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection .. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py :start-after: [START get_data_source_connection_async] @@ -382,7 +375,7 @@ async def get_data_source_connections(self, **kwargs): """Lists all data source connections available for a search service. :return: List of all the data source connections. - :rtype: `list[~search.models.SearchIndexerDataSourceConnection]` + :rtype: `list[~azure.search.documents.indexes.models.SearchIndexerDataSourceConnection]` .. 
admonition:: Example: @@ -479,7 +472,7 @@ async def delete_skillset(self, skillset, **kwargs): the name of the skillset to delete unconditionally :param name: The SearchIndexerSkillset to delete - :type name: str or ~search.models.SearchIndexerSkillset + :type name: str or ~azure.search.documents.indexes.models.SearchIndexerSkillset :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions @@ -505,16 +498,12 @@ async def delete_skillset(self, skillset, **kwargs): await self._client.skillsets.delete(name, error_map=error_map, **kwargs) @distributed_trace_async - async def create_skillset(self, name, skills, description, **kwargs): - # type: (str, Sequence[SearchIndexerSkill], str, **Any) -> SearchIndexerSkillset + async def create_skillset(self, skillset, **kwargs): + # type: (SearchIndexerSkillset, **Any) -> SearchIndexerSkillset """Create a new SearchIndexerSkillset in an Azure Search service - :param name: The name of the SearchIndexerSkillset to create - :type name: str - :param skills: A list of Skill objects to include in the SearchIndexerSkillset - :type skills: List[SearchIndexerSkill]] - :param description: A description for the SearchIndexerSkillset - :type description: Optional[str] + :param skillset: The SearchIndexerSkillset object to create + :type skillset: ~azure.search.documents.indexes.models.SearchIndexerSkillset :return: The created SearchIndexerSkillset :rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset @@ -530,60 +519,28 @@ """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - skillset = SearchIndexerSkillset( - name=name, skills=list(skills), description=description - ) - return await self._client.skillsets.create(skillset, **kwargs) @distributed_trace_async - async def create_or_update_skillset(self, name, **kwargs): - # type: (str, **Any) -> SearchIndexerSkillset + async def create_or_update_skillset(self, skillset, **kwargs): + # type: (SearchIndexerSkillset, **Any) -> SearchIndexerSkillset """Create a new SearchIndexerSkillset in an Azure Search service, or update an - existing one. The skillset param must be provided to perform the - operation with access conditions. + existing one. - :param name: The name of the SearchIndexerSkillset to create or update - :type name: str - :keyword skills: A list of Skill objects to include in the SearchIndexerSkillset - :type skills: List[SearchIndexerSkill] - :keyword description: A description for the SearchIndexerSkillset - :type description: Optional[str] - :keyword skillset: A SearchIndexerSkillset to create or update. - :type skillset: :class:`~azure.search.documents.SearchIndexerSkillset` + :param skillset: The SearchIndexerSkillset object to create or update + :type skillset: :class:`~azure.search.documents.indexes.models.SearchIndexerSkillset` :keyword match_condition: The match condition to use upon the etag :type match_condition: ~azure.core.MatchConditions :return: The created or updated SearchIndexerSkillset - :rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset - - If a `skillset` is passed in, any optional `skills`, or - `description` parameter values will override it. 
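Reviewer note: create_skillset and create_or_update_skillset now take the assembled SearchIndexerSkillset. A sketch matching the updated tests in this PR; indexer_client is constructed as in the earlier data source sketch.

    from azure.search.documents.indexes.models import (
        EntityRecognitionSkill,
        InputFieldMappingEntry,
        OutputFieldMappingEntry,
        SearchIndexerSkillset,
    )

    skill = EntityRecognitionSkill(
        inputs=[InputFieldMappingEntry(name="text", source="/document/content")],
        outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")],
    )
    skillset = SearchIndexerSkillset(name="test-ss", skills=[skill], description="desc")
    # The name/skills/description arguments are gone; everything rides on the model.
    result = indexer_client.create_or_update_skillset(skillset)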
- + :rtype: :class:`~azure.search.documents.indexes.models.SearchIndexerSkillset` """ kwargs["headers"] = self._merge_client_headers(kwargs.get("headers")) - error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError} - access_condition = None - - if "skillset" in kwargs: - skillset = kwargs.pop("skillset") - error_map, access_condition = get_access_conditions( - skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally) - ) - kwargs.update(access_condition) - skillset = SearchIndexerSkillset.deserialize(skillset.serialize()) - skillset.name = name - for param in ("description", "skills"): - if param in kwargs: - setattr(skillset, param, kwargs.pop(param)) - else: - - skillset = SearchIndexerSkillset( - name=name, - description=kwargs.pop("description", None), - skills=kwargs.pop("skills", None), - ) + error_map, access_condition = get_access_conditions( + skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally) + ) + kwargs.update(access_condition) return await self._client.skillsets.create_or_update( - skillset_name=name, skillset=skillset, error_map=error_map, **kwargs + skillset_name=skillset.name, skillset=skillset, error_map=error_map, **kwargs ) diff --git a/sdk/search/azure-search-documents/azure/search/documents/indexes/models/__init__.py b/sdk/search/azure-search-documents/azure/search/documents/indexes/models/__init__.py index 8cafe20955af..d75447532da5 100644 --- a/sdk/search/azure-search-documents/azure/search/documents/indexes/models/__init__.py +++ b/sdk/search/azure-search-documents/azure/search/documents/indexes/models/__init__.py @@ -88,6 +88,7 @@ SearchIndexer, SearchIndexerDataContainer, SearchIndexerSkillset, + SearchIndexerStatus, ScoringFunction, ScoringProfile, SentimentSkill, @@ -186,6 +187,7 @@ "SearchIndexerDataContainer", "SearchIndexerDataSourceConnection", "SearchIndexerSkillset", + "SearchIndexerStatus", "SearchResourceEncryptionKey", "SearchableField", "SentimentSkill", diff --git a/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py b/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py index 5c79ed59cff7..1fa553e19d88 100644 --- a/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py +++ b/sdk/search/azure-search-documents/samples/async_samples/sample_synonym_map_operations_async.py @@ -26,15 +26,18 @@ from azure.core.credentials import AzureKeyCredential from azure.search.documents.indexes.aio import SearchIndexClient +from azure.search.documents.indexes.models import SynonymMap client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) async def create_synonym_map(): # [START create_synonym_map_async] - result = await client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. 
=> WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = await client.create_synonym_map(synonym_map) print("Create new Synonym Map 'test-syn-map succeeded") # [END create_synonym_map_async] diff --git a/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py b/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py index 515aeb942037..f0684da68d6b 100644 --- a/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py +++ b/sdk/search/azure-search-documents/samples/sample_indexer_datasource_skillset.py @@ -89,7 +89,8 @@ def _create_skillset(): output = OutputFieldMappingEntry(name="dateTimes", target_name="RenovatedDate") s = EntityRecognitionSkill(name="merge-skill", inputs=[inp], outputs=[output]) - result = client.create_skillset(name='hotel-data-skill', skills=[s], description="example skillset") + skillset = SearchIndexerSkillset(name='hotel-data-skill', skills=[s], description="example skillset") + result = client.create_skillset(skillset) return result def sample_indexer_workflow(): diff --git a/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py b/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py index 4470ebb9c9bb..a9f828690c53 100644 --- a/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py +++ b/sdk/search/azure-search-documents/samples/sample_synonym_map_operations.py @@ -25,15 +25,18 @@ from azure.core.credentials import AzureKeyCredential from azure.search.documents.indexes import SearchIndexClient +from azure.search.documents.indexes.models import SynonymMap client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) def create_synonym_map(): # [START create_synonym_map] - result = client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. 
=> WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = client.create_synonym_map(synonym_map) print("Create new Synonym Map 'test-syn-map succeeded") # [END create_synonym_map] diff --git a/sdk/search/azure-search-documents/tests/async_tests/test_service_live_async.py b/sdk/search/azure-search-documents/tests/async_tests/test_service_live_async.py index ad024956ecbc..b3d3055fcc13 100644 --- a/sdk/search/azure-search-documents/tests/async_tests/test_service_live_async.py +++ b/sdk/search/azure-search-documents/tests/async_tests/test_service_live_async.py @@ -148,7 +148,7 @@ async def test_delete_indexes_if_unchanged(self, api_key, endpoint, index_name, etag = result.e_tag # get e tag nd update index.scoring_profiles = [] - await client.create_or_update_index(index.name, index) + await client.create_or_update_index(index) index.e_tag = etag with pytest.raises(HttpResponseError): @@ -198,7 +198,7 @@ async def test_create_or_update_index(self, api_key, endpoint, index_name, **kwa scoring_profiles=scoring_profiles, cors_options=cors_options) client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = await client.create_or_update_index(index_name=index.name, index=index) + result = await client.create_or_update_index(index=index) assert len(result.scoring_profiles) == 0 assert result.cors_options.allowed_origins == cors_options.allowed_origins assert result.cors_options.max_age_in_seconds == cors_options.max_age_in_seconds @@ -212,7 +212,7 @@ async def test_create_or_update_index(self, api_key, endpoint, index_name, **kwa fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - result = await client.create_or_update_index(index_name=index.name, index=index) + result = await client.create_or_update_index(index=index) assert result.scoring_profiles[0].name == scoring_profile.name assert result.cors_options.allowed_origins == cors_options.allowed_origins assert result.cors_options.max_age_in_seconds == cors_options.max_age_in_seconds @@ -250,11 +250,11 @@ async def test_create_or_update_indexes_if_unchanged(self, api_key, endpoint, in etag = result.e_tag # get e tag nd update index.scoring_profiles = [] - await client.create_or_update_index(index.name, index) + await client.create_or_update_index(index) index.e_tag = etag with pytest.raises(HttpResponseError): - await client.create_or_update_index(index.name, index, match_condition=MatchConditions.IfNotModified) + await client.create_or_update_index(index, match_condition=MatchConditions.IfNotModified) @SearchResourceGroupPreparer(random_name_enabled=True) @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) @@ -269,10 +269,12 @@ class SearchSynonymMapsClientTest(AzureMgmtTestCase): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) async def test_create_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = await client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. 
=> WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = await client.create_synonym_map(synonym_map) assert isinstance(result, SynonymMap) assert result.name == "test-syn-map" assert result.synonyms == [ @@ -285,10 +287,12 @@ async def test_create_synonym_map(self, api_key, endpoint, index_name, **kwargs) @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) async def test_delete_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = await client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = await client.create_synonym_map(synonym_map) assert len(await client.get_synonym_maps()) == 1 await client.delete_synonym_map("test-syn-map") assert len(await client.get_synonym_maps()) == 0 @@ -297,15 +301,18 @@ async def test_delete_synonym_map(self, api_key, endpoint, index_name, **kwargs) @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) async def test_delete_synonym_map_if_unchanged(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = await client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = await client.create_synonym_map(synonym_map) etag = result.e_tag - await client.create_or_update_synonym_map("test-syn-map", [ - "Washington, Wash. => WA", - ]) + synonym_map.synonyms = "\n".join([ + "Washington, Wash. => WA", + ]) + await client.create_or_update_synonym_map(synonym_map) result.e_tag = etag with pytest.raises(HttpResponseError): @@ -316,10 +323,12 @@ async def test_delete_synonym_map_if_unchanged(self, api_key, endpoint, index_na @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) async def test_get_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - await client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + await client.create_synonym_map(synonym_map) assert len(await client.get_synonym_maps()) == 1 result = await client.get_synonym_map("test-syn-map") assert isinstance(result, SynonymMap) @@ -333,12 +342,17 @@ async def test_get_synonym_map(self, api_key, endpoint, index_name, **kwargs): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) async def test_get_synonym_maps(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - await client.create_synonym_map("test-syn-map-1", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", + "Washington, Wash. => WA", ]) - await client.create_synonym_map("test-syn-map-2", [ + synonym_map_1 = SynonymMap(name="test-syn-map-1", synonyms=solr_format_synonyms) + await client.create_synonym_map(synonym_map_1) + solr_format_synonyms = "\n".join([ "Washington, Wash. 
=> WA", ]) + synonym_map_2 = SynonymMap(name="test-syn-map-2", synonyms=solr_format_synonyms) + await client.create_synonym_map(synonym_map_2) result = await client.get_synonym_maps() assert isinstance(result, list) assert all(isinstance(x, SynonymMap) for x in result) @@ -348,13 +362,17 @@ async def test_get_synonym_maps(self, api_key, endpoint, index_name, **kwargs): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) async def test_create_or_update_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - await client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", + "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + await client.create_synonym_map(synonym_map) assert len(await client.get_synonym_maps()) == 1 - await client.create_or_update_synonym_map("test-syn-map", [ + synonym_map.synonyms = "\n".join([ "Washington, Wash. => WA", ]) + await client.create_or_update_synonym_map(synonym_map) assert len(await client.get_synonym_maps()) == 1 result = await client.get_synonym_map("test-syn-map") assert isinstance(result, SynonymMap) @@ -373,7 +391,8 @@ async def test_create_skillset(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - result = await client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + result = await client.create_skillset(skillset) assert isinstance(result, SearchIndexerSkillset) assert result.name == "test-ss" assert result.description == "desc" @@ -390,7 +409,8 @@ async def test_delete_skillset(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - result = await client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + result = await client.create_skillset(skillset) assert len(await client.get_skillsets()) == 1 await client.delete_skillset("test-ss") @@ -405,10 +425,12 @@ async def test_delete_skillset_if_unchanged(self, api_key, endpoint, index_name, s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - result = await client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + result = await client.create_skillset(skillset) etag = result.e_tag - updated = await client.create_or_update_skillset(name='test-ss', skills=[s], description="updated") + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="updated") + updated = await client.create_or_update_skillset(skillset1) updated.e_tag = etag with pytest.raises(HttpResponseError): @@ -421,7 +443,8 @@ async def test_get_skillset(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], 
outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - await client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + await client.create_skillset(skillset) assert len(await client.get_skillsets()) == 1 result = await client.get_skillset("test-ss") @@ -439,8 +462,10 @@ async def test_get_skillsets(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - await client.create_skillset(name='test-ss-1', skills=[s], description="desc1") - await client.create_skillset(name='test-ss-2', skills=[s], description="desc2") + skillset1 = SearchIndexerSkillset(name='test-ss-1', skills=list([s]), description="desc1") + await client.create_skillset(skillset1) + skillset2 = SearchIndexerSkillset(name='test-ss-2', skills=list([s]), description="desc2") + await client.create_skillset(skillset2) result = await client.get_skillsets() assert isinstance(result, list) assert all(isinstance(x, SearchIndexerSkillset) for x in result) @@ -453,8 +478,10 @@ async def test_create_or_update_skillset(self, api_key, endpoint, index_name, ** s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - await client.create_or_update_skillset(name='test-ss', skills=[s], description="desc1") - await client.create_or_update_skillset(name='test-ss', skills=[s], description="desc2") + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc1") + await client.create_or_update_skillset(skillset1) + skillset2 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc2") + await client.create_or_update_skillset(skillset2) assert len(await client.get_skillsets()) == 1 result = await client.get_skillset("test-ss") @@ -469,8 +496,10 @@ async def test_create_or_update_skillset_inplace(self, api_key, endpoint, index_ s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - ss = await client.create_or_update_skillset(name='test-ss', skills=[s], description="desc1") - await client.create_or_update_skillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc1") + ss = await client.create_or_update_skillset(skillset1) + skillset2 = SearchIndexerSkillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + await client.create_or_update_skillset(skillset2) assert len(await client.get_skillsets()) == 1 result = await client.get_skillset("test-ss") @@ -485,16 +514,14 @@ async def test_create_or_update_skillset_if_unchanged(self, api_key, endpoint, i s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - ss = await client.create_or_update_skillset(name='test-ss', skills=[s], description="desc1") + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc1") + ss = await client.create_or_update_skillset(skillset1) etag = ss.e_tag - await 
client.create_or_update_skillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + skillset2 = SearchIndexerSkillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + await client.create_or_update_skillset(skillset2) assert len(await client.get_skillsets()) == 1 - ss.e_tag = etag - with pytest.raises(HttpResponseError): - await client.create_or_update_skillset(name='test-ss', skills=[s], skillset=ss, match_condition=MatchConditions.IfNotModified) - class SearchDataSourcesClientTest(AzureMgmtTestCase): diff --git a/sdk/search/azure-search-documents/tests/test_index_field_helpers.py b/sdk/search/azure-search-documents/tests/test_index_field_helpers.py index 6c077a58f05e..79d1a2dd6753 100644 --- a/sdk/search/azure-search-documents/tests/test_index_field_helpers.py +++ b/sdk/search/azure-search-documents/tests/test_index_field_helpers.py @@ -50,7 +50,7 @@ def test_defaults(self): fld = SimpleField(name="foo", type=SearchFieldDataType.Double) assert fld.name == "foo" assert fld.type == SearchFieldDataType.Double - assert fld.is_hidden == False + assert fld.hidden == False assert fld.sortable == False assert fld.facetable == False assert fld.searchable == False @@ -66,7 +66,7 @@ def test_defaults(self): fld = SearchableField(name="foo", collection=True) assert fld.name == "foo" assert fld.type == SearchFieldDataType.Collection(SearchFieldDataType.String) - assert fld.is_hidden == False + assert fld.hidden == False assert fld.sortable == False assert fld.facetable == False assert fld.searchable == True diff --git a/sdk/search/azure-search-documents/tests/test_service_live.py b/sdk/search/azure-search-documents/tests/test_service_live.py index b5f3e685e63f..dd9599382d27 100644 --- a/sdk/search/azure-search-documents/tests/test_service_live.py +++ b/sdk/search/azure-search-documents/tests/test_service_live.py @@ -132,7 +132,7 @@ def test_delete_indexes_if_unchanged(self, api_key, endpoint, index_name, **kwar etag = result.e_tag # get e tag and update index.scoring_profiles = [] - client.create_or_update_index(index.name, index) + client.create_or_update_index(index) index.e_tag = etag with pytest.raises(HttpResponseError): @@ -180,7 +180,7 @@ def test_create_or_update_index(self, api_key, endpoint, index_name, **kwargs): scoring_profiles=scoring_profiles, cors_options=cors_options) client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = client.create_or_update_index(index_name=index.name, index=index) + result = client.create_or_update_index(index=index) assert len(result.scoring_profiles) == 0 assert result.cors_options.allowed_origins == cors_options.allowed_origins assert result.cors_options.max_age_in_seconds == cors_options.max_age_in_seconds @@ -194,7 +194,7 @@ def test_create_or_update_index(self, api_key, endpoint, index_name, **kwargs): fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) - result = client.create_or_update_index(index_name=index.name, index=index) + result = client.create_or_update_index(index=index) assert result.scoring_profiles[0].name == scoring_profile.name assert result.cors_options.allowed_origins == cors_options.allowed_origins assert result.cors_options.max_age_in_seconds == cors_options.max_age_in_seconds @@ -232,11 +232,11 @@ def test_create_or_update_indexes_if_unchanged(self, api_key, endpoint, index_na etag = result.e_tag # get e tag and update index.scoring_profiles = [] - client.create_or_update_index(index.name, index) + client.create_or_update_index(index) index.e_tag = etag with 
pytest.raises(HttpResponseError): - client.create_or_update_index(index.name, index, match_condition=MatchConditions.IfNotModified) + client.create_or_update_index(index, match_condition=MatchConditions.IfNotModified) @SearchResourceGroupPreparer(random_name_enabled=True) @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) @@ -251,10 +251,12 @@ class SearchSynonymMapsClientTest(AzureMgmtTestCase): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_create_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = client.create_synonym_map(synonym_map) assert isinstance(result, SynonymMap) assert result.name == "test-syn-map" assert result.synonyms == [ @@ -267,10 +269,12 @@ def test_create_synonym_map(self, api_key, endpoint, index_name, **kwargs): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_delete_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = client.create_synonym_map(synonym_map) assert len(client.get_synonym_maps()) == 1 client.delete_synonym_map("test-syn-map") assert len(client.get_synonym_maps()) == 0 @@ -279,15 +283,18 @@ def test_delete_synonym_map(self, api_key, endpoint, index_name, **kwargs): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_delete_synonym_map_if_unchanged(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = client.create_synonym_map(synonym_map) etag = result.e_tag - client.create_or_update_synonym_map("test-syn-map", [ + synonym_map.synonyms = "\n".join([ "Washington, Wash. => WA", ]) + client.create_or_update_synonym_map(synonym_map) result.e_tag = etag with pytest.raises(HttpResponseError): @@ -298,10 +305,12 @@ def test_delete_synonym_map_if_unchanged(self, api_key, endpoint, index_name, ** @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_get_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", "Washington, Wash. 
=> WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + client.create_synonym_map(synonym_map) assert len(client.get_synonym_maps()) == 1 result = client.get_synonym_map("test-syn-map") assert isinstance(result, SynonymMap) @@ -315,12 +324,16 @@ def test_get_synonym_map(self, api_key, endpoint, index_name, **kwargs): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_get_synonym_maps(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - client.create_synonym_map("test-syn-map-1", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", ]) - client.create_synonym_map("test-syn-map-2", [ + synonym_map_1 = SynonymMap(name="test-syn-map-1", synonyms=solr_format_synonyms) + client.create_synonym_map(synonym_map_1) + solr_format_synonyms = "\n".join([ "Washington, Wash. => WA", ]) + synonym_map_2 = SynonymMap(name="test-syn-map-2", synonyms=solr_format_synonyms) + client.create_synonym_map(synonym_map_2) result = client.get_synonym_maps() assert isinstance(result, list) assert all(isinstance(x, SynonymMap) for x in result) @@ -330,13 +343,17 @@ def test_get_synonym_maps(self, api_key, endpoint, index_name, **kwargs): @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_create_or_update_synonym_map(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", + "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + client.create_synonym_map(synonym_map) assert len(client.get_synonym_maps()) == 1 - client.create_or_update_synonym_map("test-syn-map", [ + synonym_map.synonyms = "\n".join([ "Washington, Wash. => WA", ]) + client.create_or_update_synonym_map(synonym_map) assert len(client.get_synonym_maps()) == 1 result = client.get_synonym_map("test-syn-map") assert isinstance(result, SynonymMap) @@ -349,15 +366,20 @@ def test_create_or_update_synonym_map(self, api_key, endpoint, index_name, **kwa @SearchServicePreparer(schema=SCHEMA, index_batch=BATCH) def test_create_or_update_synonym_map_if_unchanged(self, api_key, endpoint, index_name, **kwargs): client = SearchIndexClient(endpoint, AzureKeyCredential(api_key)) - result = client.create_synonym_map("test-syn-map", [ + solr_format_synonyms = "\n".join([ "USA, United States, United States of America", + "Washington, Wash. => WA", ]) + synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms) + result = client.create_synonym_map(synonym_map) etag = result.e_tag - client.create_or_update_synonym_map("test-syn-map", [ + synonym_map.synonyms = "\n".join([ "Washington, Wash. 
=> WA", ]) + client.create_or_update_synonym_map(synonym_map) + result.e_tag = etag with pytest.raises(HttpResponseError): client.create_or_update_synonym_map(result, match_condition=MatchConditions.IfNotModified) @@ -373,7 +395,9 @@ def test_create_skillset(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - result = client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + + result = client.create_skillset(skillset) assert isinstance(result, SearchIndexerSkillset) assert result.name == "test-ss" assert result.description == "desc" @@ -390,7 +414,9 @@ def test_delete_skillset(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - result = client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + + result = client.create_skillset(skillset) assert len(client.get_skillsets()) == 1 client.delete_skillset("test-ss") @@ -403,10 +429,13 @@ def test_delete_skillset_if_unchanged(self, api_key, endpoint, index_name, **kwa s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - result = client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + + result = client.create_skillset(skillset) etag = result.e_tag - updated = client.create_or_update_skillset(name='test-ss', skills=[s], description="updated") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="updated") + updated = client.create_or_update_skillset(skillset) updated.e_tag = etag with pytest.raises(HttpResponseError): @@ -419,7 +448,8 @@ def test_get_skillset(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - client.create_skillset(name='test-ss', skills=[s], description="desc") + skillset = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc") + client.create_skillset(skillset) assert len(client.get_skillsets()) == 1 result = client.get_skillset("test-ss") @@ -437,8 +467,10 @@ def test_get_skillsets(self, api_key, endpoint, index_name, **kwargs): s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - client.create_skillset(name='test-ss-1', skills=[s], description="desc1") - client.create_skillset(name='test-ss-2', skills=[s], description="desc2") + skillset1 = SearchIndexerSkillset(name='test-ss-1', skills=list([s]), description="desc1") + client.create_skillset(skillset1) + skillset2 = SearchIndexerSkillset(name='test-ss-2', skills=list([s]), description="desc2") + client.create_skillset(skillset2) result = client.get_skillsets() assert isinstance(result, list) assert 
all(isinstance(x, SearchIndexerSkillset) for x in result) @@ -451,8 +483,10 @@ def test_create_or_update_skillset(self, api_key, endpoint, index_name, **kwargs s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - client.create_or_update_skillset(name='test-ss', skills=[s], description="desc1") - client.create_or_update_skillset(name='test-ss', skills=[s], description="desc2") + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc1") + client.create_or_update_skillset(skillset1) + skillset2 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc2") + client.create_or_update_skillset(skillset2) assert len(client.get_skillsets()) == 1 result = client.get_skillset("test-ss") @@ -467,8 +501,10 @@ def test_create_or_update_skillset_inplace(self, api_key, endpoint, index_name, s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - ss = client.create_or_update_skillset(name='test-ss', skills=[s], description="desc1") - client.create_or_update_skillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc1") + ss = client.create_or_update_skillset(skillset1) + skillset2 = SearchIndexerSkillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + client.create_or_update_skillset(skillset2) assert len(client.get_skillsets()) == 1 result = client.get_skillset("test-ss") @@ -483,16 +519,13 @@ def test_create_or_update_skillset_if_unchanged(self, api_key, endpoint, index_n s = EntityRecognitionSkill(inputs=[InputFieldMappingEntry(name="text", source="/document/content")], outputs=[OutputFieldMappingEntry(name="organizations", target_name="organizations")]) - ss = client.create_or_update_skillset(name='test-ss', skills=[s], description="desc1") + skillset1 = SearchIndexerSkillset(name='test-ss', skills=list([s]), description="desc1") + ss = client.create_or_update_skillset(skillset1) etag = ss.e_tag - - client.create_or_update_skillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + skillset2 = SearchIndexerSkillset(name='test-ss', skills=[s], description="desc2", skillset=ss) + client.create_or_update_skillset(skillset2) assert len(client.get_skillsets()) == 1 - ss.e_tag = etag - with pytest.raises(HttpResponseError): - client.create_or_update_skillset(name='test-ss', skills=[s], skillset=ss, match_condition=MatchConditions.IfNotModified) - class SearchDataSourcesClientTest(AzureMgmtTestCase): def _create_data_source_connection(self, name="sample-datasource"):
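Editor's note: every hunk above applies the same migration, so a consolidated sketch of the new calling convention follows. It is illustrative only and not part of the patch; the endpoint and key placeholders are invented, and the rest reuses only names that appear in the hunks above. Resource definitions are now built as model objects and passed whole to the client, replacing the old name/synonyms/skills keyword arguments:

    from azure.core.credentials import AzureKeyCredential
    from azure.search.documents.indexes import SearchIndexClient
    from azure.search.documents.indexes.models import SynonymMap

    # Placeholders: substitute a real service endpoint and admin key.
    client = SearchIndexClient("<service endpoint>", AzureKeyCredential("<api key>"))

    # Synonyms now travel as a single Solr-format string on the model,
    # not as a list of strings passed alongside a name argument.
    solr_format_synonyms = "\n".join([
        "USA, United States, United States of America",
        "Washington, Wash. => WA",
    ])
    synonym_map = SynonymMap(name="test-syn-map", synonyms=solr_format_synonyms)
    client.create_synonym_map(synonym_map)

    # Updates mutate the model and resubmit it, rather than re-sending kwargs.
    synonym_map.synonyms = "Washington, Wash. => WA"
    client.create_or_update_synonym_map(synonym_map)

Skillsets follow the same object-first pattern: construct a SearchIndexerSkillset(name=..., skills=[...], description=...) and hand it to create_skillset or create_or_update_skillset, exactly as the sample and test hunks above do.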
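Editor's note: the if-unchanged tests retained above also show the optimistic-concurrency idiom that survives this change: keep the e_tag the service returned and pass match_condition on the update. A minimal sketch, assuming the client and synonym_map objects from the previous note:

    from azure.core import MatchConditions
    from azure.core.exceptions import HttpResponseError

    result = client.create_synonym_map(synonym_map)  # service stamps result.e_tag
    # ... the resource is then modified on the service by someone else ...
    try:
        # Raises HttpResponseError when the stored e_tag no longer matches.
        client.create_or_update_synonym_map(result, match_condition=MatchConditions.IfNotModified)
    except HttpResponseError:
        pass  # the synonym map changed since we last read it

Note that the corresponding if-unchanged assertions for skillsets are deleted by this patch rather than migrated; only the index and synonym map tests keep their match_condition checks.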