typing fixes. #34208

Merged 2 commits on Feb 7, 2024
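This PR promotes parameters that were previously consumed through **kwargs into explicit, typed keyword-only arguments: filter on autocomplete (sync and async), overwrite on reset_documents, match_condition on the async delete_index, and the skip_indexer_reset_requirement_for_cache and disable_cache_reprocessing_change_detection flags on the create-or-update methods for indexers, data source connections, and skillsets. It also corrects docstrings whose :param: names or :return: descriptions did not match the signatures, and swaps a dict() call for a dict literal in get_access_conditions.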
@@ -292,6 +292,7 @@ def search(
 :keyword vector_filter_mode: Determines whether or not filters are applied before or after the
 vector search is performed. Default is 'preFilter'. Known values are: "postFilter" and "preFilter".
 :paramtype vector_filter_mode: str or VectorFilterMode
+:return: List of search results.
 :rtype: SearchItemPaged[dict]

 .. admonition:: Example:
@@ -429,7 +430,7 @@ def suggest(
 :keyword int top: The number of suggestions to retrieve. The value must be a number between 1 and
 100. The default is 5.

-:return: List of documents.
+:return: List of suggestion results.
 :rtype: list[dict]

 .. admonition:: Example:
@@ -474,6 +475,7 @@ def autocomplete(
 suggester_name: str,
 *,
 mode: Optional[Union[str, AutocompleteMode]] = None,
+filter: Optional[str] = None,
 use_fuzzy_matching: Optional[bool] = None,
 highlight_post_tag: Optional[str] = None,
 highlight_pre_tag: Optional[str] = None,
@@ -510,6 +512,7 @@ def autocomplete(
 terms. Target fields must be included in the specified suggester.
 :keyword int top: The number of auto-completed terms to retrieve. This must be a value between 1 and
 100. The default is 5.
+:return: List of auto-completion results.
 :rtype: list[dict]

 .. admonition:: Example:
@@ -522,7 +525,7 @@
 :caption: Get a auto-completions.
 """
 autocomplete_mode = mode
-filter_arg = kwargs.pop("filter", None)
+filter_arg = filter
 search_fields_str = ",".join(search_fields) if search_fields else None
 query = AutocompleteQuery(
 search_text=search_text,
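With filter promoted from **kwargs to a declared keyword-only parameter, a call site can pass it directly. A minimal sketch, assuming placeholder service, index, suggester, and filter values (none of these come from the PR):

from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchClient

client = SearchClient(
    endpoint="https://<service>.search.windows.net",
    index_name="hotels-sample-index",
    credential=AzureKeyCredential("<api-key>"),
)

# Previously read via kwargs.pop("filter", None); now a typed keyword-only
# parameter, visible to type checkers and IDE completion.
results = client.autocomplete(
    search_text="lux",
    suggester_name="sg",
    filter="Rating ge 4",  # placeholder OData expression
    use_fuzzy_matching=True,
    top=5,
)
for result in results:
    print(result["text"])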
@@ -296,6 +296,7 @@ async def search(
 vector search is performed. Default is 'preFilter'. Known values are: "postFilter" and "preFilter".
 :paramtype vector_filter_mode: str or VectorFilterMode
 :return: A list of documents (dicts) matching the specified search criteria.
+:return: List of search results.
 :rtype: AsyncSearchItemPaged[dict]

 .. admonition:: Example:
@@ -427,7 +428,7 @@ async def suggest(
 included in the results.
 :keyword int top: The number of suggestions to retrieve. The value must be a number between 1 and
 100. The default is 5.
-:return: List of documents.
+:return: List of suggestion results.
 :rtype: list[dict]

 .. admonition:: Example:
@@ -472,6 +473,7 @@ async def autocomplete(
 suggester_name: str,
 *,
 mode: Optional[Union[str, AutocompleteMode]] = None,
+filter: Optional[str] = None,
 use_fuzzy_matching: Optional[bool] = None,
 highlight_post_tag: Optional[str] = None,
 highlight_pre_tag: Optional[str] = None,
@@ -508,6 +510,7 @@ async def autocomplete(
 terms. Target fields must be included in the specified suggester.
 :keyword int top: The number of auto-completed terms to retrieve. This must be a value between 1 and
 100. The default is 5.
+:return: List of auto-completion results.
 :rtype: list[Dict]

 .. admonition:: Example:
@@ -520,7 +523,7 @@
 :caption: Get a auto-completions.
 """
 autocomplete_mode = mode
-filter_arg = kwargs.pop("filter", None)
+filter_arg = filter
 search_fields_str = ",".join(search_fields) if search_fields else None
 query = AutocompleteQuery(
 search_text=search_text,
@@ -385,8 +385,8 @@ def delete_synonym_map(
 the SynonymMap model must be provided instead of the name. It is enough to provide
 the name of the synonym map to delete unconditionally.

-:param name: The synonym map name or object to delete
-:type name: str or ~azure.search.documents.indexes.models.SynonymMap
+:param synonym_map: The synonym map name or object to delete
+:type synonym_map: str or ~azure.search.documents.indexes.models.SynonymMap
 :keyword match_condition: The match condition to use upon the etag
 :paramtype match_condition: ~azure.core.MatchConditions

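The docstring now names the real parameter, synonym_map. A sketch of the etag-guarded delete it describes, with placeholder endpoint, key, and map name:

from azure.core import MatchConditions
from azure.core.credentials import AzureKeyCredential
from azure.search.documents.indexes import SearchIndexClient

client = SearchIndexClient(
    endpoint="https://<service>.search.windows.net",
    credential=AzureKeyCredential("<api-key>"),
)

# Passing the model rather than its name supplies an etag, so the match
# condition can guard the delete; a bare string deletes unconditionally.
synonym_map = client.get_synonym_map("my-synonym-map")
client.delete_synonym_map(synonym_map, match_condition=MatchConditions.IfNotModified)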
@@ -111,6 +111,8 @@ def create_or_update_indexer(
 indexer: SearchIndexer,
 *,
 match_condition: MatchConditions = MatchConditions.Unconditionally,
+skip_indexer_reset_requirement_for_cache: Optional[bool] = None,
+disable_cache_reprocessing_change_detection: Optional[bool] = None,
 **kwargs: Any
 ) -> SearchIndexer:
 """Creates a new indexer or updates an indexer if it already exists.
@@ -132,7 +134,13 @@ def create_or_update_indexer(
 kwargs.update(access_condition)
 name = indexer.name
 result = self._client.indexers.create_or_update(
-indexer_name=name, indexer=indexer, prefer="return=representation", error_map=error_map, **kwargs
+indexer_name=name,
+indexer=indexer,
+prefer="return=representation",
+error_map=error_map,
+skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
+disable_cache_reprocessing_change_detection=disable_cache_reprocessing_change_detection,
+**kwargs
 )
 return result

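Both cache flags are now declared keyword-only parameters and forwarded explicitly to the generated client (the same flags are added to the data source connection and skillset methods further down). A sketch of a call site, with placeholder endpoint, key, and indexer name:

from azure.core.credentials import AzureKeyCredential
from azure.search.documents.indexes import SearchIndexerClient

client = SearchIndexerClient(
    endpoint="https://<service>.search.windows.net",
    credential=AzureKeyCredential("<api-key>"),
)

indexer = client.get_indexer("my-indexer")
# Formerly smuggled through **kwargs; now typed Optional[bool] keywords.
client.create_or_update_indexer(
    indexer,
    skip_indexer_reset_requirement_for_cache=True,
    disable_cache_reprocessing_change_detection=False,
)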
@@ -281,7 +289,12 @@ def reset_indexer(self, name: str, **kwargs: Any) -> None:

 @distributed_trace
 def reset_documents(
-self, indexer: Union[str, SearchIndexer], keys_or_ids: DocumentKeysOrIds, **kwargs: Any
+self,
+indexer: Union[str, SearchIndexer],
+keys_or_ids: DocumentKeysOrIds,
+*,
+overwrite: bool = False,
+**kwargs: Any
 ) -> None:
 """Resets specific documents in the datasource to be selectively re-ingested by the indexer.

@@ -302,7 +315,7 @@ def reset_documents(
 name = indexer.name # type: ignore
 except AttributeError:
 name = indexer
-return self._client.indexers.reset_docs(name, **kwargs)
+return self._client.indexers.reset_docs(name, overwrite=overwrite, **kwargs)

 @distributed_trace
 def get_indexer_status(self, name: str, **kwargs: Any) -> SearchIndexerStatus:
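reset_documents likewise declares overwrite (default False) and forwards it to reset_docs. Reusing the client from the sketch above, with placeholder document keys; the flag is understood to replace, rather than append to, any keys already queued for re-ingestion:

from azure.search.documents.indexes.models import DocumentKeysOrIds

keys = DocumentKeysOrIds(document_keys=["hotel-1", "hotel-7"])
client.reset_documents("my-indexer", keys, overwrite=True)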
@@ -358,6 +371,7 @@ def create_or_update_data_source_connection(
 data_source_connection: SearchIndexerDataSourceConnection,
 *,
 match_condition: MatchConditions = MatchConditions.Unconditionally,
+skip_indexer_reset_requirement_for_cache: Optional[bool] = None,
 **kwargs: Any
 ) -> SearchIndexerDataSourceConnection:
 """Creates a new data source connection or updates a data source connection if it already exists.
@@ -381,6 +395,7 @@
 data_source=packed_data_source,
 prefer="return=representation",
 error_map=error_map,
+skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
 **kwargs
 )
 # pylint:disable=protected-access
@@ -584,6 +599,8 @@ def create_or_update_skillset(
 skillset: SearchIndexerSkillset,
 *,
 match_condition: MatchConditions = MatchConditions.Unconditionally,
+skip_indexer_reset_requirement_for_cache: Optional[bool] = None,
+disable_cache_reprocessing_change_detection: Optional[bool] = None,
 **kwargs: Any
 ) -> SearchIndexerSkillset:
 # pylint:disable=protected-access
@@ -614,6 +631,8 @@
 skillset=skillset_gen,
 prefer="return=representation",
 error_map=error_map,
+skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
+disable_cache_reprocessing_change_detection=disable_cache_reprocessing_change_detection,
 **kwargs
 )
 return SearchIndexerSkillset._from_generated(result) # pylint:disable=protected-access
@@ -64,7 +64,7 @@ def get_access_conditions(
 error_map[412] = ResourceNotFoundError
 if match_condition == MatchConditions.IfMissing:
 error_map[412] = ResourceExistsError
-return error_map, dict(if_match=if_match, if_none_match=if_none_match)
+return error_map, {"if_match": if_match, "if_none_match": if_none_match}
 except AttributeError as ex:
 raise ValueError("Unable to get e_tag from the model") from ex

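The constructor call and the literal build the same dict at runtime; presumably the literal is preferred here because it hands type checkers a concrete value type directly instead of going through the dict(**kwargs) overload (an assumption consistent with the PR title). A quick equivalence check:

from typing import Optional

if_match: Optional[str] = '"0x1234"'  # placeholder etag
if_none_match: Optional[str] = None

assert dict(if_match=if_match, if_none_match=if_none_match) == {
    "if_match": if_match,
    "if_none_match": if_none_match,
}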
@@ -163,7 +163,13 @@ async def get_index_statistics(self, index_name: str, **kwargs: Any) -> MutableM
 return result.as_dict()

 @distributed_trace_async
-async def delete_index(self, index: Union[str, SearchIndex], **kwargs: Any) -> None:
+async def delete_index(
+self,
+index: Union[str, SearchIndex],
+*,
+match_condition: MatchConditions = MatchConditions.Unconditionally,
+**kwargs: Any
+) -> None:
 """Deletes a search index and all the documents it contains. The model must be
 provided instead of the name to use the access conditions

@@ -183,9 +189,7 @@ async def delete_index(self, index: Union[str, SearchIndex], **kwargs: Any) -> N
 :caption: Delete an index.
 """
 kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
-error_map, access_condition = get_access_conditions(
-index, kwargs.pop("match_condition", MatchConditions.Unconditionally)
-)
+error_map, access_condition = get_access_conditions(index, match_condition)
 kwargs.update(access_condition)
 try:
 index_name = index.name # type: ignore
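match_condition now sits in the async delete_index signature instead of being popped from kwargs. A sketch of an etag-guarded delete, with placeholder endpoint, key, and index name:

import asyncio

from azure.core import MatchConditions
from azure.core.credentials import AzureKeyCredential
from azure.search.documents.indexes.aio import SearchIndexClient

async def delete_if_unchanged() -> None:
    async with SearchIndexClient(
        endpoint="https://<service>.search.windows.net",
        credential=AzureKeyCredential("<api-key>"),
    ) as client:
        # The model (with its etag) must be passed for the condition to apply;
        # a bare name deletes unconditionally.
        index = await client.get_index("hotels-sample-index")
        await client.delete_index(index, match_condition=MatchConditions.IfNotModified)

asyncio.run(delete_if_unchanged())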
@@ -378,8 +382,8 @@ async def delete_synonym_map(
 the SynonymMap model must be provided instead of the name. It is enough to provide
 the name of the synonym map to delete unconditionally.

-:param name: The synonym map name or object to delete
-:type name: str or ~azure.search.documents.indexes.models.SynonymMap
+:param synonym_map: The synonym map name or object to delete
+:type synonym_map: str or ~azure.search.documents.indexes.models.SynonymMap
 :keyword match_condition: The match condition to use upon the etag
 :paramtype match_condition: ~azure.core.MatchConditions

@@ -107,12 +107,16 @@ async def create_or_update_indexer(
 indexer: SearchIndexer,
 *,
 match_condition: MatchConditions = MatchConditions.Unconditionally,
+skip_indexer_reset_requirement_for_cache: Optional[bool] = None,
+disable_cache_reprocessing_change_detection: Optional[bool] = None,
 **kwargs: Any
 ) -> SearchIndexer:
 """Creates a new indexer or updates a indexer if it already exists.

 :param indexer: The definition of the indexer to create or update.
 :type indexer: ~azure.search.documents.indexes.models.SearchIndexer
+:keyword match_condition: The match condition to use upon the etag
+:paramtype match_condition: ~azure.core.MatchConditions
 :keyword skip_indexer_reset_requirement_for_cache: Ignores cache reset requirements.
 :paramtype skip_indexer_reset_requirement_for_cache: bool
 :keyword disable_cache_reprocessing_change_detection: Disables cache reprocessing change
@@ -126,7 +130,13 @@ def create_or_update_indexer(
 kwargs.update(access_condition)
 name = indexer.name
 result = await self._client.indexers.create_or_update(
-indexer_name=name, indexer=indexer, prefer="return=representation", error_map=error_map, **kwargs
+indexer_name=name,
+indexer=indexer,
+prefer="return=representation",
+error_map=error_map,
+skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
+disable_cache_reprocessing_change_detection=disable_cache_reprocessing_change_detection,
+**kwargs
 )
 return result

@@ -203,8 +213,8 @@ async def delete_indexer(
 must be provided instead of the name. It is enough to provide
 the name of the indexer to delete unconditionally.

-:param name: The name or the indexer object to delete.
-:type name: str or ~azure.search.documents.indexes.models.SearchIndexer
+:param indexer: The name or the indexer object to delete.
+:type indexer: str or ~azure.search.documents.indexes.models.SearchIndexer
 :keyword match_condition: The match condition to use upon the etag
 :paramtype match_condition: ~azure.core.MatchConditions

@@ -266,7 +276,12 @@ async def reset_indexer(self, name: str, **kwargs: Any) -> None:

 @distributed_trace_async
 async def reset_documents(
-self, indexer: Union[str, SearchIndexer], keys_or_ids: DocumentKeysOrIds, **kwargs: Any
+self,
+indexer: Union[str, SearchIndexer],
+keys_or_ids: DocumentKeysOrIds,
+*,
+overwrite: bool = False,
+**kwargs: Any
 ) -> None:
 """Resets specific documents in the datasource to be selectively re-ingested by the indexer.

@@ -287,7 +302,7 @@ async def reset_documents(
 name = indexer.name # type: ignore
 except AttributeError:
 name = indexer
-await self._client.indexers.reset_docs(name, **kwargs)
+await self._client.indexers.reset_docs(name, overwrite=overwrite, **kwargs)
 return

 @distributed_trace_async
@@ -343,6 +358,7 @@ async def create_or_update_data_source_connection(
 data_source_connection: SearchIndexerDataSourceConnection,
 *,
 match_condition: MatchConditions = MatchConditions.Unconditionally,
+skip_indexer_reset_requirement_for_cache: Optional[bool] = None,
 **kwargs: Any
 ) -> SearchIndexerDataSourceConnection:
 """Creates a new data source connection or updates a data source connection if it already exists.
@@ -369,6 +385,7 @@
 data_source=packed_data_source,
 prefer="return=representation",
 error_map=error_map,
+skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
 **kwargs
 )
 return SearchIndexerDataSourceConnection._from_generated(result)
@@ -570,6 +587,8 @@ async def create_or_update_skillset(
 skillset: SearchIndexerSkillset,
 *,
 match_condition: MatchConditions = MatchConditions.Unconditionally,
+skip_indexer_reset_requirement_for_cache: Optional[bool] = None,
+disable_cache_reprocessing_change_detection: Optional[bool] = None,
 **kwargs: Any
 ) -> SearchIndexerSkillset:
 # pylint:disable=protected-access
@@ -599,6 +618,8 @@
 skillset=skillset_gen,
 prefer="return=representation",
 error_map=error_map,
+skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
+disable_cache_reprocessing_change_detection=disable_cache_reprocessing_change_detection,
 **kwargs
 )
 return SearchIndexerSkillset._from_generated(result) # pylint:disable=protected-access