diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c0dbff6..e9d8d47c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ### Deprecated ### Removed - Removed redundant dependency on six ([#781](https://github.com/opensearch-project/opensearch-py/pull/781)) +- Removed redundant dependency on mock and upgrade Python syntax ([#785](https://github.com/opensearch-project/opensearch-py/pull/785)) ### Fixed - Fixed Search helper to ensure proper retention of the _collapse attribute in chained operations. ([#771](https://github.com/opensearch-project/opensearch-py/pull/771)) ### Updated APIs diff --git a/dev-requirements.txt b/dev-requirements.txt index 979bc04a..6af5b651 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -2,7 +2,6 @@ requests>=2, <3 pytest pytest-cov coverage -mock sphinx<7.4 sphinx_rtd_theme jinja2 diff --git a/noxfile.py b/noxfile.py index b727b60a..a0fcc795 100644 --- a/noxfile.py +++ b/noxfile.py @@ -93,7 +93,6 @@ def lint(session: Any) -> None: "types-simplejson", "types-python-dateutil", "types-PyYAML", - "types-mock", "types-pytz", ) diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 4cf251db..94b6d056 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -26,7 +26,6 @@ # flake8: noqa -from __future__ import absolute_import import logging import re @@ -34,9 +33,9 @@ from ._version import __versionstr__ -_major, _minor, _patch = [ +_major, _minor, _patch = ( int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() # type: ignore -] +) VERSION = __version__ = (_major, _minor, _patch) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index fa2b9f65..b78952ac 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -34,8 +34,6 @@ # -----------------------------------------------------------------------------------------+ -from __future__ import unicode_literals - import logging from typing import Any, Type @@ -197,7 +195,7 @@ def __init__( self, hosts: Any = None, transport_class: Type[AsyncTransport] = AsyncTransport, - **kwargs: Any + **kwargs: Any, ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. @@ -240,10 +238,10 @@ def __repr__(self) -> Any: # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] - return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons) + return f"<{self.__class__.__name__}({cons})>" except Exception: # probably operating on custom transport and connection_pool, ignore - return super(AsyncOpenSearch, self).__repr__() + return super().__repr__() async def __aenter__(self) -> Any: if hasattr(self.transport, "_async_call"): diff --git a/opensearchpy/_async/client/client.py b/opensearchpy/_async/client/client.py index 091bb5e9..40807360 100644 --- a/opensearchpy/_async/client/client.py +++ b/opensearchpy/_async/client/client.py @@ -13,7 +13,7 @@ from opensearchpy.transport import Transport -class Client(object): +class Client: """ A generic async OpenSearch client. 
""" diff --git a/opensearchpy/_async/client/http.py b/opensearchpy/_async/client/http.py index 63d4df27..b0125d9c 100644 --- a/opensearchpy/_async/client/http.py +++ b/opensearchpy/_async/client/http.py @@ -15,7 +15,7 @@ class HttpClient(NamespacedClient): def __init__(self, client: Client) -> None: - super(HttpClient, self).__init__(client) + super().__init__(client) async def get( self, diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index 5d79d20e..231abf97 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -26,7 +26,7 @@ class PluginsClient(NamespacedClient): index_management: Any def __init__(self, client: Client) -> None: - super(PluginsClient, self).__init__(client) + super().__init__(client) self.ml = MlClient(client) self.transforms = TransformsClient(client) self.rollups = RollupsClient(client) diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index dc7c5ff3..594687b1 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -117,7 +117,7 @@ def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( - "{}={!r}".format(key, getattr(self.meta, key)) + f"{key}={getattr(self.meta, key)!r}" for key in ("index", "id") if key in self.meta ), @@ -249,7 +249,7 @@ async def mget( raise RequestError(400, message, error_docs) if missing_docs: missing_ids = [doc["_id"] for doc in missing_docs] - message = "Documents %s not found." % ", ".join(missing_ids) + message = f"Documents {', '.join(missing_ids)} not found." raise NotFoundError(404, message, {"docs": missing_docs}) return objs diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index e7b33748..424ca531 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -18,7 +18,7 @@ from opensearchpy.helpers.utils import merge -class AsyncIndexTemplate(object): +class AsyncIndexTemplate: def __init__( self, name: Any, @@ -57,7 +57,7 @@ async def save(self, using: Any = None) -> Any: ) -class AsyncIndex(object): +class AsyncIndex: def __init__(self, name: Any, using: Any = "default") -> None: """ :arg name: name of the index diff --git a/opensearchpy/_async/helpers/update_by_query.py b/opensearchpy/_async/helpers/update_by_query.py index 7ea3b906..93a826d6 100644 --- a/opensearchpy/_async/helpers/update_by_query.py +++ b/opensearchpy/_async/helpers/update_by_query.py @@ -31,7 +31,7 @@ def __init__(self, **kwargs: Any) -> None: overridden by methods (`using`, `index` and `doc_type` respectively). """ - super(AsyncUpdateByQuery, self).__init__(**kwargs) + super().__init__(**kwargs) self._response_class = UpdateByQueryResponse self._script: Any = {} self._query_proxy = QueryProxy(self, "query") @@ -70,7 +70,7 @@ def _clone(self) -> Any: of all the underlying objects. Used internally by most state modifying APIs. 
""" - ubq = super(AsyncUpdateByQuery, self)._clone() + ubq = super()._clone() ubq._response_class = self._response_class ubq._script = self._script.copy() diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index c49fd574..4c383914 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -93,7 +93,7 @@ def __init__( opaque_id: Optional[str] = None, loop: Any = None, trust_env: Optional[bool] = False, - **kwargs: Any + **kwargs: Any, ) -> None: """ Default connection class for ``AsyncOpenSearch`` using the `aiohttp` library and the http protocol. @@ -140,7 +140,7 @@ def __init__( headers=headers, http_compress=http_compress, opaque_id=opaque_id, - **kwargs + **kwargs, ) if http_auth is not None: @@ -276,7 +276,7 @@ async def perform_request( else: url = self.url_prefix + url if query_string: - url = "%s?%s" % (url, query_string) + url = f"{url}?{query_string}" url = self.host + url timeout = aiohttp.ClientTimeout( diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index 69db28d9..bc9e257f 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -121,7 +121,7 @@ def __init__( self._async_init_called = False self._sniff_on_start_event: Optional[asyncio.Event] = None - super(AsyncTransport, self).__init__( + super().__init__( hosts=[], connection_class=connection_class, connection_pool_class=connection_pool_class, diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 37d69b54..07558cc5 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -34,8 +34,6 @@ # -----------------------------------------------------------------------------------------+ -from __future__ import unicode_literals - import logging from typing import Any, Type @@ -197,7 +195,7 @@ def __init__( self, hosts: Any = None, transport_class: Type[Transport] = Transport, - **kwargs: Any + **kwargs: Any, ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. @@ -240,10 +238,10 @@ def __repr__(self) -> Any: # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] - return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons) + return f"<{self.__class__.__name__}({cons})>" except Exception: # probably operating on custom transport and connection_pool, ignore - return super(OpenSearch, self).__repr__() + return super().__repr__() def __enter__(self) -> Any: if hasattr(self.transport, "_async_call"): diff --git a/opensearchpy/client/client.py b/opensearchpy/client/client.py index 091bb5e9..40807360 100644 --- a/opensearchpy/client/client.py +++ b/opensearchpy/client/client.py @@ -13,7 +13,7 @@ from opensearchpy.transport import Transport -class Client(object): +class Client: """ A generic async OpenSearch client. 
""" diff --git a/opensearchpy/client/http.py b/opensearchpy/client/http.py index 465a724c..b9b80085 100644 --- a/opensearchpy/client/http.py +++ b/opensearchpy/client/http.py @@ -15,7 +15,7 @@ class HttpClient(NamespacedClient): def __init__(self, client: Client) -> None: - super(HttpClient, self).__init__(client) + super().__init__(client) def get( self, diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py index 5d79d20e..231abf97 100644 --- a/opensearchpy/client/plugins.py +++ b/opensearchpy/client/plugins.py @@ -26,7 +26,7 @@ class PluginsClient(NamespacedClient): index_management: Any def __init__(self, client: Client) -> None: - super(PluginsClient, self).__init__(client) + super().__init__(client) self.ml = MlClient(client) self.transforms = TransformsClient(client) self.rollups = RollupsClient(client) diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index a38c30cd..d7fd5c5a 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -25,8 +25,6 @@ # under the License. -from __future__ import unicode_literals - import base64 import weakref from datetime import date, datetime @@ -59,7 +57,7 @@ def _normalize_hosts(hosts: Any) -> Any: for host in hosts: if isinstance(host, string_types): if "://" not in host: - host = "//%s" % host # type: ignore + host = f"//{host}" # type: ignore parsed_url = urlparse(host) h = {"host": parsed_url.hostname} @@ -72,7 +70,7 @@ def _normalize_hosts(hosts: Any) -> Any: h["use_ssl"] = True if parsed_url.username or parsed_url.password: - h["http_auth"] = "%s:%s" % ( + h["http_auth"] = "{}:{}".format( unquote(parsed_url.username), unquote(parsed_url.password), ) @@ -160,11 +158,9 @@ def _wrapped(*args: Any, **kwargs: Any) -> Any: "Only one of 'http_auth' and 'api_key' may be passed at a time" ) elif http_auth is not None: - headers["authorization"] = "Basic %s" % ( - _base64_auth_header(http_auth), - ) + headers["authorization"] = f"Basic {_base64_auth_header(http_auth)}" elif api_key is not None: - headers["authorization"] = "ApiKey %s" % (_base64_auth_header(api_key),) + headers["authorization"] = f"ApiKey {_base64_auth_header(api_key)}" # don't escape ignore, request_timeout, or timeout for p in ("ignore", "request_timeout", "timeout"): @@ -209,7 +205,7 @@ def _base64_auth_header(auth_value: Any) -> str: return to_str(auth_value) -class NamespacedClient(object): +class NamespacedClient: def __init__(self, client: Any) -> None: self.client = client diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index dfba0101..2d39a9ca 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -68,7 +68,7 @@ async def remove_connection(self, alias: str) -> None: errors += 1 if errors == 2: - raise KeyError("There is no connection with alias %r." % alias) + raise KeyError(f"There is no connection with alias {alias!r}.") async def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ @@ -104,7 +104,7 @@ async def get_connection(self, alias: str = "default") -> Any: return await self.create_connection(alias, **self._kwargs[alias]) except KeyError: # no connection and no kwargs to set one up - raise KeyError("There is no connection with alias %r." 
% alias) + raise KeyError(f"There is no connection with alias {alias!r}.") async_connections = AsyncConnections() diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index 845598a6..b9b398a5 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -53,7 +53,7 @@ _WARNING_RE = re.compile(r"\"([^\"]*)\"") -class Connection(object): +class Connection: """ Class responsible for maintaining a connection to an OpenSearch node. It holds persistent connection pool to it and its main interface @@ -81,7 +81,7 @@ def __init__( headers: Optional[Dict[str, str]] = None, http_compress: Optional[bool] = None, opaque_id: Optional[str] = None, - **kwargs: Any + **kwargs: Any, ) -> None: if port is None: port = 9200 @@ -119,27 +119,27 @@ def __init__( self.hostname = host self.port = port if ":" in host: # IPv6 - self.host = "%s://[%s]" % (scheme, host) + self.host = f"{scheme}://[{host}]" else: - self.host = "%s://%s" % (scheme, host) + self.host = f"{scheme}://{host}" if self.port is not None: - self.host += ":%s" % self.port + self.host += f":{self.port}" if url_prefix: url_prefix = "/" + url_prefix.strip("/") self.url_prefix = url_prefix self.timeout = timeout def __repr__(self) -> str: - return "<%s: %s>" % (self.__class__.__name__, self.host) + return f"<{self.__class__.__name__}: {self.host}>" def __eq__(self, other: object) -> bool: if not isinstance(other, Connection): - raise TypeError("Unsupported equality check for %s and %s" % (self, other)) + raise TypeError(f"Unsupported equality check for {self} and {other}") return self.__hash__() == other.__hash__() def __lt__(self, other: object) -> bool: if not isinstance(other, Connection): - raise TypeError("Unsupported lt check for %s and %s" % (self, other)) + raise TypeError(f"Unsupported lt check for {self} and {other}") return self.__hash__() < other.__hash__() def __hash__(self) -> int: @@ -317,7 +317,7 @@ def _raise_error( ) def _get_default_user_agent(self) -> str: - return "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()) + return f"opensearch-py/{__versionstr__} (Python {python_version()})" @staticmethod def default_ca_certs() -> Union[str, None]: diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index e4869616..1dc26bf9 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -82,7 +82,7 @@ def remove_connection(self, alias: str) -> None: errors += 1 if errors == 2: - raise KeyError("There is no connection with alias %r." % alias) + raise KeyError(f"There is no connection with alias {alias!r}.") def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ @@ -118,7 +118,7 @@ def get_connection(self, alias: str = "default") -> Any: return self.create_connection(alias, **self._kwargs[alias]) except KeyError: # no connection and no kwargs to set one up - raise KeyError("There is no connection with alias %r." 
% alias) + raise KeyError(f"There is no connection with alias {alias!r}.") connections = Connections() diff --git a/opensearchpy/connection/http_async.py b/opensearchpy/connection/http_async.py index f5a4ec7c..ea4af477 100644 --- a/opensearchpy/connection/http_async.py +++ b/opensearchpy/connection/http_async.py @@ -52,7 +52,7 @@ def __init__( http_compress: Optional[bool] = None, opaque_id: Optional[str] = None, loop: Any = None, - **kwargs: Any + **kwargs: Any, ) -> None: self.headers = {} @@ -63,7 +63,7 @@ def __init__( headers=headers, http_compress=http_compress, opaque_id=opaque_id, - **kwargs + **kwargs, ) if http_auth is not None: @@ -186,7 +186,7 @@ async def perform_request( # then we pass a string into ClientSession.request() instead. url = self.url_prefix + url if query_string: - url = "%s?%s" % (url, query_string) + url = f"{url}?{query_string}" url = self.host + url timeout = aiohttp.ClientTimeout( diff --git a/opensearchpy/connection/http_requests.py b/opensearchpy/connection/http_requests.py index 2c173725..431992e9 100644 --- a/opensearchpy/connection/http_requests.py +++ b/opensearchpy/connection/http_requests.py @@ -92,7 +92,7 @@ def __init__( opaque_id: Any = None, pool_maxsize: Any = None, metrics: Metrics = MetricsNone(), - **kwargs: Any + **kwargs: Any, ) -> None: self.metrics = metrics if not REQUESTS_AVAILABLE: @@ -111,14 +111,14 @@ def __init__( self.session.mount("http://", pool_adapter) self.session.mount("https://", pool_adapter) - super(RequestsHttpConnection, self).__init__( + super().__init__( host=host, port=port, use_ssl=use_ssl, headers=headers, http_compress=http_compress, opaque_id=opaque_id, - **kwargs + **kwargs, ) if not self.http_compress: @@ -132,10 +132,7 @@ def __init__( http_auth = tuple(http_auth.split(":", 1)) # type: ignore self.session.auth = http_auth - self.base_url = "%s%s" % ( - self.host, - self.url_prefix, - ) + self.base_url = f"{self.host}{self.url_prefix}" self.session.verify = verify_certs if not client_key: self.session.cert = client_cert @@ -176,7 +173,7 @@ def perform_request( # type: ignore url = self.base_url + url headers = headers or {} if params: - url = "%s?%s" % (url, urlencode(params or {})) + url = f"{url}?{urlencode(params or {})}" orig_body = body if self.http_compress and body: diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index e3b60cf3..1c0d6243 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -121,20 +121,20 @@ def __init__( http_compress: Any = None, opaque_id: Any = None, metrics: Metrics = MetricsNone(), - **kwargs: Any + **kwargs: Any, ) -> None: self.metrics = metrics # Initialize headers before calling super().__init__(). 
self.headers = urllib3.make_headers(keep_alive=True) - super(Urllib3HttpConnection, self).__init__( + super().__init__( host=host, port=port, use_ssl=use_ssl, headers=headers, http_compress=http_compress, opaque_id=opaque_id, - **kwargs + **kwargs, ) self.http_auth = http_auth @@ -245,7 +245,7 @@ def perform_request( url = self.url_prefix + url if params: - url = "%s?%s" % (url, urlencode(params)) + url = f"{url}?{urlencode(params)}" full_url = self.host + url diff --git a/opensearchpy/connection/pooling.py b/opensearchpy/connection/pooling.py index a7a61f4b..4ba06d3c 100644 --- a/opensearchpy/connection/pooling.py +++ b/opensearchpy/connection/pooling.py @@ -47,7 +47,7 @@ class PoolingConnection(Connection): def __init__(self, *args: Any, **kwargs: Any) -> None: self._free_connections = queue.Queue() - super(PoolingConnection, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _make_connection(self) -> Connection: raise NotImplementedError diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index d796aa13..854974be 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -38,7 +38,7 @@ logger: logging.Logger = logging.getLogger("opensearch") -class ConnectionSelector(object): +class ConnectionSelector: """ Simple class used to select a connection from a list of currently live connection instances. In init time it is passed a dictionary containing all @@ -87,7 +87,7 @@ class RoundRobinSelector(ConnectionSelector): """ def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: - super(RoundRobinSelector, self).__init__(opts) + super().__init__(opts) self.data = threading.local() def select(self, connections: Sequence[Connection]) -> Any: @@ -96,7 +96,7 @@ def select(self, connections: Sequence[Connection]) -> Any: return connections[self.data.rr] -class ConnectionPool(object): +class ConnectionPool: """ Container holding the :class:`~opensearchpy.Connection` instances, managing the selection process (via a @@ -135,7 +135,7 @@ def __init__( timeout_cutoff: int = 5, selector_class: Type[ConnectionSelector] = RoundRobinSelector, randomize_hosts: bool = True, - **kwargs: Any + **kwargs: Any, ) -> None: """ :arg connections: list of tuples containing the @@ -290,7 +290,7 @@ def close(self) -> Any: conn.close() def __repr__(self) -> str: - return "<%s: %r>" % (type(self).__name__, self.connections) + return f"<{type(self).__name__}: {self.connections!r}>" class DummyConnectionPool(ConnectionPool): diff --git a/opensearchpy/exceptions.py b/opensearchpy/exceptions.py index fdea1e2a..419feadc 100644 --- a/opensearchpy/exceptions.py +++ b/opensearchpy/exceptions.py @@ -120,7 +120,7 @@ def __str__(self) -> str: except LookupError: pass msg = ", ".join(filter(None, [str(self.status_code), repr(self.error), cause])) - return "%s(%s)" % (self.__class__.__name__, msg) + return f"{self.__class__.__name__}({msg})" class ConnectionError(TransportError): @@ -131,7 +131,7 @@ class ConnectionError(TransportError): """ def __str__(self) -> str: - return "ConnectionError(%s) caused by: %s(%s)" % ( + return "ConnectionError({}) caused by: {}({})".format( self.error, self.info.__class__.__name__, self.info, @@ -146,7 +146,7 @@ class ConnectionTimeout(ConnectionError): """A network timeout. 
Doesn't cause a node retry by default.""" def __str__(self) -> str: - return "ConnectionTimeout caused by - %s(%s)" % ( + return "ConnectionTimeout caused by - {}({})".format( self.info.__class__.__name__, self.info, ) diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index c7f24139..bb6c6266 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -198,7 +198,7 @@ def _process_bulk_chunk_success( yield ok, {op_type: item} if errors: - raise BulkIndexError("%i document(s) failed to index." % len(errors), errors) + raise BulkIndexError(f"{len(errors)} document(s) failed to index.", errors) def _process_bulk_chunk_error( @@ -228,7 +228,7 @@ def _process_bulk_chunk_error( # emulate standard behavior for failed actions if raise_on_error and error.status_code not in ignore_status: raise BulkIndexError( - "%i document(s) failed to index." % len(exc_errors), exc_errors + f"{len(exc_errors)} document(s) failed to index.", exc_errors ) else: for err in exc_errors: @@ -243,7 +243,7 @@ def _process_bulk_chunk( raise_on_error: bool = True, ignore_status: Any = (), *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Send a bulk request to opensearch and process the output. @@ -269,8 +269,7 @@ def _process_bulk_chunk( ignore_status=ignore_status, raise_on_error=raise_on_error, ) - for item in gen: - yield item + yield from gen def streaming_bulk( @@ -287,7 +286,7 @@ def streaming_bulk( yield_ok: bool = True, ignore_status: Any = (), *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Streaming bulk consumes actions from the iterable passed in and yields @@ -344,7 +343,7 @@ def streaming_bulk( raise_on_error, ignore_status, *args, - **kwargs + **kwargs, ), ): if not ok: @@ -384,7 +383,7 @@ def bulk( stats_only: bool = False, ignore_status: Any = (), *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Helper for the :meth:`~opensearchpy.OpenSearch.bulk` api that provides @@ -445,7 +444,7 @@ def parallel_bulk( raise_on_error: bool = True, ignore_status: Any = (), *args: Any, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Parallel version of the bulk helper run in multiple threads at once. @@ -474,7 +473,7 @@ def parallel_bulk( class BlockingPool(ThreadPool): def _setup_queues(self) -> None: - super(BlockingPool, self)._setup_queues() # type: ignore + super()._setup_queues() # type: ignore # The queue must be at least the size of the number of threads to # prevent hanging when inserting sentinel values during teardown. 
self._inqueue: Any = Queue(max(queue_size, thread_count)) @@ -493,15 +492,14 @@ def _setup_queues(self) -> None: raise_on_error, ignore_status, *args, - **kwargs + **kwargs, ) ), _chunk_actions( actions, chunk_size, max_chunk_bytes, client.transport.serializer ), ): - for item in result: - yield item + yield from result finally: pool.close() @@ -518,7 +516,7 @@ def scan( request_timeout: Optional[float] = None, clear_scroll: Optional[bool] = True, scroll_kwargs: Any = None, - **kwargs: Any + **kwargs: Any, ) -> Any: """ Simple abstraction on top of the @@ -587,8 +585,7 @@ def scan( try: while scroll_id and resp.get("hits", {}).get("hits"): - for hit in resp.get("hits", {}).get("hits", []): - yield hit + yield from resp.get("hits", {}).get("hits", []) _shards = resp.get("_shards") @@ -687,5 +684,5 @@ def _change_doc_index(hits: Any, index: Any) -> Any: target_client, _change_doc_index(docs, target_index), chunk_size=chunk_size, - **kwargs + **kwargs, ) diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index a9b87521..0133287f 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -89,7 +89,7 @@ def __contains__(self, key: Any) -> bool: return False def to_dict(self) -> Any: - d = super(Agg, self).to_dict() + d = super().to_dict() if "meta" in d[self.name]: d["meta"] = d[self.name].pop("meta") return d @@ -98,7 +98,7 @@ def result(self, search: Any, data: Any) -> Any: return AggResponse(self, search, data) -class AggBase(object): +class AggBase: _param_defs = { "aggs": {"type": "agg", "hash": True}, } @@ -151,7 +151,7 @@ def result(self: Any, search: Any, data: Any) -> Any: class Bucket(AggBase, Agg): def __init__(self, **params: Any) -> None: - super(Bucket, self).__init__(**params) + super().__init__(**params) # remember self for chaining self._base = self @@ -172,10 +172,10 @@ class Filter(Bucket): def __init__(self, filter: Any = None, **params: Any) -> None: if filter is not None: params["filter"] = filter - super(Filter, self).__init__(**params) + super().__init__(**params) def to_dict(self) -> Any: - d = super(Filter, self).to_dict() + d = super().to_dict() d[self.name].update(d[self.name].pop("filter", {})) return d diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index ea05b26a..5c7dd29e 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -38,7 +38,7 @@ def _type_shortcut( ) -> Any: if isinstance(name_or_instance, cls): if type or kwargs: - raise ValueError("%s() cannot accept parameters." % cls.__name__) + raise ValueError(f"{cls.__name__}() cannot accept parameters.") return name_or_instance if not (type or kwargs): diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index 7d3f34fa..370797a9 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -188,7 +188,7 @@ def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( - "{}={!r}".format(key, getattr(self.meta, key)) + f"{key}={getattr(self.meta, key)!r}" for key in ("index", "id") if key in self.meta ), @@ -310,7 +310,7 @@ def mget( raise RequestError(400, message, error_docs) if missing_docs: missing_ids = [doc["_id"] for doc in missing_docs] - message = "Documents %s not found." % ", ".join(missing_ids) + message = f"Documents {', '.join(missing_ids)} not found." 
raise NotFoundError(404, message, {"docs": missing_docs}) return objs diff --git a/opensearchpy/helpers/errors.py b/opensearchpy/helpers/errors.py index 7012c050..ad25d3f8 100644 --- a/opensearchpy/helpers/errors.py +++ b/opensearchpy/helpers/errors.py @@ -41,5 +41,5 @@ class ScanError(OpenSearchException): scroll_id: str def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: - super(ScanError, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.scroll_id = scroll_id diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py index c2431324..e41aa46d 100644 --- a/opensearchpy/helpers/field.py +++ b/opensearchpy/helpers/field.py @@ -157,7 +157,7 @@ def __init__( doc_class: Any = None, dynamic: Any = None, properties: Any = None, - **kwargs: Any + **kwargs: Any, ) -> None: """ :arg document.InnerDoc doc_class: base doc class that handles mapping. @@ -281,7 +281,7 @@ def _deserialize(self, data: Any) -> Any: data = parser.parse(data) except Exception as e: raise ValidationException( - "Could not parse date from the value (%r)" % data, e + f"Could not parse date from the value ({data!r})", e ) if isinstance(data, datetime): @@ -294,7 +294,7 @@ def _deserialize(self, data: Any) -> Any: # Divide by a float to preserve milliseconds on the datetime. return datetime.utcfromtimestamp(data / 1000.0) - raise ValidationException("Could not parse date from the value (%r)" % data) + raise ValidationException(f"Could not parse date from the value ({data!r})") class Text(Field): diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index 21f5407b..8b7fd301 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -48,7 +48,7 @@ def SF(name_or_sf: Any, **params: Any) -> Any: # pylint: disable=invalid-name elif len(sf) == 1: name, params = sf.popitem() else: - raise ValueError("SF() got an unexpected fields in the dictionary: %r" % sf) + raise ValueError(f"SF() got an unexpected fields in the dictionary: {sf!r}") # boost factor special case, see https://github.com/elastic/elasticsearch/issues/6343 if not isinstance(params, collections_abc.Mapping): @@ -81,7 +81,7 @@ class ScoreFunction(DslBase): name: Optional[str] = None def to_dict(self) -> Any: - d = super(ScoreFunction, self).to_dict() + d = super().to_dict() # filter and query dicts should be at the same level as us for k in self._param_defs: if k in d[self.name]: @@ -97,7 +97,7 @@ class BoostFactor(ScoreFunction): name = "boost_factor" def to_dict(self) -> Any: - d = super(BoostFactor, self).to_dict() + d = super().to_dict() if "value" in d[self.name]: d[self.name] = d[self.name].pop("value") else: diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index 25bc0664..93b86008 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -37,7 +37,7 @@ from .utils import merge -class IndexTemplate(object): +class IndexTemplate: def __init__( self, name: Any, @@ -76,7 +76,7 @@ def save(self, using: Any = None) -> Any: ) -class Index(object): +class Index: def __init__(self, name: Any, using: Any = "default") -> None: """ :arg name: name of the index diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index 1d3b6671..c7878f60 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -261,7 +261,7 @@ def __init__(self, **kwargs: Any) -> None: for name in ScoreFunction._classes: if name in kwargs: fns.append({name: kwargs.pop(name)}) - super(FunctionScore, 
self).__init__(**kwargs) + super().__init__(**kwargs) # compound queries diff --git a/opensearchpy/helpers/response/__init__.py b/opensearchpy/helpers/response/__init__.py index 4858f951..3d33343c 100644 --- a/opensearchpy/helpers/response/__init__.py +++ b/opensearchpy/helpers/response/__init__.py @@ -34,7 +34,7 @@ class Response(AttrDict): def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None: super(AttrDict, self).__setattr__("_search", search) super(AttrDict, self).__setattr__("_doc_class", doc_class) - super(Response, self).__init__(response) + super().__init__(response) def __iter__(self) -> Any: return iter(self.hits) @@ -43,7 +43,7 @@ def __getitem__(self, key: Any) -> Any: if isinstance(key, (slice, int)): # for slicing etc return self.hits[key] - return super(Response, self).__getitem__(key) + return super().__getitem__(key) def __nonzero__(self) -> Any: return bool(self.hits) @@ -103,14 +103,14 @@ def aggs(self) -> Any: class AggResponse(AttrDict): def __init__(self, aggs: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__("_meta", {"search": search, "aggs": aggs}) - super(AggResponse, self).__init__(data) + super().__init__(data) def __getitem__(self, attr_name: Any) -> Any: if attr_name in self._meta["aggs"]: # don't do self._meta['aggs'][attr_name] to avoid copying agg = self._meta["aggs"].aggs[attr_name] return agg.result(self._meta["search"], self._d_[attr_name]) - return super(AggResponse, self).__getitem__(attr_name) + return super().__getitem__(attr_name) def __iter__(self) -> Any: for name in self._meta["aggs"]: @@ -121,7 +121,7 @@ class UpdateByQueryResponse(AttrDict): def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None: super(AttrDict, self).__setattr__("_search", search) super(AttrDict, self).__setattr__("_doc_class", doc_class) - super(UpdateByQueryResponse, self).__init__(response) + super().__init__(response) def success(self) -> bool: return not self.timed_out and not self.failures diff --git a/opensearchpy/helpers/response/aggs.py b/opensearchpy/helpers/response/aggs.py index f52208e6..3d2abfa4 100644 --- a/opensearchpy/helpers/response/aggs.py +++ b/opensearchpy/helpers/response/aggs.py @@ -32,14 +32,14 @@ class Bucket(AggResponse): def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: - super(Bucket, self).__init__(aggs, search, data) + super().__init__(aggs, search, data) class FieldBucket(Bucket): def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: if field: data["key"] = field.deserialize(data["key"]) - super(FieldBucket, self).__init__(aggs, search, data, field) + super().__init__(aggs, search, data, field) class BucketData(AggResponse): @@ -62,7 +62,7 @@ def __len__(self) -> int: def __getitem__(self, key: Any) -> Any: if isinstance(key, (int, slice)): return self.buckets[key] - return super(BucketData, self).__getitem__(key) + return super().__getitem__(key) @property def buckets(self) -> Any: @@ -88,7 +88,7 @@ def __init__(self, agg: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__( "meta", AttrDict({"agg": agg, "search": search}) ) - super(TopHitsData, self).__init__(search, data) + super().__init__(search, data) __all__ = ["AggResponse"] diff --git a/opensearchpy/helpers/response/hit.py b/opensearchpy/helpers/response/hit.py index 319a886f..35ef2bad 100644 --- a/opensearchpy/helpers/response/hit.py +++ b/opensearchpy/helpers/response/hit.py @@ -37,28 +37,28 @@ def __init__(self, document: Any) -> None: 
if "fields" in document: data.update(document["fields"]) - super(Hit, self).__init__(data) + super().__init__(data) # assign meta as attribute and not as key in self._d_ super(AttrDict, self).__setattr__("meta", HitMeta(document)) def __getstate__(self) -> Any: # add self.meta since it is not in self.__dict__ - return super(Hit, self).__getstate__() + (self.meta,) + return super().__getstate__() + (self.meta,) def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("meta", state[-1]) - super(Hit, self).__setstate__(state[:-1]) + super().__setstate__(state[:-1]) def __dir__(self) -> Any: # be sure to expose meta in dir(self) - return super(Hit, self).__dir__() + ["meta"] + return super().__dir__() + ["meta"] def __repr__(self) -> str: return "".format( "/".join( getattr(self.meta, key) for key in ("index", "id") if key in self.meta ), - super(Hit, self).__repr__(), + super().__repr__(), ) diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index 95d00836..cc11e598 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -96,7 +96,7 @@ class ProxyDescriptor: """ def __init__(self, name: str) -> None: - self._attr_name = "_%s_proxy" % name + self._attr_name = f"_{name}_proxy" def __get__(self, instance: Any, owner: Any) -> Any: return getattr(instance, self._attr_name) diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index a497ed5b..b435d276 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -116,9 +116,9 @@ def _fetch_url(self, prepared_request): # type: ignore ) # fetch the host information from headers - headers = dict( - (key.lower(), value) for key, value in prepared_request.headers.items() - ) + headers = { + key.lower(): value for key, value in prepared_request.headers.items() + } location = headers.get("host") or url.netloc # construct the url and return diff --git a/opensearchpy/helpers/update_by_query.py b/opensearchpy/helpers/update_by_query.py index 5eac0f51..9ba48f70 100644 --- a/opensearchpy/helpers/update_by_query.py +++ b/opensearchpy/helpers/update_by_query.py @@ -49,7 +49,7 @@ def __init__(self, **kwargs: Any) -> None: overridden by methods (`using`, `index` and `doc_type` respectively). """ - super(UpdateByQuery, self).__init__(**kwargs) + super().__init__(**kwargs) self._response_class = UpdateByQueryResponse self._script: Any = {} self._query_proxy = QueryProxy(self, "query") @@ -88,7 +88,7 @@ def _clone(self) -> Any: of all the underlying objects. Used internally by most state modifying APIs. 
""" - ubq = super(UpdateByQuery, self)._clone() + ubq = super()._clone() ubq._response_class = self._response_class ubq._script = self._script.copy() diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index deec1ccc..5d9108a4 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -163,9 +163,7 @@ def __getattr__(self, attr_name: Any) -> Any: return self.__getitem__(attr_name) except KeyError: raise AttributeError( - "{!r} object has no attribute {!r}".format( - self.__class__.__name__, attr_name - ) + f"{self.__class__.__name__!r} object has no attribute {attr_name!r}" ) def get(self, key: Any, default: Any = None) -> Any: @@ -181,9 +179,7 @@ def __delattr__(self, attr_name: Any) -> None: del self._d_[attr_name] except KeyError: raise AttributeError( - "{!r} object has no attribute {!r}".format( - self.__class__.__name__, attr_name - ) + f"{self.__class__.__name__!r} object has no attribute {attr_name!r}" ) def __getitem__(self, key: Any) -> Any: @@ -245,7 +241,7 @@ def get_dsl_type(cls, name: Any) -> Any: try: return cls._types[name] except KeyError: - raise UnknownDslObject("DSL type %s does not exist." % name) + raise UnknownDslObject(f"DSL type {name} does not exist.") class DslBase(metaclass=DslMeta): @@ -275,7 +271,7 @@ def get_dsl_class(cls: Any, name: Any, default: Optional[bool] = None) -> Any: if default is not None: return cls._classes[default] raise UnknownDslObject( - "DSL class `{}` does not exist in {}.".format(name, cls._type_name) + f"DSL class `{name}` does not exist in {cls._type_name}." ) def __init__(self, _expand__to_dot: Any = EXPAND__TO_DOT, **params: Any) -> None: @@ -288,14 +284,14 @@ def __init__(self, _expand__to_dot: Any = EXPAND__TO_DOT, **params: Any) -> None def _repr_params(self) -> str: """Produce a repr of all our parameters to be used in __repr__.""" return ", ".join( - "{}={!r}".format(n.replace(".", "__"), v) + f"{n.replace('.', '__')}={v!r}" for (n, v) in sorted(self._params.items()) # make sure we don't include empty typed params if "type" not in self._param_defs.get(n, {}) or v ) def __repr__(self) -> str: - return "{}({})".format(self.__class__.__name__, self._repr_params()) + return f"{self.__class__.__name__}({self._repr_params()})" def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and other.to_dict() == self.to_dict() @@ -341,9 +337,7 @@ def _setattr(self, name: Any, value: Any) -> None: def __getattr__(self, name: str) -> Any: if name.startswith("_"): raise AttributeError( - "{!r} object has no attribute {!r}".format( - self.__class__.__name__, name - ) + f"{self.__class__.__name__!r} object has no attribute {name!r}" ) value = None @@ -360,9 +354,7 @@ def __getattr__(self, name: str) -> Any: value = self._params.setdefault(name, {}) if value is None: raise AttributeError( - "{!r} object has no attribute {!r}".format( - self.__class__.__name__, name - ) + f"{self.__class__.__name__!r} object has no attribute {name!r}" ) # wrap nested dicts in AttrDict for convenient access @@ -567,16 +559,12 @@ def merge(data: Any, new_data: Any, raise_on_conflict: bool = False) -> None: and isinstance(new_data, (AttrDict, collections_abc.Mapping)) ): raise ValueError( - "You can only merge two dicts! Got {!r} and {!r} instead.".format( - data, new_data - ) + f"You can only merge two dicts! Got {data!r} and {new_data!r} instead." ) if not isinstance(new_data, Dict): raise ValueError( - "You can only merge two dicts! 
Got {!r} and {!r} instead.".format( - data, new_data - ) + f"You can only merge two dicts! Got {data!r} and {new_data!r} instead." ) for key, value in new_data.items(): @@ -587,7 +575,7 @@ def merge(data: Any, new_data: Any, raise_on_conflict: bool = False) -> None: ): merge(data[key], value, raise_on_conflict) elif key in data and data[key] != value and raise_on_conflict: - raise ValueError("Incompatible data for key %r, cannot be merged." % key) + raise ValueError(f"Incompatible data for key {key!r}, cannot be merged.") else: data[key] = value # type: ignore diff --git a/opensearchpy/helpers/wrappers.py b/opensearchpy/helpers/wrappers.py index aa3b2784..73fc51d9 100644 --- a/opensearchpy/helpers/wrappers.py +++ b/opensearchpy/helpers/wrappers.py @@ -47,7 +47,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: for k in data: if k not in self.OPS: - raise ValueError("Range received an unknown operator %r" % k) + raise ValueError(f"Range received an unknown operator {k!r}") if "gt" in data and "gte" in data: raise ValueError("You cannot specify both gt and gte for Range.") diff --git a/opensearchpy/serializer.py b/opensearchpy/serializer.py index d16c287e..0fd02378 100644 --- a/opensearchpy/serializer.py +++ b/opensearchpy/serializer.py @@ -45,7 +45,7 @@ TIME_TYPES = (date, datetime) -class Serializer(object): +class Serializer: mimetype: str = "" def loads(self, s: str) -> Any: @@ -65,7 +65,7 @@ def dumps(self, data: Any) -> Any: if isinstance(data, string_types): return data - raise SerializationError("Cannot serialize %r into text." % data) + raise SerializationError(f"Cannot serialize {data!r} into text.") class JSONSerializer(Serializer): @@ -140,7 +140,7 @@ def default(self, data: Any) -> Any: except ImportError: pass - raise TypeError("Unable to serialize %r (type: %s)" % (data, type(data))) + raise TypeError(f"Unable to serialize {data!r} (type: {type(data)})") def loads(self, s: str) -> Any: try: @@ -167,7 +167,7 @@ def dumps(self, data: Any) -> Any: } -class Deserializer(object): +class Deserializer: def __init__( self, serializers: Dict[str, Serializer], @@ -177,7 +177,7 @@ def __init__( self.default = serializers[default_mimetype] except KeyError: raise ImproperlyConfigured( - "Cannot find default serializer (%s)" % default_mimetype + f"Cannot find default serializer ({default_mimetype})" ) self.serializers = serializers @@ -196,7 +196,7 @@ def loads(self, s: str, mimetype: Optional[str] = None) -> Any: deserializer = self.serializers[mimetype] except KeyError: raise SerializationError( - "Unknown mimetype, unable to deserialize: %s" % mimetype + f"Unknown mimetype, unable to deserialize: {mimetype}" ) return deserializer.loads(s) @@ -208,7 +208,7 @@ def default(self, data: Any) -> Any: return data._l_ if hasattr(data, "to_dict"): return data.to_dict() - return super(AttrJSONSerializer, self).default(data) + return super().default(data) serializer = AttrJSONSerializer() diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index 5c7e6297..2385b2b8 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -64,7 +64,7 @@ def get_host_info( return host -class Transport(object): +class Transport: """ Encapsulation of transport-related to logic. Handles instantiation of the individual connections as well as creating a connection pool to hold them. 
diff --git a/setup.py b/setup.py index 9af1488c..d60a3341 100644 --- a/setup.py +++ b/setup.py @@ -64,7 +64,6 @@ tests_require = [ "requests>=2.0.0, <3.0.0", "coverage<8.0.0", - "mock", "pyyaml", "pytest>=3.0.0", "pytest-cov", diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index a16393d0..c08fdc55 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -31,8 +31,6 @@ # under the License. -from __future__ import print_function - import subprocess import sys from os import environ @@ -78,10 +76,10 @@ def fetch_opensearch_repo() -> None: # no test directory if not exists(repo_path): - subprocess.check_call("mkdir %s" % repo_path, shell=True) + subprocess.check_call(f"mkdir {repo_path}", shell=True) # make a new blank repository in the test directory - subprocess.check_call("cd %s && git init" % repo_path, shell=True) + subprocess.check_call(f"cd {repo_path} && git init", shell=True) try: # add a remote @@ -104,7 +102,7 @@ def fetch_opensearch_repo() -> None: # fetch the sha commit, version from info() print("Fetching opensearch repo...") - subprocess.check_call("cd %s && git fetch origin %s" % (repo_path, sha), shell=True) + subprocess.check_call(f"cd {repo_path} && git fetch origin {sha}", shell=True) def run_all(argv: Any = None) -> None: @@ -136,18 +134,18 @@ def run_all(argv: Any = None) -> None: argv = [ "pytest", "--cov=opensearchpy", - "--junitxml=%s" % junit_xml, + f"--junitxml={junit_xml}", "--log-level=DEBUG", "--cache-clear", "-vv", - "--cov-report=xml:%s" % codecov_xml, + f"--cov-report=xml:{codecov_xml}", ] if ( "OPENSEARCHPY_GEN_HTML_COV" in environ and environ.get("OPENSEARCHPY_GEN_HTML_COV") == "true" ): codecov_html = join(abspath(dirname(dirname(__file__))), "junit", "html") - argv.append("--cov-report=html:%s" % codecov_html) + argv.append(f"--cov-report=html:{codecov_html}") secured = False if environ.get("OPENSEARCH_URL", "").startswith("https://"): @@ -156,7 +154,7 @@ def run_all(argv: Any = None) -> None: # check TEST_PATTERN env var for specific test to run test_pattern = environ.get("TEST_PATTERN") if test_pattern: - argv.append("-k %s" % test_pattern) + argv.append(f"-k {test_pattern}") else: ignores = [ "test_opensearchpy/test_server/", @@ -196,7 +194,7 @@ def run_all(argv: Any = None) -> None: ) if ignores: - argv.extend(["--ignore=%s" % ignore for ignore in ignores]) + argv.extend([f"--ignore={ignore}" for ignore in ignores]) # Not in CI, run all tests specified. 
else: diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 662cf146..5c8f6e26 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -32,11 +32,11 @@ import warnings from platform import python_version from typing import Any +from unittest.mock import MagicMock, patch import aiohttp import pytest from _pytest.mark.structures import MarkDecorator -from mock import MagicMock, patch from multidict import CIMultiDict from pytest import raises @@ -154,7 +154,7 @@ async def test_url_prefix(self) -> None: async def test_default_user_agent(self) -> None: con = AIOHttpConnection() - assert con._get_default_user_agent() == "opensearch-py/%s (Python %s)" % ( + assert con._get_default_user_agent() == "opensearch-py/{} (Python {})".format( __versionstr__, python_version(), ) @@ -342,7 +342,7 @@ async def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) _, _, data = await con.perform_request("GET", "/") - assert u"你好\uda6a" == data # fmt: skip + assert "你好\uda6a" == data # fmt: skip @pytest.mark.parametrize("exception_cls", reraise_exceptions) # type: ignore async def test_recursion_error_reraised(self, exception_cls: Any) -> None: diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index b0d4a68b..9287c5f1 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -9,10 +9,10 @@ from typing import Any +from unittest.mock import Mock import pytest from _pytest.mark.structures import MarkDecorator -from mock import Mock from pytest import fixture from opensearchpy.connection.async_connections import add_connection, async_connections diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 26c854c2..51fb5f30 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -7,7 +7,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
-from __future__ import unicode_literals import codecs import ipaddress diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index eccbe773..8fc40acd 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -73,7 +73,7 @@ async def test_cloned_index_has_analysis_attribute() -> None: client = object() i = AsyncIndex("my-index", using=client) - random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) + random_analyzer_name = "".join(choice(string.ascii_letters) for _ in range(100)) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) @@ -117,7 +117,7 @@ async def test_registered_doc_type_included_in_search() -> None: async def test_aliases_add_to_object() -> None: - random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) + random_alias = "".join(choice(string.ascii_letters) for _ in range(100)) alias_dict: Any = {random_alias: {}} index = AsyncIndex("i", using="alias") @@ -127,7 +127,7 @@ async def test_aliases_add_to_object() -> None: async def test_aliases_returned_from_to_dict() -> None: - random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) + random_alias = "".join(choice(string.ascii_letters) for _ in range(100)) alias_dict: Any = {random_alias: {}} index = AsyncIndex("i", using="alias") @@ -137,7 +137,7 @@ async def test_aliases_returned_from_to_dict() -> None: async def test_analyzers_added_to_object() -> None: - random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) + random_analyzer_name = "".join(choice(string.ascii_letters) for _ in range(100)) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) @@ -153,7 +153,7 @@ async def test_analyzers_added_to_object() -> None: async def test_analyzers_returned_from_to_dict() -> None: - random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) + random_analyzer_name = "".join(choice(string.ascii_letters) for _ in range(100)) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index 415f34cc..fe60c977 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ b/test_opensearchpy/test_async/test_http_connection.py @@ -26,8 +26,8 @@ from typing import Any +from unittest import mock -import mock import pytest from multidict import CIMultiDict diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index d95d68f9..f663f82f 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -25,8 +25,6 @@ # under the License. 
-from __future__ import unicode_literals - from typing import Any import pytest diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 2ea6afe9..9f37df92 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -27,9 +27,9 @@ import asyncio from typing import Any, List +from unittest.mock import MagicMock, patch import pytest -from mock import MagicMock, patch from opensearchpy import TransportError from opensearchpy._async.helpers import actions @@ -40,13 +40,13 @@ class AsyncMock(MagicMock): async def __call__(self, *args: Any, **kwargs: Any) -> Any: - return super(AsyncMock, self).__call__(*args, **kwargs) + return super().__call__(*args, **kwargs) def __await__(self) -> Any: return self().__await__() -class FailingBulkClient(object): +class FailingBulkClient: def __init__( self, client: Any, @@ -69,7 +69,7 @@ async def bulk(self, *args: Any, **kwargs: Any) -> Any: return await self.client.bulk(*args, **kwargs) -class TestStreamingBulk(object): +class TestStreamingBulk: async def test_actions_remain_unchanged(self, async_client: Any) -> None: actions1 = [{"_id": 1}, {"_id": 2}] async for ok, _ in actions.async_streaming_bulk( @@ -281,7 +281,7 @@ async def streaming_bulk() -> Any: assert 4 == failing_client._called -class TestBulk(object): +class TestBulk: async def test_bulk_works_with_single_item(self, async_client: Any) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( @@ -453,7 +453,7 @@ async def scan_teardown(async_client: Any) -> Any: await async_client.clear_scroll(scroll_id="_all") -class TestScan(object): +class TestScan: async def test_order_can_be_preserved( self, async_client: Any, scan_teardown: Any ) -> None: @@ -492,8 +492,8 @@ async def test_all_documents_are_read( ] assert 100 == len(docs) - assert set(map(str, range(100))) == set(d["_id"] for d in docs) - assert set(range(100)) == set(d["_source"]["answer"] for d in docs) + assert set(map(str, range(100))) == {d["_id"] for d in docs} + assert set(range(100)) == {d["_source"]["answer"] for d in docs} async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None: bulk: Any = [] @@ -824,7 +824,7 @@ async def reindex_setup(async_client: Any) -> Any: yield -class TestReindex(object): +class TestReindex: async def test_reindex_passes_kwargs_to_scan_and_bulk( self, async_client: Any, reindex_setup: Any ) -> None: diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index b0f396f2..61a70cca 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -7,7 +7,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
-from __future__ import unicode_literals from typing import Any, Dict diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 6a14cb7d..8dda5c59 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -64,7 +64,7 @@ class Repository(AsyncDocument): @classmethod def search(cls, using: Any = None, index: Optional[str] = None) -> Any: - return super(Repository, cls).search().filter("term", commit_repo="repo") + return super().search().filter("term", commit_repo="repo") class Index: name = "git" diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index 70861f0b..577644c0 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -98,7 +98,7 @@ class RepoSearch(AsyncFacetedSearch): } def search(self) -> Any: - s = super(RepoSearch, self).search() + s = super().search() return s.filter("term", commit_repo="repo") return RepoSearch diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index a42daf6c..96134484 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -7,7 +7,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from __future__ import unicode_literals from typing import Any @@ -31,7 +30,7 @@ class Repository(AsyncDocument): @classmethod def search(cls, using: Any = None, index: Any = None) -> Any: - return super(Repository, cls).search().filter("term", commit_repo="repo") + return super().search().filter("term", commit_repo="repo") class Index: name = "git" diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index 4e742c2f..6fc0e178 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -8,8 +8,6 @@ # GitHub history for details. -from __future__ import unicode_literals - import unittest import pytest diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py index 0ffae69f..a4eb91d2 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py @@ -8,8 +8,6 @@ # GitHub history for details. 
-from __future__ import unicode_literals - import pytest from _pytest.mark.structures import MarkDecorator diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py index 9e412d7d..b927fc47 100644 --- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py @@ -121,7 +121,7 @@ async def run_code(self, test: Any) -> Any: if hasattr(self, "run_" + action_type): await await_if_coro(getattr(self, "run_" + action_type)(action)) else: - raise RuntimeError("Invalid action type %r" % (action_type,)) + raise RuntimeError(f"Invalid action type {action_type!r}") async def run_do(self, action: Any) -> Any: api = self.client @@ -170,7 +170,7 @@ async def run_do(self, action: Any) -> Any: else: if catch: raise AssertionError( - "Failed to catch %r in %r." % (catch, self.last_response) + f"Failed to catch {catch!r} in {self.last_response!r}." ) # Filter out warnings raised by other components. @@ -197,7 +197,7 @@ async def run_skip(self, skip: Any) -> Any: for feature in features: if feature in IMPLEMENTED_FEATURES: continue - pytest.skip("feature '%s' is not supported" % feature) + pytest.skip(f"feature '{feature}' is not supported") if "version" in skip: version, reason = skip["version"], skip["reason"] diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 9b4bacde..7b09be9a 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -8,8 +8,6 @@ # GitHub history for details. -from __future__ import unicode_literals - import os from unittest import IsolatedAsyncioTestCase diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 664f6a95..821d5ae7 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -8,10 +8,10 @@ # GitHub history for details. import uuid +from unittest.mock import Mock import pytest from _pytest.mark.structures import MarkDecorator -from mock import Mock pytestmark: MarkDecorator = pytest.mark.asyncio diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index 6efa4b6c..e3048a48 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -25,15 +25,13 @@ # under the License. 
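The REST API spec runner now builds its error messages with f-strings, using the `!r` conversion where the old code used `%r`. A quick sketch of the equivalence, with made-up values:

```python
# Sketch: %r placeholders become !r conversions inside f-strings; both produce repr().
action_type = "do_something"
old_msg = "Invalid action type %r" % (action_type,)
new_msg = f"Invalid action type {action_type!r}"
assert old_msg == new_msg == "Invalid action type 'do_something'"

catch, last_response = "missing", {"status": 200}
assert (
    f"Failed to catch {catch!r} in {last_response!r}."
    == "Failed to catch 'missing' in {'status': 200}."
)
```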
-from __future__ import unicode_literals - import asyncio import json from typing import Any +from unittest.mock import patch import pytest from _pytest.mark.structures import MarkDecorator -from mock import patch from opensearchpy import AIOHttpConnection, AsyncTransport from opensearchpy.connection import Connection @@ -51,7 +49,7 @@ def __init__(self, **kwargs: Any) -> None: self.delay = kwargs.pop("delay", 0) self.calls: Any = [] self.closed = False - super(DummyConnection, self).__init__(**kwargs) + super().__init__(**kwargs) async def perform_request(self, *args: Any, **kwargs: Any) -> Any: if self.closed: @@ -253,7 +251,7 @@ async def test_kwargs_passed_on_to_connection_pool(self) -> None: assert dt is t.connection_pool.dead_timeout async def test_custom_connection_class(self) -> None: - class MyConnection(object): + class MyConnection: def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py index a03f0e44..aa82a42b 100644 --- a/test_opensearchpy/test_cases.py +++ b/test_opensearchpy/test_cases.py @@ -32,7 +32,7 @@ from opensearchpy import OpenSearch -class DummyTransport(object): +class DummyTransport: def __init__( self, hosts: Sequence[str], responses: Any = None, **kwargs: Any ) -> None: @@ -59,7 +59,7 @@ def perform_request( class OpenSearchTestCase(TestCase): def setUp(self) -> None: - super(OpenSearchTestCase, self).setUp() + super().setUp() self.client: Any = OpenSearch(transport_class=DummyTransport) # type: ignore def assert_call_count_equals(self, count: int) -> None: diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py index 55fcd4a9..be2e9632 100644 --- a/test_opensearchpy/test_client/__init__.py +++ b/test_opensearchpy/test_client/__init__.py @@ -25,8 +25,6 @@ # under the License. -from __future__ import unicode_literals - import warnings from opensearchpy.client import OpenSearch diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index efed662a..a0cde64d 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -25,8 +25,6 @@ # under the License. 
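The test doubles above also lose their explicit `object` base and call `super().__init__()` without arguments. A toy illustration of both spellings (the `DummyConnection` here is not the test double from the diff):

```python
# Sketch: "class Foo(object)" and "super(Foo, self)" collapse to the Python 3 forms.
from typing import Any, List


class BaseConnection:
    def __init__(self, **kwargs: Any) -> None:
        self.host = kwargs.pop("host", "localhost")


class DummyConnection(BaseConnection):  # no explicit "(object)" base needed on Python 3
    def __init__(self, **kwargs: Any) -> None:
        self.calls: List[Any] = []
        super().__init__(**kwargs)  # was: super(DummyConnection, self).__init__(**kwargs)


conn = DummyConnection(host="opensearch:9200")
assert conn.host == "opensearch:9200" and conn.calls == []
```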
-from __future__ import unicode_literals - from typing import Any from opensearchpy.client.utils import _bulk_body, _escape, _make_path, query_params diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index 325f29a0..ff6fc8c2 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -30,9 +30,9 @@ import uuid import warnings from typing import Any +from unittest.mock import MagicMock, Mock, patch import pytest -from mock import MagicMock, Mock, patch from requests.auth import AuthBase from opensearchpy.connection import Connection, RequestsHttpConnection @@ -258,7 +258,7 @@ def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: "GET", "/", {"param": 42}, - "{}".encode("utf-8"), + b"{}", ) # trace request @@ -282,7 +282,7 @@ def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: "GET", "/", {"param": 42}, - """{"question": "what's that?"}""".encode("utf-8"), + b"""{"question": "what's that?"}""", ) # trace request @@ -397,7 +397,7 @@ def test_body_attached(self) -> None: self.assertEqual("http://localhost:9200/", request.url) self.assertEqual("GET", request.method) - self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) + self.assertEqual(b'{"answer": 42}', request.body) def test_http_auth_attached(self) -> None: con = self._get_mock_connection({"http_auth": "username:secret"}) @@ -414,7 +414,7 @@ def test_url_prefix(self, tracer: Any) -> None: self.assertEqual("http://localhost:9200/some-prefix/_search", request.url) self.assertEqual("GET", request.method) - self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) + self.assertEqual(b'{"answer": 42}', request.body) # trace request trace_curl_cmd = ( @@ -431,7 +431,7 @@ def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) _, _, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) # fmt: skip + self.assertEqual("你好\uda6a", data) # fmt: skip def test_recursion_error_reraised(self) -> None: conn = RequestsHttpConnection() diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py index ef9cbc73..6661408d 100644 --- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -32,10 +32,10 @@ from io import BytesIO from platform import python_version from typing import Any +from unittest.mock import MagicMock, Mock, patch import pytest import urllib3 -from mock import MagicMock, Mock, patch from urllib3._collections import HTTPHeaderDict from opensearchpy import __versionstr__ @@ -130,7 +130,7 @@ def test_default_user_agent(self) -> None: con = Urllib3HttpConnection() self.assertEqual( con._get_default_user_agent(), - "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), + f"opensearch-py/{__versionstr__} (Python {python_version()})", ) def test_timeout_set(self) -> None: @@ -385,7 +385,7 @@ def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) _, _, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) # fmt: skip + self.assertEqual("你好\uda6a", data) # fmt: skip def 
test_recursion_error_reraised(self) -> None: conn = Urllib3HttpConnection() diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 5fa750c6..83ed4208 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -68,9 +68,7 @@ def test_disable_shuffling(self) -> None: def test_selectors_have_access_to_connection_opts(self) -> None: class MySelector(RoundRobinSelector): def select(self, connections: Any) -> Any: - return self.connection_opts[ - super(MySelector, self).select(connections) - ]["actual"] + return self.connection_opts[super().select(connections)]["actual"] pool = ConnectionPool( [(x, {"actual": x * x}) for x in range(100)], diff --git a/test_opensearchpy/test_helpers/conftest.py b/test_opensearchpy/test_helpers/conftest.py index 06355e24..e7ef9dcb 100644 --- a/test_opensearchpy/test_helpers/conftest.py +++ b/test_opensearchpy/test_helpers/conftest.py @@ -26,8 +26,8 @@ from typing import Any +from unittest.mock import Mock -from mock import Mock from pytest import fixture from opensearchpy.connection.connections import add_connection, connections diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index c0d07138..35d35a32 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -28,9 +28,9 @@ import threading import time from typing import Any +from unittest import mock from unittest.mock import Mock -import mock import pytest from opensearchpy import OpenSearch, helpers @@ -131,7 +131,7 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk: Any) -> No results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) ) - self.assertTrue(len(set([r[1] for r in results])) > 1) + self.assertTrue(len({r[1] for r in results}) > 1) class TestChunkActions(TestCase): @@ -266,7 +266,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self) -> None: ) self.assertEqual(25, len(chunks)) for _, chunk_actions in chunks: - chunk = u"".join(chunk_actions) # fmt: skip + chunk = "".join(chunk_actions) # fmt: skip chunk = chunk if isinstance(chunk, str) else chunk.encode("utf-8") self.assertLessEqual(len(chunk), max_byte_size) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index d60dd6b2..b261e54e 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -24,7 +24,6 @@ # specific language governing permissions and limitations # under the License. 
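Several of the connection and helper tests above replace runtime conversions with literals: `b"..."` instead of `str.encode("utf-8")`, no `u""` prefix, and set comprehensions instead of `set(<generator>)`. A short sketch with illustrative values:

```python
# Sketch: literal forms replace redundant conversions; values are illustrative only.
body = b'{"answer": 42}'
assert body == '{"answer": 42}'.encode("utf-8")  # same bytes, no encode() call needed

assert u"你好" == "你好"  # the u"" prefix is a no-op on Python 3

docs = [{"_id": str(i), "_source": {"answer": i}} for i in range(100)]
assert {d["_id"] for d in docs} == set(map(str, range(100)))
assert {d["_source"]["answer"] for d in docs} == set(range(100))
```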
-from __future__ import unicode_literals import codecs import ipaddress diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py index eac720b4..41469a52 100644 --- a/test_opensearchpy/test_helpers/test_index.py +++ b/test_opensearchpy/test_helpers/test_index.py @@ -84,7 +84,7 @@ def test_cloned_index_has_analysis_attribute() -> None: client = object() i: Any = Index("my-index", using=client) - random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) + random_analyzer_name = "".join(choice(string.ascii_letters) for _ in range(100)) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) @@ -128,7 +128,7 @@ def test_registered_doc_type_included_in_search() -> None: def test_aliases_add_to_object() -> None: - random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) + random_alias = "".join(choice(string.ascii_letters) for _ in range(100)) alias_dict: Any = {random_alias: {}} index: Any = Index("i", using="alias") @@ -138,7 +138,7 @@ def test_aliases_add_to_object() -> None: def test_aliases_returned_from_to_dict() -> None: - random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) + random_alias = "".join(choice(string.ascii_letters) for _ in range(100)) alias_dict: Any = {random_alias: {}} index: Any = Index("i", using="alias") @@ -148,7 +148,7 @@ def test_aliases_returned_from_to_dict() -> None: def test_analyzers_added_to_object() -> None: - random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) + random_analyzer_name = "".join(choice(string.ascii_letters) for _ in range(100)) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) @@ -164,7 +164,7 @@ def test_analyzers_added_to_object() -> None: def test_analyzers_returned_from_to_dict() -> None: - random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) + random_analyzer_name = "".join(choice(string.ascii_letters) for _ in range(100)) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py index fa535d57..3651aeaf 100644 --- a/test_opensearchpy/test_helpers/test_result.py +++ b/test_opensearchpy/test_helpers/test_result.py @@ -95,7 +95,7 @@ def test_interactive_helpers(dummy_response: Any) -> None: ) assert res - assert "<Response: %s>" % rhits == repr(res) + assert f"<Response: {rhits}>" == repr(res) assert rhits == repr(hits) assert {"meta", "city", "name"} == set(dir(h)) assert "<Hit(test-index/opensearch): %r>" % dummy_response["hits"]["hits"][0][ diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index c67bcb8b..9106de0f 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -99,7 +99,7 @@ def test_serializer_deals_with_attr_versions() -> None: def test_serializer_deals_with_objects_with_to_dict() -> None: - class MyClass(object): + class MyClass: def to_dict(self) -> int: return 42 diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py index 67f2fe50..ac7afe91 100644 --- a/test_opensearchpy/test_helpers/test_validation.py +++ b/test_opensearchpy/test_helpers/test_validation.py @@ -66,7 +66,7 @@ class AutoNowDate(Date): def clean(self, data: Any) -> Any: if data is None: data = datetime.now() - return super(AutoNowDate, self).clean(data) + return
super().clean(data) class Log(Document): diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py index e1f83e4b..9fc745bf 100644 --- a/test_opensearchpy/test_server/conftest.py +++ b/test_opensearchpy/test_server/conftest.py @@ -74,7 +74,7 @@ def sync_client_factory() -> Any: except ConnectionError: time.sleep(0.1) else: - pytest.skip("OpenSearch wasn't running at %r" % (OPENSEARCH_URL,)) + pytest.skip(f"OpenSearch wasn't running at {OPENSEARCH_URL!r}") wipe_cluster(client) yield client diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py index a77b0f37..7639a161 100644 --- a/test_opensearchpy/test_server/test_clients.py +++ b/test_opensearchpy/test_server/test_clients.py @@ -25,8 +25,6 @@ # under the License. -from __future__ import unicode_literals - from . import OpenSearchTestCase diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index 4d4bd893..5d8786ed 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -26,8 +26,7 @@ from typing import Any - -from mock import patch +from unittest.mock import patch from opensearchpy import TransportError, helpers from opensearchpy.helpers import ScanError @@ -36,7 +35,7 @@ from .. import OpenSearchTestCase -class FailingBulkClient(object): +class FailingBulkClient: def __init__( self, client: Any, @@ -383,7 +382,7 @@ class TestScan(OpenSearchTestCase): def teardown_method(self, m: Any) -> None: self.client.transport.perform_request("DELETE", "/_search/scroll/_all") - super(TestScan, self).teardown_method(m) + super().teardown_method(m) def test_order_can_be_preserved(self) -> None: bulk: Any = [] @@ -415,8 +414,8 @@ def test_all_documents_are_read(self) -> None: docs = list(helpers.scan(self.client, index="test_index", size=2)) self.assertEqual(100, len(docs)) - self.assertEqual(set(map(str, range(100))), set(d["_id"] for d in docs)) - self.assertEqual(set(range(100)), set(d["_source"]["answer"] for d in docs)) + self.assertEqual(set(map(str, range(100))), {d["_id"] for d in docs}) + self.assertEqual(set(range(100)), {d["_source"]["answer"] for d in docs}) def test_scroll_error(self) -> None: bulk: Any = [] diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 9118ffed..d3d357ab 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -24,7 +24,6 @@ # specific language governing permissions and limitations # under the License. 
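The index tests also drop the redundant parentheses around a generator expression passed as the sole argument to `"".join(...)`. Both spellings produce the same result, as this seeded sketch shows:

```python
# Sketch: a generator passed as the only argument needs no extra parentheses.
import random
import string

random.seed(0)
name_a = "".join((random.choice(string.ascii_letters) for _ in range(10)))  # old style
random.seed(0)
name_b = "".join(random.choice(string.ascii_letters) for _ in range(10))    # new style
assert name_a == name_b and len(name_b) == 10
```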
-from __future__ import unicode_literals from typing import Any, Dict diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index 573240a2..3c33a798 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -79,7 +79,7 @@ class Repository(Document): @classmethod def search(cls, using: Any = None, index: Any = None) -> Any: - return super(Repository, cls).search().filter("term", commit_repo="repo") + return super().search().filter("term", commit_repo="repo") class Index: name = "git" diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index a9eebc94..c45e29d6 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -106,7 +106,7 @@ class RepoSearch(FacetedSearch): } def search(self) -> Any: - s = super(RepoSearch, self).search() + s = super().search() return s.filter("term", commit_repo="repo") return RepoSearch diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index bbad6cc3..483f4040 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -24,7 +24,6 @@ # specific language governing permissions and limitations # under the License. -from __future__ import unicode_literals from typing import Any @@ -52,7 +51,7 @@ class Repository(Document): @classmethod def search(cls, using: Any = None, index: Any = None) -> Any: - return super(Repository, cls).search().filter("term", commit_repo="repo") + return super().search().filter("term", commit_repo="repo") class Index: name = "git" diff --git a/test_opensearchpy/test_server/test_metrics.py b/test_opensearchpy/test_server/test_metrics.py index 189fc739..43d34052 100644 --- a/test_opensearchpy/test_server/test_metrics.py +++ b/test_opensearchpy/test_server/test_metrics.py @@ -7,7 +7,6 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from __future__ import unicode_literals import time diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index 6ecac372..51be201e 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -8,8 +8,6 @@ # GitHub history for details. -from __future__ import unicode_literals - import unittest from opensearchpy.helpers.test import OPENSEARCH_VERSION diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py index 1d2b696f..6e9e9715 100644 --- a/test_opensearchpy/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py @@ -8,8 +8,6 @@ # GitHub history for details. -from __future__ import unicode_literals - from opensearchpy.exceptions import NotFoundError from .. 
import OpenSearchTestCase diff --git a/test_opensearchpy/test_server/test_plugins/test_notification.py b/test_opensearchpy/test_server/test_plugins/test_notification.py index 6782b427..eb55dd59 100644 --- a/test_opensearchpy/test_server/test_plugins/test_notification.py +++ b/test_opensearchpy/test_server/test_plugins/test_notification.py @@ -8,8 +8,6 @@ # GitHub history for details. -from __future__ import unicode_literals - import unittest from typing import Any, Dict diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 046b4208..e40c4e09 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -169,7 +169,7 @@ def run_code(self, test: Any) -> Any: if hasattr(self, "run_" + action_type): getattr(self, "run_" + action_type)(action) else: - raise RuntimeError("Invalid action type %r" % (action_type,)) + raise RuntimeError(f"Invalid action type {action_type!r}") def run_do(self, action: Any) -> Any: api = self.client @@ -218,7 +218,7 @@ def run_do(self, action: Any) -> Any: else: if catch: raise AssertionError( - "Failed to catch %r in %r." % (catch, self.last_response) + f"Failed to catch {catch!r} in {self.last_response!r}." ) # Filter out warnings raised by other components. @@ -248,7 +248,7 @@ def run_catch(self, catch: Any, exception: Any) -> None: elif catch[0] == "/" and catch[-1] == "/": assert ( re.search(catch[1:-1], exception.error + " " + repr(exception.info)), - "%s not in %r" % (catch, exception.info), + f"{catch} not in {exception.info!r}", ) is not None self.last_response = exception.info @@ -262,7 +262,7 @@ def run_skip(self, skip: Any) -> Any: for feature in features: if feature in IMPLEMENTED_FEATURES: continue - pytest.skip("feature '%s' is not supported" % feature) + pytest.skip(f"feature '{feature}' is not supported") if "version" in skip: version, reason = skip["version"], skip["reason"] @@ -328,10 +328,7 @@ def run_match(self, action: Any) -> None: and expected.strip().endswith("/") ): expected = re.compile(expected.strip()[1:-1], re.VERBOSE | re.MULTILINE) - assert expected.search(value), "%r does not match %r" % ( - value, - expected, - ) + assert expected.search(value), f"{value!r} does not match {expected!r}" else: self._assert_match_equals(value, expected) @@ -341,7 +338,7 @@ def run_contains(self, action: Any) -> None: expected = self._resolve(expected) # dict[str, str] if expected not in value: - raise AssertionError("%s is not contained by %s" % (expected, value)) + raise AssertionError(f"{expected} is not contained by {value}") def run_transform_and_set(self, action: Any) -> None: for key, value in action.items(): @@ -371,7 +368,7 @@ def _resolve(self, value: Any) -> Any: break if isinstance(value, dict): - value = dict((k, self._resolve(v)) for (k, v) in value.items()) + value = {k: self._resolve(v) for (k, v) in value.items()} elif isinstance(value, list): value = list(map(self._resolve, value)) return value @@ -412,7 +409,7 @@ def _assert_match_equals(self, a: Any, b: Any) -> None: if isinstance(b, string_types) and isinstance(a, float) and "e" in repr(a): a = repr(a).replace("e+", "E") - assert a == b, "%r does not match %r" % (a, b) + assert a == b, f"{a!r} does not match {b!r}" @pytest.fixture(scope="function") # type: ignore @@ -473,7 +470,7 @@ def load_rest_api_tests() -> None: for prefix in ("rest-api-spec/", "test/", "oss/"): if pytest_test_name.startswith(prefix): pytest_test_name = 
pytest_test_name[len(prefix) :] - pytest_param_id = "%s[%d]" % (pytest_test_name, test_number) + pytest_param_id = f"{pytest_test_name}[{test_number}]" pytest_param = { "setup": setup_steps, @@ -487,7 +484,7 @@ def load_rest_api_tests() -> None: YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) except Exception as e: - warnings.warn("Could not load REST API tests: %s" % (str(e),)) + warnings.warn(f"Could not load REST API tests: {str(e)}") load_rest_api_tests() diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 55443989..c1eb25ca 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -8,8 +8,6 @@ # GitHub history for details. -from __future__ import unicode_literals - import os from unittest import TestCase diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 6a09b83b..ccef21df 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -25,13 +25,10 @@ # under the License. -from __future__ import unicode_literals - import json import time from typing import Any - -from mock import patch +from unittest.mock import patch from opensearchpy.connection import Connection from opensearchpy.connection_pool import DummyConnectionPool @@ -47,7 +44,7 @@ def __init__(self, **kwargs: Any) -> None: self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) self.calls: Any = [] - super(DummyConnection, self).__init__(**kwargs) + super().__init__(**kwargs) def perform_request(self, *args: Any, **kwargs: Any) -> Any: self.calls.append((args, kwargs)) diff --git a/utils/build_dists.py b/utils/build_dists.py index e1cd6ed1..c1145958 100644 --- a/utils/build_dists.py +++ b/utils/build_dists.py @@ -69,7 +69,7 @@ def run(*argv: Any, expect_exit_code: int = 0) -> None: else: os.chdir(TMP_DIR) - cmd = " ".join(shlex.quote(x) for x in argv) + cmd = shlex.join(argv) print("$ " + cmd) exit_code = os.system(cmd) if exit_code != expect_exit_code: @@ -270,7 +270,7 @@ def main() -> None: # Rename the module to fit the suffix. shutil.move( os.path.join(BASE_DIR, "opensearchpy"), - os.path.join(BASE_DIR, "opensearchpy%s" % suffix), + os.path.join(BASE_DIR, f"opensearchpy{suffix}"), ) # Ensure that the version within 'opensearchpy/_version.py' is correct. @@ -279,7 +279,7 @@ def main() -> None: version_data = file.read() version_data = re.sub( r"__versionstr__: str = \"[^\"]+\"", - '__versionstr__: str = "%s"' % version, + f'__versionstr__: str = "{version}"', version_data, ) with open(version_path, "w", encoding="utf-8") as file: @@ -296,7 +296,7 @@ def main() -> None: file.write( setup_py.replace( 'PACKAGE_NAME = "opensearch-py"', - 'PACKAGE_NAME = "opensearch-py%s"' % suffix, + f'PACKAGE_NAME = "opensearch-py{suffix}"', ) ) @@ -306,7 +306,7 @@ def main() -> None: # Clean up everything. 
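`utils/build_dists.py` now uses `shlex.join`, which requires Python 3.8+ and is defined as exactly the quote-and-join idiom it replaces. A sketch using the same git command the script runs:

```python
# Sketch: shlex.join (Python 3.8+) is the one-call equivalent of quoting each argument.
import shlex

argv = ["git", "checkout", "--", "setup.py", "opensearchpy/"]
assert shlex.join(argv) == " ".join(shlex.quote(x) for x in argv)
print("$ " + shlex.join(argv))  # $ git checkout -- setup.py opensearchpy/
```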
run("git", "checkout", "--", "setup.py", "opensearchpy/") if suffix: - run("rm", "-rf", "opensearchpy%s/" % suffix) + run("rm", "-rf", f"opensearchpy{suffix}/") # Test everything that got created dists = os.listdir(os.path.join(BASE_DIR, "dist")) diff --git a/utils/generate_api.py b/utils/generate_api.py index 9bb8674c..dd9ca3f8 100644 --- a/utils/generate_api.py +++ b/utils/generate_api.py @@ -95,7 +95,7 @@ def blacken(filename: Any) -> None: assert result.exit_code == 0, result.output -@lru_cache() +@lru_cache def is_valid_url(url: str) -> bool: """ makes a call to the url @@ -222,7 +222,7 @@ def dump(self) -> None: # Identifying the insertion point for the "THIS CODE IS AUTOMATICALLY GENERATED" header. if os.path.exists(self.filepath): - with open(self.filepath, "r", encoding="utf-8") as file: + with open(self.filepath, encoding="utf-8") as file: content = file.read() if header_separator in content: update_header = False @@ -249,7 +249,7 @@ def dump(self) -> None: generated_file_header_path = os.path.join( current_script_folder, "generated_file_headers.txt" ) - with open(generated_file_header_path, "r", encoding="utf-8") as header_file: + with open(generated_file_header_path, encoding="utf-8") as header_file: header_content = header_file.read() # Imports are temporarily removed from the header and are regenerated @@ -287,7 +287,7 @@ def dump(self) -> None: # Generating imports for each module utils_imports = "" file_content = "" - with open(self.filepath, "r", encoding="utf-8") as file: + with open(self.filepath, encoding="utf-8") as file: content = file.read() keywords = [ "SKIP_IN_PATH", diff --git a/utils/license_headers.py b/utils/license_headers.py index 86582c9d..1092a43e 100644 --- a/utils/license_headers.py +++ b/utils/license_headers.py @@ -55,7 +55,7 @@ def does_file_need_fix(filepath: str) -> bool: if not re.search(r"\.py$", filepath): return False existing_header = "" - with open(filepath, mode="r", encoding="utf-8") as file: + with open(filepath, encoding="utf-8") as file: for line in file: line = line.strip() if len(line) == 0 or line in LINES_TO_KEEP: @@ -73,7 +73,7 @@ def add_header_to_file(filepath: str) -> None: writes the license header to the beginning of a file :param filepath: relative or absolute filepath to update """ - with open(filepath, mode="r", encoding="utf-8") as file: + with open(filepath, encoding="utf-8") as file: lines = list(file) i = 0 for i, line in enumerate(lines):