From d7f12028ad5de19ec0b63c35818bd7ed8fc2df17 Mon Sep 17 00:00:00 2001 From: Sam Date: Thu, 13 Jun 2024 12:14:13 -0500 Subject: [PATCH 1/2] Updating to support ES 8.X --- .travis.yml | 3 + Makefile | 8 +- elasticmock/__init__.py | 8 +- elasticmock/fake_cluster.py | 18 +- elasticmock/fake_elasticsearch.py | 752 ++++++++++-------- elasticmock/fake_indices.py | 87 +- elasticmock/utilities/__init__.py | 10 +- elasticmock/utilities/decorator.py | 13 + requirements.txt | 4 +- setup.py | 7 +- tests/__init__.py | 4 +- tests/fake_cluster/test_health.py | 2 +- tests/fake_elasticsearch/test_count.py | 4 +- tests/fake_elasticsearch/test_delete.py | 12 +- tests/fake_elasticsearch/test_get.py | 14 +- tests/fake_elasticsearch/test_index.py | 7 +- tests/fake_elasticsearch/test_instance.py | 4 +- tests/fake_elasticsearch/test_scroll.py | 24 +- tests/fake_elasticsearch/test_search.py | 7 +- tests/fake_elasticsearch/test_suggest.py | 61 -- .../test_transport_failure.py | 10 + tests/fake_indices/test_create.py | 6 +- tests/fake_indices/test_delete.py | 19 +- tests/fake_indices/test_exists.py | 6 +- tests/fake_indices/test_refresh.py | 6 +- tox.ini | 12 +- 26 files changed, 647 insertions(+), 461 deletions(-) delete mode 100644 tests/fake_elasticsearch/test_suggest.py create mode 100644 tests/fake_elasticsearch/test_transport_failure.py diff --git a/.travis.yml b/.travis.yml index 7ec3c19..7f1e047 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,9 @@ python: - "3.7" - "3.8" - "3.9" + - "3.10" + - "3.11" + - "3.12" install: - pip install tox-travis - pip install coveralls diff --git a/Makefile b/Makefile index 11fcd10..d95d121 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -ELASTICMOCK_VERSION='1.8.1' +ELASTICMOCK_VERSION='2.0.0' install: pip3 install -r requirements.txt @@ -7,7 +7,7 @@ test_install: install pip3 install -r requirements_test.txt test: test_install - python3.9 setup.py test + python3.12 setup.py test upload: create_dist pip3 install twine @@ -16,11 +16,11 @@ 
upload: create_dist create_dist: create_dist_no_commit update_pip rm -rf dist - python3.9 setup.py sdist + python3.12 setup.py sdist create_dist_no_commit: update_pip rm -rf dist - python3.9 setup.py sdist + python3.12 setup.py sdist create_dist_commit: git commit --all -m "Bump version ${ELASTICMOCK_VERSION}" diff --git a/elasticmock/__init__.py b/elasticmock/__init__.py index 607fa1b..f58795b 100644 --- a/elasticmock/__init__.py +++ b/elasticmock/__init__.py @@ -2,7 +2,7 @@ from functools import wraps -from elasticsearch.client import _normalize_hosts +from elasticsearch._sync.client.utils import client_node_configs from unittest.mock import patch from elasticmock.fake_elasticsearch import FakeElasticsearch @@ -11,15 +11,15 @@ def _get_elasticmock(hosts=None, *args, **kwargs): - host = _normalize_hosts(hosts)[0] + host = client_node_configs(hosts, cloud_id=None)[0] elastic_key = '{0}:{1}'.format( - host.get('host', 'localhost'), host.get('port', 9200) + host.host, host.port ) if elastic_key in ELASTIC_INSTANCES: connection = ELASTIC_INSTANCES.get(elastic_key) else: - connection = FakeElasticsearch() + connection = FakeElasticsearch(hosts=[host]) ELASTIC_INSTANCES[elastic_key] = connection return connection diff --git a/elasticmock/fake_cluster.py b/elasticmock/fake_cluster.py index 193678e..e912cf0 100644 --- a/elasticmock/fake_cluster.py +++ b/elasticmock/fake_cluster.py @@ -1,15 +1,21 @@ # -*- coding: utf-8 -*- +import typing as t +from elasticsearch._sync.client.cluster import ClusterClient +from elasticsearch._sync.client.utils import _rewrite_parameters -from elasticsearch.client.cluster import ClusterClient -from elasticsearch.client.utils import query_params + +from elastic_transport import ObjectApiResponse +from elasticmock.utilities.decorator import wrap_object_api_response class FakeClusterClient(ClusterClient): - @query_params('level', 'local', 'master_timeout', 'timeout', - 'wait_for_active_shards', 'wait_for_nodes', - 'wait_for_relocating_shards', 
'wait_for_status') - def health(self, index=None, params=None, headers=None): + @_rewrite_parameters() + @wrap_object_api_response + def health( + self, + **params + ) -> ObjectApiResponse[t.Any]: return { 'cluster_name': 'testcluster', 'status': 'green', diff --git a/elasticmock/fake_elasticsearch.py b/elasticmock/fake_elasticsearch.py index 2975c86..8941b65 100644 --- a/elasticmock/fake_elasticsearch.py +++ b/elasticmock/fake_elasticsearch.py @@ -1,22 +1,34 @@ # -*- coding: utf-8 -*- +import re import datetime import json import sys +import typing as t +import functools from collections import defaultdict import dateutil.parser from elasticsearch import Elasticsearch -from elasticsearch.client.utils import query_params -from elasticsearch.client import _normalize_hosts -from elasticsearch.transport import Transport -from elasticsearch.exceptions import NotFoundError, RequestError +from elasticsearch._sync.client._base import BaseClient +from elasticsearch._sync.client.utils import _rewrite_parameters +from elasticsearch._sync.client.utils import client_node_configs +from elastic_transport import ( + ApiResponseMeta, + HeadApiResponse, + ObjectApiResponse, + Transport, +) +from elastic_transport.client_utils import DEFAULT, DefaultType +from elasticsearch.exceptions import NotFoundError, BadRequestError from elasticmock.behaviour.server_failure import server_failure from elasticmock.fake_cluster import FakeClusterClient from elasticmock.fake_indices import FakeIndicesClient -from elasticmock.utilities import (extract_ignore_as_iterable, get_random_id, - get_random_scroll_id) -from elasticmock.utilities.decorator import for_all_methods +from elasticmock.utilities import get_random_id, get_random_scroll_id +from elasticmock.utilities.decorator import for_all_methods, wrap_object_api_response + + +SelfType = t.TypeVar("SelfType", bound="FakeElasticsearch") PY3 = sys.version_info[0] == 3 if PY3: @@ -285,14 +297,21 @@ def _compare_value_for_field(self, doc_source, 
field, value, ignore_case): return False +class NoOpTransport(Transport): + def perform_request(self, *args, **kwargs): + assert False, "Transport should not be invoked during unit tests." + + @for_all_methods([server_failure]) class FakeElasticsearch(Elasticsearch): - __documents_dict = None + _documents_dict = None - def __init__(self, hosts=None, transport_class=None, **kwargs): - self.__documents_dict = {} - self.__scrolls = {} - self.transport = Transport(_normalize_hosts(hosts), **kwargs) + def __init__(self, hosts=None, *, transport_class=None, _transport=None, **kwargs): + self._documents_dict = {} + self._scrolls = {} + if _transport is None: + _transport = NoOpTransport(hosts, **kwargs) + BaseClient.__init__(self, _transport) @property def indices(self): @@ -302,58 +321,76 @@ def indices(self): def cluster(self): return FakeClusterClient(self) - @query_params() - def ping(self, params=None, headers=None): + def options( + self: SelfType, + ignore_status: t.Union[DefaultType, int, t.Collection[int]] = DEFAULT, + **params + ) -> SelfType: + if ignore_status is not DEFAULT: + if isinstance(ignore_status, int): + ignore_status = (ignore_status,) + self._ignore_status = ignore_status + return self + + @_rewrite_parameters() + def ping(self, **kwargs) -> bool: return True - @query_params() - def info(self, params=None, headers=None): + @_rewrite_parameters() + @wrap_object_api_response + def info(self, **kwargs) -> ObjectApiResponse[t.Any]: return { 'status': 200, 'cluster_name': 'elasticmock', 'version': { - 'lucene_version': '4.10.4', - 'build_hash': '00f95f4ffca6de89d68b7ccaf80d148f1f70e4d4', - 'number': '1.7.5', - 'build_timestamp': '2016-02-02T09:55:30Z', - 'build_snapshot': False + "number" : "8.12.0", + "build_flavor" : "default", + "build_type" : "tar", + "build_hash" : "1665f706fd9354802c02146c1e6b5c0fbcddfbc9", + "build_date" : "2024-01-11T10:05:27.953830042Z", + "build_snapshot" : False, + "lucene_version" : "9.9.1", + 
"minimum_wire_compatibility_version" : "7.17.0", + "minimum_index_compatibility_version" : "7.0.0" }, 'name': 'Nightwatch', 'tagline': 'You Know, for Search' } - @query_params('consistency', - 'op_type', - 'parent', - 'refresh', - 'replication', - 'routing', - 'timeout', - 'timestamp', - 'ttl', - 'version', - 'version_type') - def index(self, index, body, doc_type='_doc', id=None, params=None, headers=None): - if index not in self.__documents_dict: - self.__documents_dict[index] = list() + @_rewrite_parameters( + body_name="document", + ) + @wrap_object_api_response + def index( + self, + *, + index: str, + document: t.Optional[t.Mapping[str, t.Any]] = None, + body: t.Optional[t.Mapping[str, t.Any]] = None, + id: t.Optional[str] = None, + **params + ) -> ObjectApiResponse[t.Any]: + if index not in self._documents_dict: + self._documents_dict[index] = list() version = 1 + doc_type = "_doc" result = 'created' if id is None: id = get_random_id() - elif self.exists(index, id, doc_type=doc_type, params=params): - doc = self.get(index, id, doc_type=doc_type, params=params) + elif self.exists(index=index, id=id, **params): + doc = self.get(index=index, id=id, **params) version = doc['_version'] + 1 - self.delete(index, id, doc_type=doc_type) + self.delete(index=index, id=id) result = 'updated' - self.__documents_dict[index].append({ + self._documents_dict[index].append({ '_type': doc_type, '_id': id, - '_source': body, + '_source': document, '_index': index, '_version': version }) @@ -367,187 +404,195 @@ def index(self, index, body, doc_type='_doc', id=None, params=None, headers=None 'result': result } - @query_params('consistency', 'op_type', 'parent', 'refresh', 'replication', - 'routing', 'timeout', 'timestamp', 'ttl', 'version', 'version_type') - def bulk(self, body, index=None, doc_type=None, params=None, headers=None): + @_rewrite_parameters( + body_name="operations", + parameter_aliases={ + "_source": "source", + "_source_excludes": "source_excludes", + 
"_source_includes": "source_includes", + }, + ) + def bulk( + self, + *, + operations: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + body: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + index: t.Optional[str] = None, + **params + ) -> ObjectApiResponse[t.Any]: items = [] errors = False - for raw_line in body.splitlines(): - if len(raw_line.strip()) > 0: - line = json.loads(raw_line) - - if any(action in line for action in ['index', 'create', 'update', 'delete']): - action = next(iter(line.keys())) - - version = 1 - index = line[action].get('_index') or index - doc_type = line[action].get('_type', "_doc") # _type is deprecated in 7.x - - if action in ['delete', 'update'] and not line[action].get("_id"): - raise RequestError(400, 'action_request_validation_exception', 'missing id') - - document_id = line[action].get('_id', get_random_id()) - - if action == 'delete': - status, result, error = self._validate_action( - action, index, document_id, doc_type, params=params - ) - item = {action: { - '_type': doc_type, - '_id': document_id, - '_index': index, - '_version': version, - 'status': status, - }} - if error: - errors = True - item[action]["error"] = result - else: - self.delete(index, document_id, doc_type=doc_type, params=params) - item[action]["result"] = result - items.append(item) - - if index not in self.__documents_dict: - self.__documents_dict[index] = list() - else: - if 'doc' in line and action == 'update': - source = line['doc'] - else: - source = line + if isinstance(operations, str): + operations = [json.loads(raw_line.strip()) for raw_line in operations.splitlines() if raw_line.strip()] + + for line in operations: + if any(action in line for action in ['index', 'create', 'update', 'delete']): + action = next(iter(line.keys())) + + version = 1 + index = line[action].get('_index') or index + + if action in ['delete', 'update'] and not line[action].get("_id"): + raise BadRequestError(message='action_request_validation_exception, missing 
id', meta=ApiResponseMeta(400, "HTTP/1.1", {}, 1, None), body=operations) + + document_id = line[action].get('_id', get_random_id()) + + if action == 'delete': status, result, error = self._validate_action( - action, index, document_id, doc_type, params=params + action, index, document_id, **params ) - item = { - action: { - '_type': doc_type, - '_id': document_id, - '_index': index, - '_version': version, - 'status': status, - } - } - if not error: - item[action]["result"] = result - if self.exists(index, document_id, doc_type=doc_type, params=params): - doc = self.get(index, document_id, doc_type=doc_type, params=params) - version = doc['_version'] + 1 - self.delete(index, document_id, doc_type=doc_type, params=params) - - self.__documents_dict[index].append({ - '_type': doc_type, - '_id': document_id, - '_source': source, - '_index': index, - '_version': version - }) - else: + item = {action: { + '_type': "_doc", + '_id': document_id, + '_index': index, + '_version': version, + 'status': status, + }} + if error: errors = True item[action]["error"] = result + else: + self.delete(index=index, id=document_id, **params) + item[action]["result"] = result items.append(item) - return { - 'errors': errors, - 'items': items - } - def _validate_action(self, action, index, document_id, doc_type, params=None): - if action in ['index', 'update'] and self.exists(index, id=document_id, doc_type=doc_type, params=params): + if index not in self._documents_dict: + self._documents_dict[index] = list() + else: + if 'doc' in line and action == 'update': + source = line['doc'] + else: + source = line + status, result, error = self._validate_action( + action, index, document_id, **params + ) + item = { + action: { + '_type': "_doc", + '_id': document_id, + '_index': index, + '_version': version, + 'status': status, + } + } + if not error: + item[action]["result"] = result + if self.exists(index=index, id=document_id, **params): + doc = self.get(index=index, id=document_id, **params) + 
version = doc['_version'] + 1 + self.delete(index=index, id=document_id, **params) + + self._documents_dict[index].append({ + '_type': "_doc", + '_id': document_id, + '_source': source, + '_index': index, + '_version': version + }) + else: + errors = True + item[action]["error"] = result + items.append(item) + return ObjectApiResponse( + body={ + 'errors': errors, + 'items': items + }, + meta=ApiResponseMeta(status, "HTTP/1.1", {}, 1, None), + ) + + def _validate_action(self, action, index, document_id, **params): + if action in ['index', 'update'] and self.exists(index=index, id=document_id, **params): return 200, 'updated', False - if action == 'create' and self.exists(index, id=document_id, doc_type=doc_type, params=params): + if action == 'create' and self.exists(index=index, id=document_id, **params): return 409, 'version_conflict_engine_exception', True - elif action in ['index', 'create'] and not self.exists(index, id=document_id, doc_type=doc_type, params=params): + elif action in ['index', 'create'] and not self.exists(index=index, id=document_id, **params): return 201, 'created', False - elif action == "delete" and self.exists(index, id=document_id, doc_type=doc_type, params=params): + elif action == "delete" and self.exists(index=index, id=document_id, **params): return 200, 'deleted', False - elif action == 'update' and not self.exists(index, id=document_id, doc_type=doc_type, params=params): + elif action == 'update' and not self.exists(index=index, id=document_id, **params): return 404, 'document_missing_exception', True - elif action == 'delete' and not self.exists(index, id=document_id, doc_type=doc_type, params=params): + elif action == 'delete' and not self.exists(index=index, id=document_id, **params): return 404, 'not_found', True else: raise NotImplementedError(f"{action} behaviour hasn't been implemented") - @query_params('parent', 'preference', 'realtime', 'refresh', 'routing') - def exists(self, index, id, doc_type=None, params=None, 
headers=None): - result = False - if index in self.__documents_dict: - for document in self.__documents_dict[index]: - if document.get('_id') == id and document.get('_type') == doc_type: - result = True + @_rewrite_parameters( + parameter_aliases={ + "_source": "source", + "_source_excludes": "source_excludes", + "_source_includes": "source_includes", + }, + ) + def exists( + self, + *, + index: str, + id: str, + **params, + ) -> HeadApiResponse: + status = 404 + if index in self._documents_dict: + for document in self._documents_dict[index]: + if document.get('_id') == id: + status = 200 break - return result - @query_params('_source', '_source_exclude', '_source_include', 'fields', - 'parent', 'preference', 'realtime', 'refresh', 'routing', 'version', - 'version_type') - def get(self, index, id, doc_type='_all', params=None, headers=None): - ignore = extract_ignore_as_iterable(params) + return HeadApiResponse( + meta=ApiResponseMeta(status, "HTTP/1.1", {}, 1, None), + ) + + @_rewrite_parameters( + parameter_aliases={ + "_source": "source", + "_source_excludes": "source_excludes", + "_source_includes": "source_includes", + }, + ) + @wrap_object_api_response + def get( + self, + *, + index: str, + id: str, + **params, + ) -> ObjectApiResponse[t.Any]: result = None - if index in self.__documents_dict: - for document in self.__documents_dict[index]: + if index in self._documents_dict: + for document in self._documents_dict[index]: if document.get('_id') == id: - if doc_type == '_all': - result = document - break - else: - if document.get('_type') == doc_type: - result = document - break + result = document + break if result: result['found'] = True return result - elif params and 404 in ignore: + elif self._ignore_status is not DEFAULT and 404 in self._ignore_status: return {'found': False} else: error_data = { '_index': index, - '_type': doc_type, + '_type': "_doc", '_id': id, 'found': False } - raise NotFoundError(404, json.dumps(error_data)) - - @query_params( - 
"_source", - "_source_excludes", - "_source_includes", - "allow_no_indices", - "analyze_wildcard", - "analyzer", - "conflicts", - "default_operator", - "df", - "expand_wildcards", - "from_", - "ignore_unavailable", - "lenient", - "max_docs", - "pipeline", - "preference", - "q", - "refresh", - "request_cache", - "requests_per_second", - "routing", - "scroll", - "scroll_size", - "search_timeout", - "search_type", - "size", - "slices", - "sort", - "stats", - "terminate_after", - "timeout", - "version", - "version_type", - "wait_for_active_shards", - "wait_for_completion", + raise NotFoundError(message="Not Found", meta=ApiResponseMeta(404, "HTTP/1.1", {}, 1, None), body=error_data) + + @_rewrite_parameters( + body_fields=("conflicts", "max_docs", "query", "script", "slice"), + parameter_aliases={"from": "from_"}, ) + @wrap_object_api_response def update_by_query( - self, index, body=None, doc_type=None, params=None, headers=None - ): + self, + *, + index: t.Union[str, t.Sequence[str]], + script: t.Optional[t.Mapping[str, t.Any]] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + **params + ) -> ObjectApiResponse[t.Any]: # Actually it only supports script equal operations # TODO: Full support from painless language total_updated = 0 @@ -556,23 +601,22 @@ def update_by_query( new_values = {} script_params = body['script']['params'] script_source = body['script']['source'] \ - .replace('ctx._source.', '') \ .split(';') for sentence in script_source: if sentence: - field, _, value = sentence.split() - if value.startswith('params.'): - _, key = value.split('.') + mtch = re.match(r"\s*ctx._source.(?P\w+)\s*=\s*\(?params.(?P\w+)\)?\s*", sentence) + if mtch: + field = mtch.group("field") + key = mtch.group("key") value = script_params.get(key) new_values[field] = value - matches = self.search(index=index, doc_type=doc_type, body=body, - params=params, headers=headers) + matches = self.search(index=index, body=body, **params) if matches['hits']['total']: for hit in 
matches['hits']['hits']: body = hit['_source'] body.update(new_values) - self.index(index, body, doc_type=hit['_type'], id=hit['_id']) + self.index(index=index, body=body, doc_type=hit['_type'], id=hit['_id']) total_updated += 1 return { @@ -591,51 +635,94 @@ def update_by_query( 'failures': [] } - - @query_params('_source', '_source_exclude', '_source_include', - 'preference', 'realtime', 'refresh', 'routing', - 'stored_fields') - def mget(self, body, index, doc_type='_all', params=None, headers=None): - docs = body.get('docs') + @_rewrite_parameters( + body_fields=("docs", "ids"), + parameter_aliases={ + "_source": "source", + "_source_excludes": "source_excludes", + "_source_includes": "source_includes", + }, + ) + @wrap_object_api_response + def mget( + self, + *, + index: t.Optional[str] = None, + docs: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + **params + ) -> ObjectApiResponse[t.Any]: + docs = body.get('docs') or docs ids = [doc['_id'] for doc in docs] results = [] for id in ids: try: - results.append(self.get(index, id, doc_type=doc_type, - params=params, headers=headers)) + results.append(self.get(index=index, id=id, **params)) except: pass if not results: - raise RequestError( - 400, - 'action_request_validation_exception', - 'Validation Failed: 1: no documents to get;' - ) + raise BadRequestError(message='action_request_validation_exception; Validation Failed: 1: no documents to get;', meta=ApiResponseMeta(400, "HTTP/1.1", {}, 1, None), body=body) return {'docs': results} - @query_params('_source', '_source_exclude', '_source_include', 'parent', - 'preference', 'realtime', 'refresh', 'routing', 'version', - 'version_type') - def get_source(self, index, doc_type, id, params=None, headers=None): - document = self.get(index=index, doc_type=doc_type, id=id, params=params) + @_rewrite_parameters( + parameter_aliases={ + "_source": "source", + "_source_excludes": "source_excludes", + 
"_source_includes": "source_includes", + }, + ) + @wrap_object_api_response + def get_source( + self, + *, + index: str, + id: str, + **params + ) -> ObjectApiResponse[t.Any]: + document = self.get(index=index, id=id, **params) return document.get('_source') - @query_params('_source', '_source_exclude', '_source_include', - 'allow_no_indices', 'analyze_wildcard', 'analyzer', 'default_operator', - 'df', 'expand_wildcards', 'explain', 'fielddata_fields', 'fields', - 'from_', 'ignore_unavailable', 'lenient', 'lowercase_expanded_terms', - 'preference', 'q', 'request_cache', 'routing', 'scroll', 'search_type', - 'size', 'sort', 'stats', 'suggest_field', 'suggest_mode', - 'suggest_size', 'suggest_text', 'terminate_after', 'timeout', - 'track_scores', 'version') - def count(self, index=None, doc_type=None, body=None, params=None, headers=None): + @_rewrite_parameters( + body_fields=("scroll_id",), + ) + @wrap_object_api_response + def clear_scroll( + self, + *, + scroll_id: t.Optional[t.Union[str, t.Sequence[str]]] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + succeeded = True + num_freed = 1 + if scroll_id == "_all": + num_freed = (len(self._scrolls)) + self._scrolls.clear() + elif scroll_id in self._scrolls: + del self._scrolls[scroll_id] + else: + succeeded = True + num_freed = 0 + + return { + "succeeded": succeeded, + "num_freed": num_freed, + } + + @_rewrite_parameters( + body_fields=("query",), + ) + @wrap_object_api_response + def count( + self, + *, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + **params + ) -> ObjectApiResponse[t.Any]: searchable_indexes = self._normalize_index_to_list(index) i = 0 for searchable_index in searchable_indexes: - for document in self.__documents_dict[searchable_index]: - if doc_type and document.get('_type') != doc_type: - continue + for document in self._documents_dict[searchable_index]: i += 1 result = { 'count': i, @@ -652,16 +739,17 @@ def count(self, index=None, 
doc_type=None, body=None, params=None, headers=None) def _get_fake_query_condition(self, query_type_str, condition): return FakeQueryCondition(QueryType.get_query_type(query_type_str), condition) - @query_params( - "ccs_minimize_roundtrips", - "max_concurrent_searches", - "max_concurrent_shard_requests", - "pre_filter_shard_size", - "rest_total_hits_as_int", - "search_type", - "typed_keys", + @_rewrite_parameters( + body_name="searches", ) - def msearch(self, body, index=None, doc_type=None, params=None, headers=None): + @wrap_object_api_response + def msearch( + self, + *, + searches: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + body: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + **params + ) -> ObjectApiResponse[t.Any]: def grouped(iterable): if len(iterable) % 2 != 0: raise Exception('Malformed body') @@ -674,7 +762,7 @@ def grouped(iterable): responses = [] took = 0 - for ind, query in grouped(body): + for ind, query in grouped(searches): response = self.search(index=ind, body=query) took += response['took'] responses.append(response) @@ -684,16 +772,66 @@ def grouped(iterable): } return result - @query_params('_source', '_source_exclude', '_source_include', - 'allow_no_indices', 'analyze_wildcard', 'analyzer', 'default_operator', - 'df', 'expand_wildcards', 'explain', 'fielddata_fields', 'fields', - 'from_', 'ignore_unavailable', 'lenient', 'lowercase_expanded_terms', - 'preference', 'q', 'request_cache', 'routing', 'scroll', 'search_type', - 'size', 'sort', 'stats', 'suggest_field', 'suggest_mode', - 'suggest_size', 'suggest_text', 'terminate_after', 'timeout', - 'track_scores', 'version') - def search(self, index=None, doc_type=None, body=None, params=None, headers=None): + @_rewrite_parameters( + body_fields=( + "aggregations", + "aggs", + "collapse", + "docvalue_fields", + "explain", + "ext", + "fields", + "from_", + "highlight", + "indices_boost", + "knn", + "min_score", + "pit", + "post_filter", + "profile", + "query", + "rank", + 
"rescore", + "runtime_mappings", + "script_fields", + "search_after", + "seq_no_primary_term", + "size", + "slice", + "sort", + "source", + "stats", + "stored_fields", + "suggest", + "terminate_after", + "timeout", + "track_scores", + "track_total_hits", + "version", + ), + parameter_aliases={ + "_source": "source", + "_source_excludes": "source_excludes", + "_source_includes": "source_includes", + "from": "from_", + }, + ) + @wrap_object_api_response + def search( + self, + *, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, + aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, + from_: t.Optional[int] = None, + query: t.Optional[t.Mapping[str, t.Any]] = None, + scroll: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, + size: t.Optional[int] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + **params + ) -> ObjectApiResponse[t.Any]: searchable_indexes = self._normalize_index_to_list(index) + body = body if body is not None else {} matches = [] conditions = [] @@ -704,13 +842,7 @@ def search(self, index=None, doc_type=None, body=None, params=None, headers=None conditions.append(self._get_fake_query_condition(query_type_str, condition)) for searchable_index in searchable_indexes: - for document in self.__documents_dict[searchable_index]: - - if doc_type: - if isinstance(doc_type, list) and document.get('_type') not in doc_type: - continue - if isinstance(doc_type, str) and document.get('_type') != doc_type: - continue + for document in self._documents_dict[searchable_index]: if conditions: for condition in conditions: if condition.evaluate(document): @@ -744,10 +876,14 @@ def search(self, index=None, doc_type=None, body=None, params=None, headers=None hits.append(match) # build aggregations - if body is not None and 'aggs' in body: + _aggregations = aggregations or aggs + if body: + _aggregations = body.get("aggregations", None) or body.get("aggs", 
None) + + if _aggregations: aggregations = {} - for aggregation, definition in body['aggs'].items(): + for aggregation, definition in _aggregations.items(): aggregations[aggregation] = { "doc_count_error_upper_bound": 0, "sum_other_doc_count": 0, @@ -757,99 +893,95 @@ def search(self, index=None, doc_type=None, body=None, params=None, headers=None if aggregations: result['aggregations'] = aggregations - if 'scroll' in params: + _size = int(body.get("size", size or 10)) + _from_ = int(body.get("from", from_ or 0)) + + if scroll is not None: result['_scroll_id'] = str(get_random_scroll_id()) - params['size'] = int(params.get('size', 10)) - params['from'] = int(params.get('from') + params.get('size') if 'from' in params else 0) - self.__scrolls[result.get('_scroll_id')] = { + params['size'] = _size + params['from'] = _from_ + _size if _from_ is not None else 0 + self._scrolls[result.get('_scroll_id')] = { 'index': index, - 'doc_type': doc_type, + 'doc_type': "_doc", 'body': body, 'params': params } - hits = hits[params.get('from'):params.get('from') + params.get('size')] - elif 'size' in params: - hits = hits[:int(params['size'])] - elif body and 'size' in body: - hits = hits[:int(body['size'])] + hits = hits[_from_:_from_ + _size] + elif _size is not None: + hits = hits[:_size] result['hits']['hits'] = hits return result - @query_params('scroll') - def scroll(self, scroll_id, params=None, headers=None): - scroll = self.__scrolls.pop(scroll_id) + @_rewrite_parameters( + body_fields=("scroll_id", "scroll"), + ) + @wrap_object_api_response + def scroll( + self, + *, + scroll_id: t.Optional[str] = None, + **params + ) -> ObjectApiResponse[t.Any]: + scroll = self._scrolls.pop(scroll_id, None) + if scroll is None: + raise NotFoundError( + message="Not Found", + meta=ApiResponseMeta(404, "HTTP/1.1", {}, 1, None), + body={ + "type": "search_phase_execution_exception", + "reason": "all shards failed", + "phase": "query", + "grouped": True, + } + ) + result = self.search( 
index=scroll.get('index'), - doc_type=scroll.get('doc_type'), - body=scroll.get('body'), - params=scroll.get('params') + # body=scroll.get("body"), + scroll=scroll, + **scroll.get("params"), ) return result - @query_params('consistency', 'parent', 'refresh', 'replication', 'routing', - 'timeout', 'version', 'version_type') - def delete(self, index, id, doc_type=None, params=None, headers=None): - + @_rewrite_parameters() + @wrap_object_api_response + def delete( + self, + *, + index: str, + id: str, + **params + ) -> ObjectApiResponse[t.Any]: found = False - ignore = extract_ignore_as_iterable(params) - if index in self.__documents_dict: - for document in self.__documents_dict[index]: + if index in self._documents_dict: + for document in self._documents_dict[index]: if document.get('_id') == id: found = True - if doc_type and document.get('_type') != doc_type: - found = False - if found: - self.__documents_dict[index].remove(document) - break + self._documents_dict[index].remove(document) + break result_dict = { 'found': found, '_index': index, - '_type': doc_type, + '_type': "_doc", '_id': id, '_version': 1, } if found: return result_dict - elif params and 404 in ignore: + elif self._ignore_status is not DEFAULT and 404 in self._ignore_status: return {'found': False} else: - raise NotFoundError(404, json.dumps(result_dict)) - - @query_params('allow_no_indices', 'expand_wildcards', 'ignore_unavailable', - 'preference', 'routing') - def suggest(self, body, index=None, params=None, headers=None): - if index is not None and index not in self.__documents_dict: - raise NotFoundError(404, 'IndexMissingException[[{0}] missing]'.format(index)) - - result_dict = {} - for key, value in body.items(): - text = value.get('text') - suggestion = int(text) + 1 if isinstance(text, int) else '{0}_suggestion'.format(text) - result_dict[key] = [ - { - 'text': text, - 'length': 1, - 'options': [ - { - 'text': suggestion, - 'freq': 1, - 'score': 1.0 - } - ], - 'offset': 0 - } - ] - 
return result_dict + raise NotFoundError(message="Not Found", meta=ApiResponseMeta(404, "HTTP/1.1", {}, 1, None), body=result_dict) def _normalize_index_to_list(self, index): # Ensure to have a list of index if index is None: - searchable_indexes = self.__documents_dict.keys() + searchable_indexes = self._documents_dict.keys() elif isinstance(index, str) or isinstance(index, unicode): searchable_indexes = [index] elif isinstance(index, list): @@ -860,8 +992,8 @@ def _normalize_index_to_list(self, index): # Check index(es) exists for searchable_index in searchable_indexes: - if searchable_index not in self.__documents_dict: - raise NotFoundError(404, 'IndexMissingException[[{0}] missing]'.format(searchable_index)) + if searchable_index not in self._documents_dict: + raise NotFoundError(message='IndexMissingException[[{0}] missing]'.format(searchable_index), meta=ApiResponseMeta(404, "HTTP/1.1", {}, 1, None), body={}) return searchable_indexes diff --git a/elasticmock/fake_indices.py b/elasticmock/fake_indices.py index 611fc1a..ff40df8 100644 --- a/elasticmock/fake_indices.py +++ b/elasticmock/fake_indices.py @@ -1,32 +1,83 @@ # -*- coding: utf-8 -*- +import typing as t -from elasticsearch.client.indices import IndicesClient -from elasticsearch.client.utils import query_params +from elastic_transport import ObjectApiResponse, HeadApiResponse, ApiResponseMeta +from elasticsearch.exceptions import NotFoundError, BadRequestError + +from elasticsearch._sync.client.indices import IndicesClient +from elasticsearch._sync.client.utils import _rewrite_parameters +from elasticmock.utilities.decorator import wrap_object_api_response class FakeIndicesClient(IndicesClient): - @query_params('master_timeout', 'timeout') - def create(self, index, body=None, params=None, headers=None, *args, **kwargs): - documents_dict = self.__get_documents_dict() + @_rewrite_parameters( + body_fields=("aliases", "mappings", "settings"), + ) + @wrap_object_api_response + def create( + self, + *, + 
index: str, + body: t.Optional[t.Dict[str, t.Any]] = None, + **params + ) -> ObjectApiResponse[t.Any]: + documents_dict = self._get_documents_dict() if index not in documents_dict: documents_dict[index] = [] + return { + "acknowledged": True, + "shards_acknowledged": True, + "index": index, + } + else: + raise BadRequestError(message='resource_already_exists_exception', meta=ApiResponseMeta(400, "HTTP/1.1", {}, 1, None), body={"type": "resource_already_exists_exception"}) - @query_params('allow_no_indices', 'expand_wildcards', 'ignore_unavailable', - 'local') - def exists(self, index, params=None, headers=None): - return index in self.__get_documents_dict() + @_rewrite_parameters( + body_fields=("aliases", "mappings", "settings"), + ) + def exists( + self, + *, + index: str, + body: t.Optional[t.Dict[str, t.Any]] = None, + **params + ) -> HeadApiResponse: + status = 200 if index in self._get_documents_dict() else 404 + return HeadApiResponse( + meta=ApiResponseMeta(status, "HTTP/1.1", {}, 1, None), + ) - @query_params('allow_no_indices', 'expand_wildcards', 'force', - 'ignore_unavailable', 'operation_threading') - def refresh(self, index=None, params=None, headers=None): - pass + @_rewrite_parameters() + def refresh( + self, + index: t.Optional[t.Union[str, t.Sequence[str]]] = None, + **params + ) -> ObjectApiResponse[t.Any]: + if self.exists(index=index): + return { + "_shards": { + "total": 1, + "successful": 1, + "failed": 0 + } + } + else: + raise NotFoundError(message=f"no such index [{index}]", meta=ApiResponseMeta(404, "HTTP/1.1", {}, 1, None), body={"type": "index_not_found_exception"}) - @query_params('master_timeout', 'timeout') - def delete(self, index, params=None, headers=None): - documents_dict = self.__get_documents_dict() + @_rewrite_parameters() + def delete( + self, + *, + index: t.Union[str, t.Sequence[str]], + **params + ) -> ObjectApiResponse[t.Any]: + documents_dict = self._get_documents_dict() if index in documents_dict: del 
documents_dict[index] + return {"acknowledged": True} + else: + raise NotFoundError(message=f"no such index [{index}]", meta=ApiResponseMeta(404, "HTTP/1.1", {}, 1, None), body={"type": "index_not_found_exception"}) - def __get_documents_dict(self): - return self.client._FakeElasticsearch__documents_dict + def _get_documents_dict(self): + return self._client._documents_dict diff --git a/elasticmock/utilities/__init__.py b/elasticmock/utilities/__init__.py index 09aa690..bb9c528 100644 --- a/elasticmock/utilities/__init__.py +++ b/elasticmock/utilities/__init__.py @@ -15,12 +15,4 @@ def get_random_id(size=DEFAULT_ELASTICSEARCH_ID_SIZE): def get_random_scroll_id(size=DEFAULT_ELASTICSEARCH_SEARCHRESULTPHASE_COUNT): - return base64.b64encode(''.join(get_random_id() for _ in range(size)).encode()) - - -def extract_ignore_as_iterable(params): - """Extracts the value of the ignore parameter as iterable""" - ignore = params.get('ignore', ()) - if isinstance(ignore, int): - ignore = (ignore,) - return ignore + return base64.b64encode(''.join(get_random_id() for _ in range(size)).encode()).decode() diff --git a/elasticmock/utilities/decorator.py b/elasticmock/utilities/decorator.py index 0031dfc..497faa3 100644 --- a/elasticmock/utilities/decorator.py +++ b/elasticmock/utilities/decorator.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- +import typing as t +import functools +from elastic_transport import ApiResponseMeta, ObjectApiResponse def for_all_methods(decorators, apply_on_public_only=True): @@ -14,3 +17,13 @@ def decorate(cls): setattr(cls, attr, decorator(getattr(cls, attr))) return cls return decorate + + +def wrap_object_api_response(func): + @functools.wraps(func) + def wrapper(*args: t.Any, **kwargs: t.Any): + return ObjectApiResponse( + body=func(*args, **kwargs), + meta=ApiResponseMeta(200, "HTTP/1.1", {}, 1, None), + ) + return wrapper diff --git a/requirements.txt b/requirements.txt index 57b936e..0add3cf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 
+1,3 @@ -elasticsearch>=1.9.0,<8.0.0 +elasticsearch>=8.0.0,<9.0.0 ipdb -python-dateutil \ No newline at end of file +python-dateutil diff --git a/setup.py b/setup.py index ce853d8..36ac42a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ import setuptools -__version__ = '1.8.1' +__version__ = '2.0.0' # read the contents of your readme file from os import path @@ -21,7 +21,7 @@ url='https://github.com/vrcmarcos/elasticmock', packages=setuptools.find_packages(exclude=('tests')), install_requires=[ - 'elasticsearch<8.0.0', + 'elasticsearch>=8.0.0,<9.0.0', 'python-dateutil', ], classifiers=[ @@ -34,6 +34,9 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', "License :: OSI Approved :: MIT License", 'Topic :: Software Development :: Libraries :: Python Modules' ] diff --git a/tests/__init__.py b/tests/__init__.py index 8eccaed..fd7253b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -8,7 +8,7 @@ from elasticmock import elasticmock INDEX_NAME = 'test_index' -DOC_TYPE = 'doc-Type' +DOC_TYPE = '_doc' DOC_ID = 'doc-id' BODY = { 'author': 'kimchy', @@ -21,4 +21,4 @@ class TestElasticmock(unittest.TestCase): @elasticmock def setUp(self): - self.es = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}]) + self.es = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200, 'scheme': 'http'}]) diff --git a/tests/fake_cluster/test_health.py b/tests/fake_cluster/test_health.py index c3df143..ae83b8d 100644 --- a/tests/fake_cluster/test_health.py +++ b/tests/fake_cluster/test_health.py @@ -6,7 +6,7 @@ class TestHealth(TestElasticmock): def test_should_return_health(self): - health_status = self.es.cluster.health() + health_status = self.es.cluster.health().body expected_health_status = { 'cluster_name': 'testcluster', diff --git 
a/tests/fake_elasticsearch/test_count.py b/tests/fake_elasticsearch/test_count.py index a481f40..f957371 100644 --- a/tests/fake_elasticsearch/test_count.py +++ b/tests/fake_elasticsearch/test_count.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- - +import unittest from tests import TestElasticmock, DOC_TYPE +import elasticsearch class TestCount(TestElasticmock): @@ -31,6 +32,7 @@ def test_should_return_skipped_shards(self): count = self.es.count(doc_type=[]) self.assertEqual(0, count.get('_shards').get('skipped')) + @unittest.skipIf(elasticsearch.__version__ > (8, 0), "Custom doc-types deprecated in ES 6, not supported in ES 8 or higher.") def test_should_count_with_doc_types(self): self.es.index(index='index', doc_type=DOC_TYPE, body={'data': 'test1'}) self.es.index(index='index', doc_type='different-doc-type', body={'data': 'test2'}) diff --git a/tests/fake_elasticsearch/test_delete.py b/tests/fake_elasticsearch/test_delete.py index 14ec435..48da7b5 100644 --- a/tests/fake_elasticsearch/test_delete.py +++ b/tests/fake_elasticsearch/test_delete.py @@ -12,21 +12,23 @@ def test_should_raise_exception_when_delete_nonindexed_document(self): self.es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=1) def test_should_not_raise_exception_when_delete_nonindexed_document_if_ignored(self): - target_doc = self.es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=1, ignore=404) + ignore_es = self.es.options(ignore_status=404) + target_doc = ignore_es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=1) self.assertFalse(target_doc.get('found')) def test_should_not_raise_exception_when_delete_nonindexed_document_if_ignored_list(self): - target_doc = self.es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=1, ignore=(401, 404)) + ignore_es = self.es.options(ignore_status=(401, 404)) + target_doc = ignore_es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=1) self.assertFalse(target_doc.get('found')) def test_should_delete_indexed_document(self): doc_indexed = self.es.index(index=INDEX_NAME, 
doc_type=DOC_TYPE, body=BODY) - search = self.es.search(index=INDEX_NAME) + search = self.es.search(index=INDEX_NAME).body self.assertEqual(1, search.get('hits').get('total').get('value')) doc_id = doc_indexed.get('_id') - doc_deleted = self.es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=doc_id) - search = self.es.search(index=INDEX_NAME) + doc_deleted = self.es.delete(index=INDEX_NAME, doc_type=DOC_TYPE, id=doc_id).body + search = self.es.search(index=INDEX_NAME).body self.assertEqual(0, search.get('hits').get('total').get('value')) expected_doc_deleted = { diff --git a/tests/fake_elasticsearch/test_get.py b/tests/fake_elasticsearch/test_get.py index fa41536..2906a9c 100644 --- a/tests/fake_elasticsearch/test_get.py +++ b/tests/fake_elasticsearch/test_get.py @@ -12,18 +12,20 @@ def test_should_raise_notfounderror_when_nonindexed_id_is_used(self): self.es.get(index=INDEX_NAME, id='1') def test_should_not_raise_notfounderror_when_nonindexed_id_is_used_and_ignored(self): - target_doc = self.es.get(index=INDEX_NAME, id='1', ignore=404) + ignore_es = self.es.options(ignore_status=404) + target_doc = ignore_es.get(index=INDEX_NAME, id='1') self.assertFalse(target_doc.get('found')) def test_should_not_raise_notfounderror_when_nonindexed_id_is_used_and_ignored_list(self): - target_doc = self.es.get(index=INDEX_NAME, id='1', ignore=(401, 404)) + ignore_es = self.es.options(ignore_status=(401, 404)) + target_doc = ignore_es.get(index=INDEX_NAME, id='1') self.assertFalse(target_doc.get('found')) def test_should_get_document_with_id(self): data = self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) document_id = data.get('_id') - target_doc = self.es.get(index=INDEX_NAME, id=document_id) + target_doc = self.es.get(index=INDEX_NAME, id=document_id).body expected = { '_type': DOC_TYPE, @@ -40,7 +42,7 @@ def test_should_get_document_with_id_and_doc_type(self): data = self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) document_id = data.get('_id') - 
target_doc = self.es.get(index=INDEX_NAME, id=document_id, doc_type=DOC_TYPE) + target_doc = self.es.get(index=INDEX_NAME, id=document_id, doc_type=DOC_TYPE).body expected = { '_type': DOC_TYPE, @@ -57,7 +59,7 @@ def test_should_get_only_document_source_with_id(self): data = self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) document_id = data.get('_id') - target_doc_source = self.es.get_source(index=INDEX_NAME, doc_type=DOC_TYPE, id=document_id) + target_doc_source = self.es.get_source(index=INDEX_NAME, doc_type=DOC_TYPE, id=document_id).body self.assertEqual(target_doc_source, BODY) @@ -66,5 +68,5 @@ def test_mget_get_several_documents_by_id(self): for _ in range(0, 10): data = self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) ids.append(data.get('_id')) - results = self.es.mget(index=INDEX_NAME, body={'docs': [{'_id': id} for id in ids]}) + results = self.es.mget(index=INDEX_NAME, body={'docs': [{'_id': id} for id in ids]}).body self.assertEqual(len(results['docs']), 10) diff --git a/tests/fake_elasticsearch/test_index.py b/tests/fake_elasticsearch/test_index.py index 2d3c217..98a58e9 100644 --- a/tests/fake_elasticsearch/test_index.py +++ b/tests/fake_elasticsearch/test_index.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- +import unittest from tests import TestElasticmock, INDEX_NAME, DOC_TYPE, BODY +import elasticsearch UPDATED_BODY = { 'author': 'vrcmarcos', @@ -27,6 +29,7 @@ def test_should_index_document_without_doc_type(self): self.assertEqual(1, data.get('_version')) self.assertEqual(INDEX_NAME, data.get('_index')) + @unittest.skipIf(elasticsearch.__version__ > (8, 0), "Custom doc-types deprecated in ES 6, not supported in ES 8 or higher.") def test_doc_type_can_be_list(self): doc_types = ['1_idx', '2_idx', '3_idx'] count_per_doc_type = 3 @@ -45,7 +48,7 @@ def test_update_existing_doc(self): data = self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) document_id = data.get('_id') self.es.index(index=INDEX_NAME, id=document_id, 
doc_type=DOC_TYPE, body=UPDATED_BODY) - target_doc = self.es.get(index=INDEX_NAME, id=document_id) + target_doc = self.es.get(index=INDEX_NAME, id=document_id).body expected = { '_type': DOC_TYPE, @@ -75,4 +78,4 @@ def test_update_by_query(self): } }) target_doc = self.es.get(index=INDEX_NAME, id=document_id) - self.assertEqual(target_doc['_source']['author'], new_author) \ No newline at end of file + self.assertEqual(target_doc['_source']['author'], new_author) diff --git a/tests/fake_elasticsearch/test_instance.py b/tests/fake_elasticsearch/test_instance.py index 80267f3..eaaca02 100644 --- a/tests/fake_elasticsearch/test_instance.py +++ b/tests/fake_elasticsearch/test_instance.py @@ -14,6 +14,6 @@ def test_should_create_fake_elasticsearch_instance(self): @elasticmock def test_should_return_same_elastic_instance_when_instantiate_more_than_one_instance_with_same_host(self): - es1 = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}]) - es2 = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}]) + es1 = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200, 'scheme': 'http'}]) + es2 = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200, 'scheme': 'http'}]) self.assertEqual(es1, es2) diff --git a/tests/fake_elasticsearch/test_scroll.py b/tests/fake_elasticsearch/test_scroll.py index 1b90191..b42ee6a 100644 --- a/tests/fake_elasticsearch/test_scroll.py +++ b/tests/fake_elasticsearch/test_scroll.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- - +from elasticsearch.exceptions import NotFoundError from tests import TestElasticmock, INDEX_NAME, DOC_TYPE, BODY @@ -9,7 +9,7 @@ def test_scrolling(self): for _ in range(100): self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) - result = self.es.search(index=INDEX_NAME, params={'scroll': '1m', 'size': 30}) + result = self.es.search(index=INDEX_NAME, **{'scroll': '1m', 'size': 30}) self.__assert_scroll(result, 30) for _ in range(2): @@ -19,6 +19,26 
@@ def test_scrolling(self): result = self.es.scroll(scroll_id=result.get('_scroll_id'), scroll='1m') self.__assert_scroll(result, 10) + def test_clear_scroll(self): + for _ in range(100): + self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) + + result = self.es.search(index=INDEX_NAME, **{'scroll': '1m', 'size': 30}) + self.__assert_scroll(result, 30) + + self.es.clear_scroll(scroll_id=result.get('_scroll_id')) + with self.assertRaises(NotFoundError): + result = self.es.scroll(scroll_id=result.get('_scroll_id'), scroll='1m') + + # Clear using _all as scroll_id + result = self.es.search(index=INDEX_NAME, **{'scroll': '1m', 'size': 30}) + self.__assert_scroll(result, 30) + + self.es.clear_scroll(scroll_id='_all') + assert self.es._scrolls == {} + with self.assertRaises(NotFoundError): + result = self.es.scroll(scroll_id=result.get('_scroll_id'), scroll='1m') + def __assert_scroll(self, result, expected_scroll_hits): hits = result.get('hits') diff --git a/tests/fake_elasticsearch/test_search.py b/tests/fake_elasticsearch/test_search.py index f31c64b..007adf1 100644 --- a/tests/fake_elasticsearch/test_search.py +++ b/tests/fake_elasticsearch/test_search.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- import datetime +import unittest +import elasticsearch from elasticsearch.exceptions import NotFoundError from parameterized import parameterized @@ -46,6 +48,7 @@ def test_should_return_only_indexed_documents_on_index(self): search = self.es.search(index=INDEX_NAME) self.assertEqual(index_quantity, search.get('hits').get('total').get('value')) + @unittest.skipIf(elasticsearch.__version__ > (8, 0), "Custom doc-types deprecated in ES 6, not supported in ES 8 or higher.") def test_should_return_only_indexed_documents_on_index_with_doc_type(self): index_quantity = 2 for i in range(0, index_quantity): @@ -75,7 +78,7 @@ def test_search_with_scroll_param(self): for _ in range(100): self.es.index(index='groups', doc_type='groups', body={'budget': 1000}) - result = 
self.es.search(index='groups', params={'scroll': '1m', 'size': 30}) + result = self.es.search(index='groups', **{'scroll': '1m', 'size': 30}) self.assertNotEqual(None, result.get('_scroll_id', None)) self.assertEqual(30, len(result.get('hits').get('hits'))) self.assertEqual(100, result.get('hits').get('total').get('value')) @@ -165,7 +168,7 @@ def test_search_with_terms_query(self): def test_query_on_nested_data(self): for i, y in enumerate(['yes', 'no']): - self.es.index('index_for_search', doc_type=DOC_TYPE, + self.es.index(index='index_for_search', doc_type=DOC_TYPE, body={'id': i, 'data': {'x': i, 'y': y}}) for term, value, i in [('data.x', 1, 1), ('data.y', 'yes', 0)]: diff --git a/tests/fake_elasticsearch/test_suggest.py b/tests/fake_elasticsearch/test_suggest.py deleted file mode 100644 index 14d94a9..0000000 --- a/tests/fake_elasticsearch/test_suggest.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- - -from elasticsearch.exceptions import NotFoundError - -from tests import TestElasticmock, INDEX_NAME, DOC_TYPE, BODY - - -class TestSuggest(TestElasticmock): - - def test_should_raise_notfounderror_when_nonindexed_id_is_used_for_suggest(self): - with self.assertRaises(NotFoundError): - self.es.suggest(body={}, index=INDEX_NAME) - - def test_should_return_suggestions(self): - self.es.index(index=INDEX_NAME, doc_type=DOC_TYPE, body=BODY) - suggestion_body = { - 'suggestion-string': { - 'text': 'test_text', - 'term': { - 'field': 'string' - } - }, - 'suggestion-id': { - 'text': 1234567, - 'term': { - 'field': 'id' - } - } - } - suggestion = self.es.suggest(body=suggestion_body, index=INDEX_NAME) - self.assertIsNotNone(suggestion) - self.assertDictEqual({ - 'suggestion-string': [ - { - 'text': 'test_text', - 'length': 1, - 'options': [ - { - 'text': 'test_text_suggestion', - 'freq': 1, - 'score': 1.0 - } - ], - 'offset': 0 - } - ], - 'suggestion-id': [ - { - 'text': 1234567, - 'length': 1, - 'options': [ - { - 'text': 1234568, - 'freq': 1, - 'score': 1.0 
- } - ], - 'offset': 0 - } - ], - }, suggestion) diff --git a/tests/fake_elasticsearch/test_transport_failure.py b/tests/fake_elasticsearch/test_transport_failure.py new file mode 100644 index 0000000..3d4f07a --- /dev/null +++ b/tests/fake_elasticsearch/test_transport_failure.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- + +from tests import TestElasticmock + + +class TestTransport(TestElasticmock): + + def test_should_raise_error(self): + with self.assertRaises(AssertionError): + self.es.perform_request("HEAD", "/", params={}, headers={}) diff --git a/tests/fake_indices/test_create.py b/tests/fake_indices/test_create.py index 92c815f..947c417 100644 --- a/tests/fake_indices/test_create.py +++ b/tests/fake_indices/test_create.py @@ -6,6 +6,6 @@ class TestCreate(TestElasticmock): def test_should_create_index(self): - self.assertFalse(self.es.indices.exists(INDEX_NAME)) - self.es.indices.create(INDEX_NAME) - self.assertTrue(self.es.indices.exists(INDEX_NAME)) + self.assertFalse(self.es.indices.exists(index=INDEX_NAME)) + self.es.indices.create(index=INDEX_NAME) + self.assertTrue(self.es.indices.exists(index=INDEX_NAME)) diff --git a/tests/fake_indices/test_delete.py b/tests/fake_indices/test_delete.py index 5c5aac5..4a4d994 100644 --- a/tests/fake_indices/test_delete.py +++ b/tests/fake_indices/test_delete.py @@ -1,21 +1,22 @@ # -*- coding: utf-8 -*- - +from elasticsearch.exceptions import NotFoundError from tests import TestElasticmock, INDEX_NAME class TestDelete(TestElasticmock): def test_should_delete_index(self): - self.assertFalse(self.es.indices.exists(INDEX_NAME)) + self.assertFalse(self.es.indices.exists(index=INDEX_NAME)) - self.es.indices.create(INDEX_NAME) - self.assertTrue(self.es.indices.exists(INDEX_NAME)) + self.es.indices.create(index=INDEX_NAME) + self.assertTrue(self.es.indices.exists(index=INDEX_NAME)) - self.es.indices.delete(INDEX_NAME) - self.assertFalse(self.es.indices.exists(INDEX_NAME)) + self.es.indices.delete(index=INDEX_NAME) + 
self.assertFalse(self.es.indices.exists(index=INDEX_NAME)) def test_should_delete_inexistent_index(self): - self.assertFalse(self.es.indices.exists(INDEX_NAME)) + self.assertFalse(self.es.indices.exists(index=INDEX_NAME)) - self.es.indices.delete(INDEX_NAME) - self.assertFalse(self.es.indices.exists(INDEX_NAME)) + with self.assertRaises(NotFoundError): + self.es.indices.delete(index=INDEX_NAME) + self.assertFalse(self.es.indices.exists(index=INDEX_NAME)) diff --git a/tests/fake_indices/test_exists.py b/tests/fake_indices/test_exists.py index 75b8918..3af8115 100644 --- a/tests/fake_indices/test_exists.py +++ b/tests/fake_indices/test_exists.py @@ -6,8 +6,8 @@ class TestExists(TestElasticmock): def test_should_return_false_when_index_does_not_exists(self): - self.assertFalse(self.es.indices.exists(INDEX_NAME)) + self.assertFalse(self.es.indices.exists(index=INDEX_NAME)) def test_should_return_true_when_index_exists(self): - self.es.indices.create(INDEX_NAME) - self.assertTrue(self.es.indices.exists(INDEX_NAME)) + self.es.indices.create(index=INDEX_NAME) + self.assertTrue(self.es.indices.exists(index=INDEX_NAME)) diff --git a/tests/fake_indices/test_refresh.py b/tests/fake_indices/test_refresh.py index cf11a2d..99d98db 100644 --- a/tests/fake_indices/test_refresh.py +++ b/tests/fake_indices/test_refresh.py @@ -6,6 +6,6 @@ class TestRefresh(TestElasticmock): def test_should_refresh_index(self): - self.es.indices.create(INDEX_NAME) - self.es.indices.refresh(INDEX_NAME) - self.assertTrue(self.es.indices.exists(INDEX_NAME)) + self.es.indices.create(index=INDEX_NAME) + self.es.indices.refresh(index=INDEX_NAME) + self.assertTrue(self.es.indices.exists(index=INDEX_NAME)) diff --git a/tox.ini b/tox.ini index 6dcc43f..74888fd 100644 --- a/tox.ini +++ b/tox.ini @@ -3,19 +3,23 @@ envlist = py36-elasticsearch{1,2,5,6,7} py37-elasticsearch{1,2,5,6,7} - py38-elasticsearch{1,2,5,6,7} - py39-elasticsearch{1,2,5,6,7} + py38-elasticsearch{1,2,5,6,7,8} + py39-elasticsearch{1,2,5,6,7,8} 
+ py310-elasticsearch{1,2,5,6,7,8} + py311-elasticsearch{1,2,5,6,7,8} + py312-elasticsearch{1,2,5,6,7,8} [testenv] deps = parameterized - pytest==4.6.9 - pytest-cov==2.8.1 + pytest==8.2.2 + pytest-cov==5.0.0 elasticsearch1: elasticsearch ==1.9.0 elasticsearch2: elasticsearch >=2.0.0, <5.0.0 elasticsearch5: elasticsearch >=5.0.0, <6.0.0 elasticsearch6: elasticsearch >=6.0.0, <7.0.0 elasticsearch7: elasticsearch >=7.0.0, <8.0.0 + elasticsearch8: elasticsearch >=8.0.0, <9.0.0 commands = python -c "import tests.tox_banner" py.test --cov-report term-missing --cov=elasticmock From edea356da987cbbd382494def19aa65db0c0dd98 Mon Sep 17 00:00:00 2001 From: majikman111 Date: Fri, 4 Oct 2024 14:10:57 -0400 Subject: [PATCH 2/2] Update fake_elasticsearch.py FIXED: Interface changed in ES8.13.0 and body should no longer be referenced directly. --- elasticmock/fake_elasticsearch.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/elasticmock/fake_elasticsearch.py b/elasticmock/fake_elasticsearch.py index 8941b65..879202e 100644 --- a/elasticmock/fake_elasticsearch.py +++ b/elasticmock/fake_elasticsearch.py @@ -595,12 +595,14 @@ def update_by_query( ) -> ObjectApiResponse[t.Any]: # Actually it only supports script equal operations # TODO: Full support from painless language + if body and script is None: + script = body['script'] total_updated = 0 if isinstance(index, list): index, = index new_values = {} - script_params = body['script']['params'] - script_source = body['script']['source'] \ + script_params = script['params'] + script_source = script['source'] \ .split(';') for sentence in script_source: if sentence: