Skip to content

Commit

Permalink
Merge pull request opensearch-project#637 from AndreKurait/E2EWaitBetweenRequestsOnConnection
Browse files Browse the repository at this point in the history

Add a test that verifies the replayer functions correctly when waiting a minute between requests on the same connection
  • Loading branch information
AndreKurait authored May 16, 2024
2 parents 9fbb98d + 82690c5 commit 121ba5d
Show file tree
Hide file tree
Showing 2 changed files with 74 additions and 40 deletions.
26 changes: 13 additions & 13 deletions test/operations.py
Original file line number Diff line number Diff line change
@@ -1,49 +1,49 @@
import json
from typing import Optional

import requests
from requests import Session


def create_index(endpoint: str, index_name: str, auth, verify_ssl: bool = False,
                 session: Optional[Session] = None):
    """Create ``index_name`` on the cluster at ``endpoint`` via HTTP PUT.

    Args:
        endpoint: Base URL of the cluster (no trailing slash).
        index_name: Name of the index to create.
        auth: Credentials passed through to requests' ``auth=``.
        verify_ssl: TLS certificate verification flag (default False).
        session: Optional requests Session to reuse an existing connection
            pool. A fresh Session is created per call when omitted — using
            ``session=Session()`` as the default would create one shared
            mutable instance at definition time (classic Python pitfall).

    Returns:
        The requests ``Response`` from the PUT.
    """
    if session is None:
        session = Session()
    response = session.put(f'{endpoint}/{index_name}', auth=auth, verify=verify_ssl)

    return response


def check_index(endpoint: str, index_name: str, auth, verify_ssl: bool = False,
                session: Optional[Session] = None):
    """Fetch ``index_name`` metadata via HTTP GET (existence check).

    Args:
        endpoint: Base URL of the cluster (no trailing slash).
        index_name: Index to look up.
        auth: Credentials passed through to requests' ``auth=``.
        verify_ssl: TLS certificate verification flag (default False).
        session: Optional requests Session for connection reuse; a fresh
            one is created per call when omitted. ``None`` sentinel avoids
            the shared mutable-default ``Session()`` pitfall.

    Returns:
        The requests ``Response`` from the GET.
    """
    if session is None:
        session = Session()
    response = session.get(f'{endpoint}/{index_name}', auth=auth, verify=verify_ssl)

    return response


def delete_index(endpoint: str, index_name: str, auth, verify_ssl: bool = False,
                 session: Optional[Session] = None):
    """Delete ``index_name`` from the cluster at ``endpoint`` via HTTP DELETE.

    Args:
        endpoint: Base URL of the cluster (no trailing slash).
        index_name: Index to delete.
        auth: Credentials passed through to requests' ``auth=``.
        verify_ssl: TLS certificate verification flag (default False).
        session: Optional requests Session for connection reuse; a fresh
            one is created per call when omitted. ``None`` sentinel avoids
            the shared mutable-default ``Session()`` pitfall.

    Returns:
        The requests ``Response`` from the DELETE.
    """
    if session is None:
        session = Session()
    response = session.delete(f'{endpoint}/{index_name}', auth=auth, verify=verify_ssl)

    return response


def delete_document(endpoint: str, index_name: str, doc_id: str, auth,
                    verify_ssl: bool = False, session: Optional[Session] = None):
    """Delete document ``doc_id`` from ``index_name`` via HTTP DELETE.

    Args:
        endpoint: Base URL of the cluster (no trailing slash).
        index_name: Index containing the document.
        doc_id: Identifier of the document to delete.
        auth: Credentials passed through to requests' ``auth=``.
        verify_ssl: TLS certificate verification flag (default False).
        session: Optional requests Session for connection reuse; a fresh
            one is created per call when omitted. ``None`` sentinel avoids
            the shared mutable-default ``Session()`` pitfall.

    Returns:
        The requests ``Response`` from the DELETE.
    """
    if session is None:
        session = Session()
    response = session.delete(f'{endpoint}/{index_name}/_doc/{doc_id}', auth=auth, verify=verify_ssl)

    return response


def create_document(endpoint: str, index_name: str, doc_id: str, auth,
                    verify_ssl: bool = False, session: Optional[Session] = None):
    """PUT a fixed sample document into ``index_name`` as ``doc_id``.

    The document body is a hard-coded title/content pair used purely as
    test fixture data.

    Args:
        endpoint: Base URL of the cluster (no trailing slash).
        index_name: Index to write into.
        doc_id: Identifier for the new document.
        auth: Credentials passed through to requests' ``auth=``.
        verify_ssl: TLS certificate verification flag (default False).
        session: Optional requests Session for connection reuse; a fresh
            one is created per call when omitted. ``None`` sentinel avoids
            the shared mutable-default ``Session()`` pitfall.

    Returns:
        The requests ``Response`` from the PUT.
    """
    if session is None:
        session = Session()
    document = {
        'title': 'Test Document',
        'content': 'This is a sample document for testing OpenSearch.'
    }
    url = f'{endpoint}/{index_name}/_doc/{doc_id}'
    headers = {'Content-Type': 'application/json'}
    response = session.put(url, headers=headers, data=json.dumps(document), auth=auth, verify=verify_ssl)

    return response


def get_document(endpoint: str, index_name: str, doc_id: str, auth,
                 verify_ssl: bool = False, session: Optional[Session] = None):
    """Fetch document ``doc_id`` from ``index_name`` via HTTP GET.

    Args:
        endpoint: Base URL of the cluster (no trailing slash).
        index_name: Index containing the document.
        doc_id: Identifier of the document to fetch.
        auth: Credentials passed through to requests' ``auth=``.
        verify_ssl: TLS certificate verification flag (default False).
        session: Optional requests Session for connection reuse; a fresh
            one is created per call when omitted. ``None`` sentinel avoids
            the shared mutable-default ``Session()`` pitfall.

    Returns:
        The requests ``Response`` from the GET.
    """
    if session is None:
        session = Session()
    url = f'{endpoint}/{index_name}/_doc/{doc_id}'
    headers = {'Content-Type': 'application/json'}
    response = session.get(url, headers=headers, auth=auth, verify=verify_ssl)

    return response
88 changes: 61 additions & 27 deletions test/tests.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,22 @@
import boto3
import json
import subprocess

from operations import create_index, check_index, create_document, \
delete_document, delete_index, get_document
from http import HTTPStatus
from typing import Tuple, Callable
import unittest
import logging
import time
import pytest
import requests
import string
import secrets
import pytest
import boto3
import string
import subprocess
import time
import unittest
from http import HTTPStatus
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, SSLError
from requests_aws4auth import AWS4Auth
from typing import Tuple, Callable

from requests.exceptions import ConnectionError, SSLError
from operations import create_index, check_index, create_document, \
delete_document, delete_index, get_document

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -109,6 +110,24 @@ def does_index_match_ignored_index(self, index_name: str):
return True
return False

def assert_source_target_doc_match(self, index_name, doc_id):
source_response = get_document(self.source_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl)
self.assertEqual(source_response.status_code, HTTPStatus.OK)

target_response = retry_request(get_document, args=(self.target_endpoint, index_name, doc_id,
self.target_auth, self.target_verify_ssl),
expected_status_code=HTTPStatus.OK)
self.assertEqual(target_response.status_code, HTTPStatus.OK)

# Comparing the document's content on both endpoints, asserting
# that they match.
source_document = source_response.json()
source_content = source_document['_source']
target_document = target_response.json()
target_content = target_document['_source']
self.assertEqual(source_content, target_content)

def set_common_values(self):
self.index_prefix_ignore_list = ["test_", ".", "searchguard", "sg7", "security-auditlog"]

Expand Down Expand Up @@ -179,21 +198,7 @@ def test_0002_document(self):
self.source_verify_ssl)
self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

source_response = get_document(self.source_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl)
self.assertEqual(source_response.status_code, HTTPStatus.OK)

target_response = retry_request(get_document, args=(self.target_endpoint, index_name, doc_id,
self.target_auth, self.target_verify_ssl),
expected_status_code=HTTPStatus.OK)
self.assertEqual(target_response.status_code, HTTPStatus.OK)

# Comparing the document's content on both endpoints, asserting that they match.
source_document = source_response.json()
source_content = source_document['_source']
target_document = target_response.json()
target_content = target_document['_source']
self.assertEqual(source_content, target_content)
self.assert_source_target_doc_match(index_name, doc_id)

# Deleting the document that was created then asserting that it was deleted on both targets.
proxy_response = delete_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
Expand Down Expand Up @@ -305,3 +310,32 @@ def test_0006_OSB(self):
if source_count != target_count:
self.assertEqual(source_count, target_count, f'{index}: doc counts do not match - '
f'Source = {source_count}, Target = {target_count}')

def test_0007_timeBetweenRequestsOnSameConnection(self):
# This test will verify that the replayer functions correctly when
# requests on the same connection on the proxy that has a minute gap
seconds_between_requests = 60 # 1 minute

proxy_single_connection_session = Session()
adapter = HTTPAdapter(pool_connections=1, pool_maxsize=1, max_retries=1)
proxy_single_connection_session.mount(self.proxy_endpoint, adapter)

index_name = f"test_0007_{self.unique_id}"

number_of_docs = 3

for doc_id_int in range(number_of_docs):
doc_id = str(doc_id_int)
proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl, proxy_single_connection_session)
self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

if doc_id_int + 1 < number_of_docs:
time.sleep(seconds_between_requests)

try:
for doc_id_int in range(number_of_docs):
doc_id = str(doc_id_int)
self.assert_source_target_doc_match(index_name, doc_id)
finally:
proxy_single_connection_session.close()

0 comments on commit 121ba5d

Please sign in to comment.