Update E2E tests for large document generation
Signed-off-by: Andre Kurait <akurait@amazon.com>
AndreKurait committed May 14, 2024
1 parent ba31311 commit af1f7c3
Showing 2 changed files with 54 additions and 6 deletions.
test/operations.py (25 additions, 5 deletions)
@@ -1,3 +1,6 @@
import datetime
import random
import string
import json
from requests import Session

@@ -27,12 +30,29 @@ def delete_document(endpoint: str, index_name: str, doc_id: str, auth,
    return response


def create_document(endpoint: str, index_name: str, doc_id: str, auth,
                    verify_ssl: bool = False, session: Session = Session()):
    document = {
        'title': 'Test Document',
        'content': 'This is a sample document for testing OpenSearch.'
def generate_large_doc(size_mb):
    # Calculate number of characters needed (1 char = 1 byte)
    num_chars = size_mb * 1000 * 1000

    # Generate random string of the desired length
    large_string = ''.join(random.choices(string.ascii_letters + string.digits, k=num_chars))

    return {
        "timestamp": datetime.datetime.now().isoformat(),
        "large_field": large_string
    }


def create_document(endpoint: str, index_name: str, doc_id: str, auth,
                    verify_ssl: bool = False, doc_body: dict = None, session: Session = Session()):
    if doc_body is None:
        document = {
            'title': 'Test Document',
            'content': 'This is a sample document for testing OpenSearch.'
        }
    else:
        document = doc_body

    url = f'{endpoint}/{index_name}/_doc/{doc_id}'
    headers = {'Content-Type': 'application/json'}
    response = session.put(url, headers=headers, data=json.dumps(document), auth=auth, verify=verify_ssl)
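As a side note, here is a minimal sketch of how the updated helpers might be exercised together; the endpoint URL, credentials, and index name below are illustrative assumptions, not values from this commit:

# Illustrative sketch only -- endpoint, credentials, and index name are assumptions.
import json
from requests.auth import HTTPBasicAuth
from operations import create_document, generate_large_doc

doc = generate_large_doc(size_mb=20)
# 20 * 1000 * 1000 ASCII characters is roughly 20 MB once JSON-encoded,
# comfortably under the default 100 MB http.max_content_length limit.
print(f"payload size: {len(json.dumps(doc).encode('utf-8'))} bytes")

response = create_document("https://localhost:9200", "large-doc-index", "1",
                           HTTPBasicAuth("admin", "admin"),
                           verify_ssl=False, doc_body=doc)
print(response.status_code)  # expect 201 (Created)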
test/tests.py (29 additions, 1 deletion)
@@ -5,6 +5,7 @@
import requests
import secrets
import string
from operations import generate_large_doc
import subprocess
import time
import unittest
@@ -327,7 +328,7 @@ def test_0007_timeBetweenRequestsOnSameConnection(self):
            for doc_id_int in range(number_of_docs):
                doc_id = str(doc_id_int)
                proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
                                                 self.source_verify_ssl, proxy_single_connection_session)
                                                 self.source_verify_ssl, session=proxy_single_connection_session)
                self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

                if doc_id_int + 1 < number_of_docs:
@@ -339,3 +340,30 @@ def test_0007_timeBetweenRequestsOnSameConnection(self):
                self.assert_source_target_doc_match(index_name, doc_id)
        finally:
            proxy_single_connection_session.close()

    def test_0008_largeRequest(self):
        index_name = f"test_0008_{self.unique_id}"
        doc_id = "1"

        # Create large document, 20MB which is less than the default max of
        # 100MB in http.max_content_length
        large_doc = generate_large_doc(size_mb=20)

        # Measure the time taken by the create_document call
        # Send large request to proxy and verify response
        start_time = time.time()
        proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
                                         self.source_verify_ssl, doc_body=large_doc)
        end_time = time.time()
        duration = end_time - start_time

        self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

        # Wait for the measured duration
        logger.debug(f"Waiting {round(duration, 3)} seconds for expected time "
                     f"to replay large doc creation")

        time.sleep(duration)

        # Verify document created on source and target
        self.assert_source_target_doc_match(index_name, doc_id)
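The sleep above uses the measured request duration as a rough estimate of how long the replayer needs to replay the large request before the source/target comparison. Not part of this commit, but as a sketch of an alternative under the same setup, the target could instead be polled until the document shows up or a timeout expires:

# Hypothetical helper, not in the commit: poll the target until the doc is visible.
import time
import requests

def wait_for_doc(endpoint, index_name, doc_id, auth, timeout_seconds=60, verify_ssl=False):
    url = f"{endpoint}/{index_name}/_doc/{doc_id}"
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        if requests.get(url, auth=auth, verify=verify_ssl).status_code == 200:
            return True
        time.sleep(1)
    return False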
