Merge pull request #636 from AndreKurait/E2ELargeRequestCDC
Add E2E large request test
peternied authored May 16, 2024
2 parents: 121ba5d + c2d7337 · commit a35d536
Showing 4 changed files with 69 additions and 8 deletions.
@@ -5,6 +5,8 @@ RUN echo y | /usr/share/elasticsearch/bin/elasticsearch-plugin install https://m
RUN pushd /usr/share/elasticsearch/plugins/search-guard-7/tools ; chmod ugo+x ./install_demo_configuration.sh ; yes | ./install_demo_configuration.sh ; popd
ENV ELASTIC_SEARCH_CONFIG_FILE=/usr/share/elasticsearch/config/elasticsearch.yml
ENV PROXY_TLS_CONFIG_FILE=/usr/share/elasticsearch/config/proxy_tls.yml
ENV ELASTIC_SEARCH_JVM_CONFIG_FILE=/usr/share/elasticsearch/config/jvm.options

# without this line, elasticsearch will complain that there aren't enough nodes
RUN echo "discovery.type: single-node" >> $ELASTIC_SEARCH_CONFIG_FILE
COPY disableTlsConfig.sh enableTlsConfig.sh /root/
@@ -19,5 +21,11 @@ RUN /root/enableTlsConfig.sh $ELASTIC_SEARCH_CONFIG_FILE
# Alter this config line to either enable(searchguard.disabled: false) or disable(searchguard.disabled: true) HTTP auth
RUN echo "searchguard.disabled: false" >> $ELASTIC_SEARCH_CONFIG_FILE

RUN sed -i '/^-Xms/i # Increase default heap to 80% RAM, Requires JDK >= 10' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \
sed -i 's/^-Xms/#&/' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \
sed -i 's/^-Xmx/#&/' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \
sed -i '/^#-Xmx/s/$/\n-XX:MaxRAMPercentage=80.0/' $ELASTIC_SEARCH_JVM_CONFIG_FILE


#CMD tail -f /dev/null
CMD /usr/local/bin/docker-entrypoint.sh eswrapper
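
For reference, after those sed edits the heap section of jvm.options should end up looking roughly like this (a sketch assuming the stock -Xms1g/-Xmx1g defaults; the actual values depend on the base image):

# Increase default heap to 80% RAM, Requires JDK >= 10
#-Xms1g
#-Xmx1g
-XX:MaxRAMPercentage=80.0

With the fixed -Xms/-Xmx values commented out, the JVM derives its heap size from MaxRAMPercentage, so the heap scales with whatever memory the container is granted.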
2 changes: 1 addition & 1 deletion test/README.md
@@ -68,7 +68,7 @@ This script accepts various parameters to customize its behavior. Below is a lis
- Default: `admin`

- `--target_password`: Password for authentication with the target endpoint.
- Default: `admin`
- Default: `myStrongPassword123!`


#### Clean Up
30 changes: 25 additions & 5 deletions test/operations.py
@@ -1,3 +1,6 @@
import datetime
import random
import string
import json
from requests import Session

@@ -27,12 +30,29 @@ def delete_document(endpoint: str, index_name: str, doc_id: str, auth,
return response


def create_document(endpoint: str, index_name: str, doc_id: str, auth,
verify_ssl: bool = False, session: Session = Session()):
document = {
'title': 'Test Document',
'content': 'This is a sample document for testing OpenSearch.'
def generate_large_doc(size_mib):
# Calculate number of characters needed (1 char = 1 byte)
num_chars = size_mib * 1024 * 1024

# Generate random string of the desired length
large_string = ''.join(random.choices(string.ascii_letters + string.digits, k=num_chars))

return {
"timestamp": datetime.datetime.now().isoformat(),
"large_field": large_string
}


def create_document(endpoint: str, index_name: str, doc_id: str, auth,
verify_ssl: bool = False, doc_body: dict = None, session: Session = Session()):
if doc_body is None:
document = {
'title': 'Test Document',
'content': 'This is a sample document for testing OpenSearch.'
}
else:
document = doc_body

url = f'{endpoint}/{index_name}/_doc/{doc_id}'
headers = {'Content-Type': 'application/json'}
response = session.put(url, headers=headers, data=json.dumps(document), auth=auth, verify=verify_ssl)
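
A minimal usage sketch of the extended create_document signature, assuming a locally reachable cluster with demo credentials (the endpoint, index name, and auth below are illustrative, not part of the change):

from operations import create_document, generate_large_doc

# Build a ~5 MiB random document; omitting doc_body keeps the old
# small default test document, so existing callers are unaffected.
doc = generate_large_doc(size_mib=5)
response = create_document("https://localhost:9200", "my-index", "1",
                           ("admin", "admin"), doc_body=doc)
print(response.status_code)  # 201 (Created) on first insert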
37 changes: 35 additions & 2 deletions test/tests.py
@@ -5,6 +5,7 @@
import requests
import secrets
import string
from operations import generate_large_doc
import subprocess
import time
import unittest
@@ -110,7 +111,7 @@ def does_index_match_ignored_index(self, index_name: str):
return True
return False

def assert_source_target_doc_match(self, index_name, doc_id):
def assert_source_target_doc_match(self, index_name, doc_id, doc_body: dict = None):
source_response = get_document(self.source_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl)
self.assertEqual(source_response.status_code, HTTPStatus.OK)
@@ -127,6 +128,8 @@ def assert_source_target_doc_match(self, index_name, doc_id):
target_document = target_response.json()
target_content = target_document['_source']
self.assertEqual(source_content, target_content)
if doc_body is not None:
self.assertEqual(source_content, doc_body)

def set_common_values(self):
self.index_prefix_ignore_list = ["test_", ".", "searchguard", "sg7", "security-auditlog"]
@@ -327,7 +330,7 @@ def test_0007_timeBetweenRequestsOnSameConnection(self):
for doc_id_int in range(number_of_docs):
doc_id = str(doc_id_int)
proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl, proxy_single_connection_session)
self.source_verify_ssl, session=proxy_single_connection_session)
self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

if doc_id_int + 1 < number_of_docs:
@@ -339,3 +342,33 @@ def test_0007_timeBetweenRequestsOnSameConnection(self):
self.assert_source_target_doc_match(index_name, doc_id)
finally:
proxy_single_connection_session.close()

def test_0008_largeRequest(self):
index_name = f"test_0008_{self.unique_id}"
doc_id = "1"

# Create large document, 99MiB
# Default max 100MiB in ES/OS settings (http.max_content_length)
large_doc = generate_large_doc(size_mib=99)

# Measure the time taken by the create_document call
# Send large request to proxy and verify response
start_time = time.time()
proxy_response = create_document(self.proxy_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl, doc_body=large_doc)
end_time = time.time()
duration = end_time - start_time

# Set wait time to double the response time, capped at 5 seconds
wait_time_seconds = min(round(duration, 3) * 2, 5)

self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

# Wait for the measured duration
logger.debug(f"Waiting {wait_time_seconds} seconds for"
f" replay of large doc creation")

time.sleep(wait_time_seconds)

# Verify document created on source and target
self.assert_source_target_doc_match(index_name, doc_id, doc_body=large_doc)
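
The 99 MiB payload deliberately sits just under the 100 MiB http.max_content_length default mentioned in the test's comment. A quick sketch for checking the effective limit on a running cluster (endpoint and credentials are placeholders):

import requests

# include_defaults=true is required; otherwise unset settings are omitted
resp = requests.get("https://localhost:9200/_cluster/settings",
                    params={"include_defaults": "true",
                            "filter_path": "defaults.http.max_content_length"},
                    auth=("admin", "myStrongPassword123!"), verify=False)
print(resp.json())  # e.g. {'defaults': {'http': {'max_content_length': '100mb'}}}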
