Update E2E tests for large document generation to 99MiB
Signed-off-by: Andre Kurait <akurait@amazon.com>
AndreKurait committed May 16, 2024
1 parent 47626f8 commit c2d7337
Showing 2 changed files with 14 additions and 6 deletions.
@@ -5,6 +5,8 @@ RUN echo y | /usr/share/elasticsearch/bin/elasticsearch-plugin install https://m
RUN pushd /usr/share/elasticsearch/plugins/search-guard-7/tools ; chmod ugo+x ./install_demo_configuration.sh ; yes | ./install_demo_configuration.sh ; popd
ENV ELASTIC_SEARCH_CONFIG_FILE=/usr/share/elasticsearch/config/elasticsearch.yml
ENV PROXY_TLS_CONFIG_FILE=/usr/share/elasticsearch/config/proxy_tls.yml
ENV ELASTIC_SEARCH_JVM_CONFIG_FILE=/usr/share/elasticsearch/config/jvm.options

# without this line, elasticsearch will complain that there aren't enough nodes
RUN echo "discovery.type: single-node" >> $ELASTIC_SEARCH_CONFIG_FILE
COPY disableTlsConfig.sh enableTlsConfig.sh /root/
@@ -19,5 +21,11 @@ RUN /root/enableTlsConfig.sh $ELASTIC_SEARCH_CONFIG_FILE
# Alter this config line to either enable(searchguard.disabled: false) or disable(searchguard.disabled: true) HTTP auth
RUN echo "searchguard.disabled: false" >> $ELASTIC_SEARCH_CONFIG_FILE

RUN sed -i '/^-Xms/i # Increase default heap to 80% RAM, Requires JDK >= 10' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \
sed -i 's/^-Xms/#&/' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \
sed -i 's/^-Xmx/#&/' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \
sed -i '/^#-Xmx/s/$/\n-XX:MaxRAMPercentage=80.0/' $ELASTIC_SEARCH_JVM_CONFIG_FILE


#CMD tail -f /dev/null
CMD /usr/local/bin/docker-entrypoint.sh eswrapper
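
For reference, a sketch of the jvm.options fragment those four sed edits should produce, assuming the stock Elasticsearch 7.x defaults of -Xms1g and -Xmx1g (the values in a given image may differ):

# Increase default heap to 80% RAM, Requires JDK >= 10
#-Xms1g
#-Xmx1g
-XX:MaxRAMPercentage=80.0

With the fixed -Xms/-Xmx flags commented out, the JVM derives the heap from -XX:MaxRAMPercentage instead, so the container uses 80% of whatever memory it is granted rather than a hard-coded 1 GiB.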
12 changes: 6 additions & 6 deletions test/tests.py
@@ -111,7 +111,7 @@ def does_index_match_ignored_index(self, index_name: str):
return True
return False

def assert_source_target_doc_match(self, index_name, doc_id, doc_body : dict = None):
def assert_source_target_doc_match(self, index_name, doc_id, doc_body: dict = None):
source_response = get_document(self.source_endpoint, index_name, doc_id, self.source_auth,
self.source_verify_ssl)
self.assertEqual(source_response.status_code, HTTPStatus.OK)
@@ -347,8 +347,8 @@ def test_0008_largeRequest(self):
index_name = f"test_0008_{self.unique_id}"
doc_id = "1"

# Create large document, 99MiB which is less than the default max of
# 100MiB in ES/OS settings (http.max_content_length)
# Create large document, 99MiB
# Default max 100MiB in ES/OS settings (http.max_content_length)
large_doc = generate_large_doc(size_mib=99)

# Measure the time taken by the create_document call
@@ -359,14 +359,14 @@ def test_0008_largeRequest(self):
end_time = time.time()
duration = end_time - start_time

# Set wait time to response time or 1 second
wait_time_seconds = min(round(duration, 3), 1)
# Set wait time to double the response time, capped at 5 seconds
wait_time_seconds = min(round(duration, 3) * 2, 5)

self.assertEqual(proxy_response.status_code, HTTPStatus.CREATED)

# Wait for the measured duration
logger.debug(f"Waiting {wait_time_seconds} seconds for"
f" replay large doc creation")
f" replay of large doc creation")

time.sleep(wait_time_seconds)

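generate_large_doc is a helper defined elsewhere in the test suite, so its body is not part of this diff. A minimal sketch of what such a helper could look like, where the field name and single-character padding are illustrative assumptions rather than the suite's actual implementation:

def generate_large_doc(size_mib):
    # Pad a single field so the serialized document is roughly
    # size_mib MiB; "large_field" is a hypothetical field name.
    payload = "a" * (size_mib * 1024 * 1024)
    return {"large_field": payload}

On the wait-time change: min(round(duration, 3) * 2, 5) waits twice as long as the proxied create took, capped at 5 seconds. For example, a 1.8 s create yields a 3.6 s wait, while anything at or above 2.5 s waits the full 5 s. The previous min(round(duration, 3), 1) capped the wait at 1 second, which was presumably too short for the replayer to finish re-sending a 99 MiB document.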
