Skip to content

Commit

Permalink
SNOW-872499: use maxHttpRetries on chunk downloader requests to prevent infinite retries (#1484)
Browse files Browse the repository at this point in the history

use maxHttpRetries on chunk downloader requests
  • Loading branch information
sfc-gh-ext-simba-lb authored Jul 24, 2023
1 parent 0c3991c commit ee19422
Show file tree
Hide file tree
Showing 8 changed files with 90 additions and 4 deletions.
2 changes: 2 additions & 0 deletions src/main/java/net/snowflake/client/core/SFBaseSession.java
Original file line number Diff line number Diff line change
Expand Up @@ -840,6 +840,8 @@ public SFConnectionHandler getSfConnectionHandler() {

public abstract int getAuthTimeout();

public abstract int getMaxHttpRetries();

public abstract SnowflakeConnectString getSnowflakeConnectionString();

public abstract boolean isAsyncSession();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ public SFBaseSession getSession() {
private final int networkTimeoutInMilli;
private final int authTimeout;
private final int socketTimeout;
private final int maxHttpRetries;
private final SFBaseSession session;

public ChunkDownloadContext(
Expand All @@ -65,6 +66,7 @@ public ChunkDownloadContext(
int networkTimeoutInMilli,
int authTimeout,
int socketTimeout,
int maxHttpRetries,
SFBaseSession session) {
this.chunkDownloader = chunkDownloader;
this.resultChunk = resultChunk;
Expand All @@ -74,6 +76,7 @@ public ChunkDownloadContext(
this.networkTimeoutInMilli = networkTimeoutInMilli;
this.authTimeout = authTimeout;
this.socketTimeout = socketTimeout;
this.maxHttpRetries = maxHttpRetries;
this.session = session;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@ else if (context.getQrmk() != null) {
false, // no retry parameters in url
false, // no request_guid
true, // retry on HTTP403 for AWS S3
true, // no retry on http request
new ExecTimeTelemetryData());

SnowflakeResultSetSerializableV1.logger.debug(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ public class SnowflakeChunkDownloader implements ChunkDownloader {

private final int socketTimeout;

private final int maxHttpRetries;
private long memoryLimit;

// the current memory usage across JVM
Expand Down Expand Up @@ -120,7 +121,6 @@ static long getCurrentMemoryUsage() {
private static final long downloadedConditionTimeoutInSeconds =
HttpUtil.getDownloadedConditionTimeoutInSeconds();

private static final int MAX_NUM_OF_RETRY = 10;
private static final int MAX_RETRY_JITTER = 1000; // milliseconds

// Only controls the max retry number when prefetch runs out of memory
Expand Down Expand Up @@ -194,6 +194,7 @@ public SnowflakeChunkDownloader(SnowflakeResultSetSerializableV1 resultSetSerial
this.networkTimeoutInMilli = resultSetSerializable.getNetworkTimeoutInMilli();
this.authTimeout = resultSetSerializable.getAuthTimeout();
this.socketTimeout = resultSetSerializable.getSocketTimeout();
this.maxHttpRetries = resultSetSerializable.getMaxHttpRetries();
this.prefetchSlots = resultSetSerializable.getResultPrefetchThreads() * 2;
this.queryResultFormat = resultSetSerializable.getQueryResultFormat();
logger.debug("qrmk = {}", this.qrmk);
Expand Down Expand Up @@ -406,6 +407,7 @@ private void startNextDownloaders() throws SnowflakeSQLException {
networkTimeoutInMilli,
authTimeout,
socketTimeout,
maxHttpRetries,
this.session));
downloaderFutures.put(nextChunkToDownload, downloaderFuture);
// increment next chunk to download
Expand Down Expand Up @@ -634,7 +636,7 @@ public SnowflakeResultChunk getNextChunkToConsume()
private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws InterruptedException {
int retry = 0;
long startTime = System.currentTimeMillis();
while (currentChunk.getDownloadState() != DownloadState.SUCCESS && retry < MAX_NUM_OF_RETRY) {
while (currentChunk.getDownloadState() != DownloadState.SUCCESS && retry < maxHttpRetries) {
logger.debug(
"Thread {} is waiting for #chunk{} to be ready, current" + "chunk state is: {}, retry={}",
Thread.currentThread().getId(),
Expand Down Expand Up @@ -704,6 +706,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru
networkTimeoutInMilli,
authTimeout,
socketTimeout,
maxHttpRetries,
session));
downloaderFutures.put(nextChunkToConsume, downloaderFuture);
// Only when prefetch fails due to internal memory limitation, nextChunkToDownload
Expand All @@ -715,7 +718,7 @@ private void waitForChunkReady(SnowflakeResultChunk currentChunk) throws Interru
}
if (currentChunk.getDownloadState() == DownloadState.SUCCESS) {
logger.debug("ready to consume #chunk{}, succeed retry={}", nextChunkToConsume, retry);
} else if (retry >= MAX_NUM_OF_RETRY) {
} else if (retry >= maxHttpRetries) {
// stop retrying and report failure
currentChunk.setDownloadState(DownloadState.FAILURE);
currentChunk.setDownloadError(
Expand Down Expand Up @@ -859,6 +862,7 @@ private static Callable<Void> getDownloadChunkCallable(
final int networkTimeoutInMilli,
final int authTimeout,
final int socketTimeout,
final int maxHttpRetries,
final SFBaseSession session) {
ChunkDownloadContext downloadContext =
new ChunkDownloadContext(
Expand All @@ -870,6 +874,7 @@ private static Callable<Void> getDownloadChunkCallable(
networkTimeoutInMilli,
authTimeout,
socketTimeout,
maxHttpRetries,
session);

return new Callable<Void>() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@ public String toString() {
int networkTimeoutInMilli;
int authTimeout;
int socketTimeout;
int maxHttpRetries;
boolean isResultColumnCaseInsensitive;
int resultSetType;
int resultSetConcurrency;
Expand Down Expand Up @@ -195,6 +196,7 @@ private SnowflakeResultSetSerializableV1(SnowflakeResultSetSerializableV1 toCopy
this.networkTimeoutInMilli = toCopy.networkTimeoutInMilli;
this.authTimeout = toCopy.authTimeout;
this.socketTimeout = toCopy.socketTimeout;
this.maxHttpRetries = toCopy.maxHttpRetries;
this.isResultColumnCaseInsensitive = toCopy.isResultColumnCaseInsensitive;
this.resultSetType = toCopy.resultSetType;
this.resultSetConcurrency = toCopy.resultSetConcurrency;
Expand Down Expand Up @@ -317,6 +319,10 @@ public int getSocketTimeout() {
return socketTimeout;
}

public int getMaxHttpRetries() {
return maxHttpRetries;
}

public int getResultPrefetchThreads() {
return resultPrefetchThreads;
}
Expand Down Expand Up @@ -662,6 +668,7 @@ public static SnowflakeResultSetSerializableV1 create(
resultSetSerializable.snowflakeConnectionString = sfSession.getSnowflakeConnectionString();
resultSetSerializable.networkTimeoutInMilli = sfSession.getNetworkTimeoutInMilli();
resultSetSerializable.authTimeout = 0;
resultSetSerializable.maxHttpRetries = sfSession.getMaxHttpRetries();
resultSetSerializable.isResultColumnCaseInsensitive = sfSession.isResultColumnCaseInsensitive();
resultSetSerializable.treatNTZAsUTC = sfSession.getTreatNTZAsUTC();
resultSetSerializable.formatDateWithTimezone = sfSession.getFormatDateWithTimezone();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ public void setup() throws SQLException, InterruptedException {
((SnowflakeResultSetSerializableV1) resultSetSerializable).getChunkHeadersMap();
sfContext =
new ChunkDownloadContext(
downloader, chunk, qrmk, 0, chunkHeadersMap, 0, 0, 0, sfBaseSession);
downloader, chunk, qrmk, 0, chunkHeadersMap, 0, 0, 0, 7, sfBaseSession);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -711,6 +711,14 @@ public int getAuthTimeout() {
return 0;
}

/**
 * @return the fixed maximum number of HTTP retries (7) used by this mock session
 */
@Override
public int getMaxHttpRetries() {
return 7;
}

public SnowflakeConnectString getSnowflakeConnectionString() {
return null;
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
/*
 * Copyright (c) 2022 Snowflake Computing Inc. All rights reserved.
*/
package net.snowflake.client.jdbc;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;
import org.junit.Test;
import org.mockito.Mockito;

public class SnowflakeChunkDownloaderLatestIT extends BaseJDBCTest {

  /**
   * Tests that the chunk downloader honors maxHttpRetries and doesn't enter an infinite loop of
   * retries.
   *
   * @throws SQLException if the connection or query fails for an unexpected reason
   * @throws InterruptedException if waiting for the chunk download is interrupted
   */
  @Test
  public void testChunkDownloaderRetry() throws SQLException, InterruptedException {
    // Remember the previous proxy settings so they can be restored afterwards;
    // leaving them set would break every test that runs later in the same JVM.
    String previousProxyHost = System.getProperty("https.proxyHost");
    String previousProxyPort = System.getProperty("https.proxyPort");
    String previousNonProxyHosts = System.getProperty("http.nonProxyHosts");

    // set proxy to invalid host and bypass the snowflakecomputing.com domain
    // this will cause connection issues to the internal stage on fetching
    System.setProperty("https.proxyHost", "127.0.0.1");
    System.setProperty("https.proxyPort", "8080");
    System.setProperty("http.nonProxyHosts", "*snowflakecomputing.com");

    // set max retries
    Properties properties = new Properties();
    properties.put("maxHttpRetries", 2);

    SnowflakeChunkDownloader snowflakeChunkDownloaderSpy = null;

    try (Connection connection = getConnection(properties);
        Statement statement = connection.createStatement()) {
      // execute a query that will require chunk downloading
      ResultSet resultSet =
          statement.executeQuery(
              "select seq8(), randstr(1000, random()) from table(generator(rowcount => 10000))");
      List<SnowflakeResultSetSerializable> resultSetSerializables =
          ((SnowflakeResultSet) resultSet).getResultSetSerializables(100 * 1024 * 1024);
      SnowflakeResultSetSerializable resultSetSerializable = resultSetSerializables.get(0);
      SnowflakeChunkDownloader downloader =
          new SnowflakeChunkDownloader((SnowflakeResultSetSerializableV1) resultSetSerializable);
      snowflakeChunkDownloaderSpy = Mockito.spy(downloader);
      snowflakeChunkDownloaderSpy.getNextChunkToConsume();
      // Reaching this point means the downloader never gave up, i.e. the retry cap was ignored.
      fail("Expected chunk download to fail after exhausting maxHttpRetries");
    } catch (SnowflakeSQLException exception) {
      // verify that request was retried twice before reaching max retries
      Mockito.verify(snowflakeChunkDownloaderSpy, Mockito.times(2)).getResultStreamProvider();
      assertTrue(exception.getMessage().contains("Max retry reached for the download of #chunk0"));
      assertTrue(exception.getMessage().contains("retry=2"));
    } finally {
      restoreProperty("https.proxyHost", previousProxyHost);
      restoreProperty("https.proxyPort", previousProxyPort);
      restoreProperty("http.nonProxyHosts", previousNonProxyHosts);
    }
  }

  /** Restores a system property to its previous value, clearing it if it was unset before. */
  private static void restoreProperty(String key, String value) {
    if (value == null) {
      System.clearProperty(key);
    } else {
      System.setProperty(key, value);
    }
  }
}

0 comments on commit ee19422

Please sign in to comment.