diff --git a/integration-test/spark-common/src/test/java/org/apache/uniffle/test/RssShuffleManagerTest.java b/integration-test/spark-common/src/test/java/org/apache/uniffle/test/RssShuffleManagerTest.java
index ac6d739dd7..88b2c9b89c 100644
--- a/integration-test/spark-common/src/test/java/org/apache/uniffle/test/RssShuffleManagerTest.java
+++ b/integration-test/spark-common/src/test/java/org/apache/uniffle/test/RssShuffleManagerTest.java
@@ -38,7 +38,6 @@
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
@@ -125,8 +124,6 @@ public void testRssShuffleManagerClientConf(BlockIdLayout layout) throws Excepti
 
   @ParameterizedTest
   @MethodSource("testBlockIdLayouts")
-  @Disabled(
-      "Dynamic client conf not working for arguments used to create ShuffleWriteClient: issue #1554")
   public void testRssShuffleManagerDynamicClientConf(BlockIdLayout layout) throws Exception {
     doTestRssShuffleManager(null, layout, layout, true);
   }
@@ -159,7 +156,7 @@ private void doTestRssShuffleManager(
     conf.set("spark." + RssClientConfig.RSS_CLIENT_ASSIGNMENT_RETRY_INTERVAL, "1000");
     conf.set("spark." + RssClientConfig.RSS_CLIENT_ASSIGNMENT_RETRY_TIMES, "10");
 
-    // configure block id layout (if set)
+    // configure client conf block id layout (if set)
     if (clientConfLayout != null) {
       conf.set(
           "spark." + RssClientConf.BLOCKID_SEQUENCE_NO_BITS.key(),
@@ -185,6 +182,17 @@ private void doTestRssShuffleManager(
 
     RssShuffleManagerBase shuffleManager = (RssShuffleManagerBase) SparkEnv.get().shuffleManager();
 
+    // configure expected block id layout
+    conf.set(
+        "spark." + RssClientConf.BLOCKID_SEQUENCE_NO_BITS.key(),
+        String.valueOf(expectedLayout.sequenceNoBits));
+    conf.set(
+        "spark." + RssClientConf.BLOCKID_PARTITION_ID_BITS.key(),
+        String.valueOf(expectedLayout.partitionIdBits));
+    conf.set(
+        "spark." + RssClientConf.BLOCKID_TASK_ATTEMPT_ID_BITS.key(),
+        String.valueOf(expectedLayout.taskAttemptIdBits));
+
     // get written block ids (we know there is one shuffle where two task attempts wrote two
     // partitions)
     RssConf rssConf = RssSparkConfig.toRssConf(conf);