diff --git a/integration_tests/src/main/python/hive_parquet_write_test.py b/integration_tests/src/main/python/hive_parquet_write_test.py
index e74a99f43c7..96976c3a356 100644
--- a/integration_tests/src/main/python/hive_parquet_write_test.py
+++ b/integration_tests/src/main/python/hive_parquet_write_test.py
@@ -19,7 +19,7 @@
 from data_gen import *
 from hive_write_test import _restricted_timestamp
 from marks import allow_non_gpu, ignore_order
-from spark_session import with_cpu_session, is_before_spark_320
+from spark_session import with_cpu_session, is_before_spark_320, is_spark_351_or_later
 
 # Disable the meta conversion from Hive write to FrameData write in Spark, to test
 # "GpuInsertIntoHiveTable" for Parquet write.
@@ -55,7 +55,7 @@
 _hive_write_gens = [_hive_basic_gens, _hive_struct_gens, _hive_array_gens, _hive_map_gens]
 
 # ProjectExec falls back on databricks due to no GPU version of "MapFromArrays".
-fallback_nodes = ['ProjectExec'] if is_databricks_runtime() else []
+fallback_nodes = ['ProjectExec'] if is_databricks_runtime() or is_spark_351_or_later() else []
 
 
 @allow_non_gpu(*(non_utc_allow + fallback_nodes))
diff --git a/integration_tests/src/main/python/spark_session.py b/integration_tests/src/main/python/spark_session.py
index 78e0b08a651..c55f1976497 100644
--- a/integration_tests/src/main/python/spark_session.py
+++ b/integration_tests/src/main/python/spark_session.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020-2023, NVIDIA CORPORATION.
+# Copyright (c) 2020-2024, NVIDIA CORPORATION.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -220,6 +220,9 @@ def is_spark_341():
 def is_spark_350_or_later():
     return spark_version() >= "3.5.0"
 
+def is_spark_351_or_later():
+    return spark_version() >= "3.5.1"
+
 def is_spark_330():
     return spark_version() == "3.3.0"
 
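
Note: for context, a standalone sketch (not part of the patch) of the version gate this change introduces and how the test module uses it. The helper names mirror the patch, but fallback_nodes_for() and the parameterized signatures are illustrative only; in the repo, spark_version() reads the version from the active Spark session rather than taking an argument.

# Illustrative sketch of the gate logic, runnable on its own.

def is_spark_351_or_later(spark_version: str) -> bool:
    # Lexicographic string comparison, matching the style of the existing
    # is_spark_350_or_later() helper in spark_session.py.
    return spark_version >= "3.5.1"

def fallback_nodes_for(spark_version: str, is_databricks: bool) -> list:
    # Hypothetical helper: ProjectExec falls back to the CPU where there is
    # no GPU version of "MapFromArrays" -- on Databricks runtimes, and with
    # this patch on Spark 3.5.1+ as well.
    return ['ProjectExec'] if is_databricks or is_spark_351_or_later(spark_version) else []

assert fallback_nodes_for("3.5.0", is_databricks=False) == []
assert fallback_nodes_for("3.5.1", is_databricks=False) == ['ProjectExec']
assert fallback_nodes_for("3.4.2", is_databricks=True) == ['ProjectExec']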