[SPARK-31725][CORE][SQL][TESTS] Set America/Los_Angeles time zone and Locale.US in tests by default #28548

6 changes: 6 additions & 0 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark

// scalastyle:off
import java.io.File
import java.util.{Locale, TimeZone}

import org.apache.log4j.spi.LoggingEvent

@@ -63,6 +64,11 @@ abstract class SparkFunSuite
with Logging {
// scalastyle:on

// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)

protected val enableAutoThreadAudit = true

protected override def beforeAll(): Unit = {
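With the JVM defaults now pinned once in `SparkFunSuite`, a suite that genuinely needs a different zone for a specific test can still set and restore the default locally. A minimal sketch of such a helper, in the spirit of the save/restore blocks this PR removes (the name `withDefaultTimeZone` here is illustrative, not an API introduced by this change):

```scala
import java.util.TimeZone

object TimeZoneTestHelper {
  // Run `body` with a temporary JVM-default time zone, restoring the previous
  // default afterwards even if `body` throws.
  def withDefaultTimeZone[T](tz: TimeZone)(body: => T): T = {
    val saved = TimeZone.getDefault
    TimeZone.setDefault(tz)
    try body finally TimeZone.setDefault(saved)
  }
}

// Usage: TimeZoneTestHelper.withDefaultTimeZone(TimeZone.getTimeZone("UTC")) { /* test body */ }
```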
@@ -18,7 +18,6 @@
package org.apache.spark.sql.catalyst.expressions

import java.sql.Timestamp
import java.util.TimeZone

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types._
@@ -35,15 +34,7 @@ class SortOrderExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
val l1 = Literal.create(20132983L, LongType)
val l2 = Literal.create(-20132983L, LongType)
val millis = 1524954911000L
// Explicitly choose a time zone, since Date objects can create different values depending on
// local time zone of the machine on which the test is running
val oldDefaultTZ = TimeZone.getDefault
val d1 = try {
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
Literal.create(new java.sql.Date(millis), DateType)
} finally {
TimeZone.setDefault(oldDefaultTZ)
}
val d1 = Literal.create(new java.sql.Date(millis), DateType)
val t1 = Literal.create(new Timestamp(millis), TimestampType)
val f1 = Literal.create(0.7788229f, FloatType)
val f2 = Literal.create(-0.7788229f, FloatType)
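The try/finally block removed above existed because `new java.sql.Date(millis)` renders in the JVM-default time zone, so the same instant can map to different calendar dates on different machines; with `SparkFunSuite` now fixing the default to America/Los_Angeles, the local save/set/restore is redundant. A standalone illustration of that sensitivity (not part of the patch):

```scala
import java.util.TimeZone

object DateTimeZoneSensitivity extends App {
  val millis = 1524954911000L // 2018-04-28 22:35:11 UTC, the value used in the test above

  // java.sql.Date.toString formats the date in the JVM-default time zone,
  // so the same instant prints as different calendar dates.
  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
  println(new java.sql.Date(millis)) // 2018-04-28

  TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo"))
  println(new java.sql.Date(millis)) // 2018-04-29
}
```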
7 changes: 1 addition & 6 deletions sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -17,7 +17,7 @@

package org.apache.spark.sql

import java.util.{Locale, TimeZone}
import java.util.TimeZone

import scala.collection.JavaConverters._

@@ -35,11 +35,6 @@ abstract class QueryTest extends PlanTest

protected def spark: SparkSession

// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)

/**
* Runs the plan and makes sure the answer contains all of the keywords.
*/
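The `Locale.US` default matters for a similar reason: locale controls how numbers and other values render as strings, which string-based result comparisons in these suites can depend on. A standalone illustration (not part of the patch):

```scala
import java.util.Locale

object LocaleSensitivity extends App {
  // Number formatting is locale-sensitive: grouping and decimal separators differ.
  println(String.format(Locale.US, "%,.2f", Double.box(1234567.891)))      // 1,234,567.89
  println(String.format(Locale.GERMANY, "%,.2f", Double.box(1234567.891))) // 1.234.567,89
}
```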
@@ -18,7 +18,7 @@
package org.apache.spark.sql

import java.io.File
import java.util.{Locale, TimeZone}
import java.util.Locale
import java.util.regex.Pattern

import scala.collection.mutable.{ArrayBuffer, HashMap}
@@ -672,25 +672,16 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
session.sql("DROP TABLE IF EXISTS tenk1")
}

private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault

override def beforeAll(): Unit = {
super.beforeAll()
createTestTables(spark)
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)
RuleExecutor.resetMetrics()
CodeGenerator.resetCompileTime()
WholeStageCodegenExec.resetCodeGenTime()
}

override def afterAll(): Unit = {
try {
TimeZone.setDefault(originalTimeZone)
Locale.setDefault(originalLocale)
removeTestTables(spark)

// For debugging dump some statistics about how much time was spent in various optimizer rules
@@ -18,7 +18,6 @@
package org.apache.spark.sql.hive.execution

import java.io.File
import java.util.{Locale, TimeZone}

import org.scalatest.BeforeAndAfter

@@ -36,8 +35,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
private lazy val hiveQueryDir = TestHive.getHiveFile(
"ql/src/test/queries/clientpositive".split("/").mkString(File.separator))

private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault
private val originalColumnBatchSize = TestHive.conf.columnBatchSize
private val originalInMemoryPartitionPruning = TestHive.conf.inMemoryPartitionPruning
private val originalCrossJoinEnabled = TestHive.conf.crossJoinEnabled
@@ -51,10 +48,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
override def beforeAll(): Unit = {
super.beforeAll()
TestHive.setCacheTables(true)
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)
// Set a relatively small column batch size for testing purposes
TestHive.setConf(SQLConf.COLUMN_BATCH_SIZE, 5)
// Enable in-memory partition pruning for testing purposes
@@ -73,8 +66,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
override def afterAll(): Unit = {
try {
TestHive.setCacheTables(false)
TimeZone.setDefault(originalTimeZone)
Locale.setDefault(originalLocale)
TestHive.setConf(SQLConf.COLUMN_BATCH_SIZE, originalColumnBatchSize)
TestHive.setConf(SQLConf.IN_MEMORY_PARTITION_PRUNING, originalInMemoryPartitionPruning)
TestHive.setConf(SQLConf.CROSS_JOINS_ENABLED, originalCrossJoinEnabled)
@@ -18,7 +18,6 @@
package org.apache.spark.sql.hive.execution

import java.io.File
import java.util.{Locale, TimeZone}

import org.scalatest.BeforeAndAfter

@@ -33,17 +32,11 @@ import org.apache.spark.util.Utils
* files, every `createQueryTest` calls should explicitly set `reset` to `false`.
*/
class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault
private val testTempDir = Utils.createTempDir()

override def beforeAll(): Unit = {
super.beforeAll()
TestHive.setCacheTables(true)
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)

// Create the table used in windowing.q
sql("DROP TABLE IF EXISTS part")
@@ -103,8 +96,6 @@ class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter
override def afterAll(): Unit = {
try {
TestHive.setCacheTables(false)
TimeZone.setDefault(originalTimeZone)
Locale.setDefault(originalLocale)
TestHive.reset()
} finally {
super.afterAll()
@@ -747,17 +738,11 @@ class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter

class HiveWindowFunctionQueryFileSuite
extends HiveCompatibilitySuite with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault
private val testTempDir = Utils.createTempDir()

override def beforeAll(): Unit = {
super.beforeAll()
TestHive.setCacheTables(true)
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)

// The following settings are used for generating golden files with Hive.
// We have to use kryo to correctly let Hive serialize plans with window functions.
@@ -772,8 +757,6 @@ class HiveWindowFunctionQueryFileSuite
override def afterAll(): Unit = {
try {
TestHive.setCacheTables(false)
TimeZone.setDefault(originalTimeZone)
Locale.setDefault(originalLocale)
TestHive.reset()
} finally {
super.afterAll()
@@ -18,7 +18,6 @@
package org.apache.spark.sql.hive

import java.util
import java.util.{Locale, TimeZone}

import org.apache.hadoop.hive.ql.udf.UDAFPercentile
import org.apache.hadoop.hive.serde2.io.DoubleWritable
@@ -74,11 +73,6 @@ class HiveInspectorSuite extends SparkFunSuite with HiveInspectors {
.get())
}

// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)

val data =
Literal(true) ::
Literal(null) ::
@@ -20,7 +20,7 @@ package org.apache.spark.sql.hive.execution
import java.io.File
import java.net.URI
import java.sql.Timestamp
import java.util.{Locale, TimeZone}
import java.util.Locale

import scala.util.Try

@@ -47,9 +47,6 @@ case class TestData(a: Int, b: String)
* included in the hive distribution.
*/
class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault

import org.apache.spark.sql.hive.test.TestHive.implicits._

private val originalCrossJoinEnabled = TestHive.conf.crossJoinEnabled
@@ -59,19 +56,13 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAndAfter
override def beforeAll(): Unit = {
super.beforeAll()
TestHive.setCacheTables(true)
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)
// Ensures that cross joins are enabled so that we can test them
TestHive.setConf(SQLConf.CROSS_JOINS_ENABLED, true)
}

override def afterAll(): Unit = {
try {
TestHive.setCacheTables(false)
TimeZone.setDefault(originalTimeZone)
Locale.setDefault(originalLocale)
sql("DROP TEMPORARY FUNCTION IF EXISTS udtf_count2")
TestHive.setConf(SQLConf.CROSS_JOINS_ENABLED, originalCrossJoinEnabled)
} finally {