[SPARK-45964][SQL] Remove private sql accessor in XML and JSON package under catalyst package

### What changes were proposed in this pull request?

This PR removes the `private[sql]` access modifiers in the XML and JSON packages under the `catalyst` package.

### Why are the changes needed?

`catalyst` is already a private package: https://github.com/apache/spark/blob/master/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/package.scala#L21-L22

See also SPARK-16813
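
To illustrate the scoping rule, here is a minimal sketch (with hypothetical package names, not Spark's actual sources): `private[sql]` only widens a member's visibility up to the enclosing `sql` package, so for classes that already live in an internal-only package, the qualifier makes no difference to outside callers.

```scala
// Hypothetical package names for illustration only; not Spark's real layout.
package example {
  package sql {
    package catalyst {
      // Visible anywhere under example.sql, hidden from code outside it.
      private[sql] object Restricted {
        def trimmed(s: String): String = s.trim
      }

      // No modifier: public at the language level, yet still internal API
      // by convention when the enclosing package is documented as private.
      object Unrestricted {
        def trimmed(s: String): String = s.trim
      }
    }

    object WithinSql {
      // Both references compile: this object lives inside the `sql` package.
      def demo(): String =
        catalyst.Restricted.trimmed(" a ") + catalyst.Unrestricted.trimmed(" b ")
    }
  }

  object OutsideSql {
    // `sql.catalyst.Restricted` would not compile here (it is private[sql]);
    // `Unrestricted` does. Removing the qualifier therefore changes nothing
    // for callers who already treat the whole package as internal.
    def demo(): String = sql.catalyst.Unrestricted.trimmed(" c ")
  }
}
```

Dropping the redundant qualifier simplifies the declarations without changing what users of the public API can access.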

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

CI in this PR should test them out.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #43856 from HyukjinKwon/SPARK-45964.

Authored-by: Hyukjin Kwon <gurwls223@apache.org>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
HyukjinKwon authored and dongjoon-hyun committed Nov 17, 2023
1 parent db0da0c commit 8147620
Showing 10 changed files with 14 additions and 14 deletions.
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/CreateJacksonParser.scala
@@ -29,7 +29,7 @@ import sun.nio.cs.StreamDecoder
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.unsafe.types.UTF8String

-private[sql] object CreateJacksonParser extends Serializable {
+object CreateJacksonParser extends Serializable {
def string(jsonFactory: JsonFactory, record: String): JsonParser = {
jsonFactory.createParser(record)
}
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
@@ -34,7 +34,7 @@ import org.apache.spark.sql.internal.{LegacyBehaviorPolicy, SQLConf}
*
* Most of these map directly to Jackson's internal options, specified in [[JsonReadFeature]].
*/
-private[sql] class JSONOptions(
+class JSONOptions(
@transient val parameters: CaseInsensitiveMap[String],
defaultTimeZoneId: String,
defaultColumnNameOfCorruptRecord: String)
@@ -212,7 +212,7 @@ private[sql] class JSONOptions(
}
}

-private[sql] class JSONOptionsInRead(
+class JSONOptionsInRead(
@transient override val parameters: CaseInsensitiveMap[String],
defaultTimeZoneId: String,
defaultColumnNameOfCorruptRecord: String)
@@ -242,7 +242,7 @@ private[sql] class JSONOptionsInRead(
}
}

-private[sql] object JSONOptionsInRead {
+object JSONOptionsInRead {
// The following encodings are not supported in per-line mode (multiline is false)
// because they cause some problems in reading files with BOM which is supposed to
// present in the files with such encodings. After splitting input files by lines,
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala
@@ -37,7 +37,7 @@ import org.apache.spark.util.ArrayImplicits._
* of map. An exception will be thrown if trying to write out a struct if it is initialized with
a `MapType`, and vice versa.
*/
-private[sql] class JacksonGenerator(
+class JacksonGenerator(
dataType: DataType,
writer: Writer,
options: JSONOptions) {
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
@@ -37,7 +37,7 @@ import org.apache.spark.sql.internal.{LegacyBehaviorPolicy, SQLConf}
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils

-private[sql] class JsonInferSchema(options: JSONOptions) extends Serializable with Logging {
+class JsonInferSchema(options: JSONOptions) extends Serializable with Logging {

private val decimalParser = ExprUtils.getDecimalParser(options.locale)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/CreateXmlParser.scala
@@ -29,7 +29,7 @@ import sun.nio.cs.StreamDecoder
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.unsafe.types.UTF8String

-private[sql] object CreateXmlParser extends Serializable {
+object CreateXmlParser extends Serializable {
val filter = new EventFilter {
override def accept(event: XMLEvent): Boolean =
// Ignore comments and processing instructions
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParser.scala
@@ -587,7 +587,7 @@ class StaxXmlParser(
*
* This implementation is ultimately loosely based on LineRecordReader in Hadoop.
*/
-private[xml] class XmlTokenizer(
+class XmlTokenizer(
inputStream: InputStream,
options: XmlOptions) {
private val reader = new InputStreamReader(inputStream, Charset.forName(options.charset))
@@ -742,7 +742,7 @@ private[xml] class XmlTokenizer(
}
}

-private[sql] object StaxXmlParser {
+object StaxXmlParser {
/**
* Parses a stream that contains XML strings and turns it into an iterator of tokens.
*/
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParserUtils.scala
@@ -24,7 +24,7 @@ import javax.xml.stream.events._
import scala.annotation.tailrec
import scala.jdk.CollectionConverters._

-private[sql] object StaxXmlParserUtils {
+object StaxXmlParserUtils {

private[sql] val factory: XMLInputFactory = {
val factory = XMLInputFactory.newInstance()
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/ValidatorUtil.scala
@@ -31,7 +31,7 @@ import org.apache.spark.internal.Logging
/**
* Utilities for working with XSD validation.
*/
-private[sql] object ValidatorUtil extends Logging{
+object ValidatorUtil extends Logging {
// Parsing XSDs may be slow, so cache them by path:

private val cache = CacheBuilder.newBuilder().softValues().build(
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlInferSchema.scala
@@ -36,7 +36,7 @@ import org.apache.spark.sql.catalyst.util.{DateFormatter, PermissiveMode, Timest
import org.apache.spark.sql.catalyst.util.LegacyDateFormats.FAST_DATE_FORMAT
import org.apache.spark.sql.types._

-private[sql] class XmlInferSchema(options: XmlOptions, caseSensitive: Boolean)
+class XmlInferSchema(options: XmlOptions, caseSensitive: Boolean)
extends Serializable
with Logging {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlOptions.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.internal.{LegacyBehaviorPolicy, SQLConf}
/**
* Options for the XML data source.
*/
-private[sql] class XmlOptions(
+class XmlOptions(
val parameters: CaseInsensitiveMap[String],
defaultTimeZoneId: String,
defaultColumnNameOfCorruptRecord: String,
@@ -172,7 +172,7 @@ private[sql] class XmlOptions(
}
}

-private[sql] object XmlOptions extends DataSourceOptions {
+object XmlOptions extends DataSourceOptions {
val DEFAULT_ATTRIBUTE_PREFIX = "_"
val DEFAULT_VALUE_TAG = "_VALUE"
val DEFAULT_ROW_TAG = "ROW"
