[SPARK-44283][CONNECT] Move Origin to SQL/API
### What changes were proposed in this pull request?
This PR moves Origin and SQLQueryContext to sql/api.

### Why are the changes needed?
This change allows us to move ParseException to sql/api.
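
For context, a minimal sketch (not part of this patch) of how an error path can ask the relocated `Origin` for a query context; the SQL text and offsets below are illustrative only:

```scala
import org.apache.spark.QueryContext
import org.apache.spark.sql.catalyst.trees.Origin

// Build an Origin the way a parser might, pointing at a fragment of SQL text.
// All values here are made up for illustration.
val origin = Origin(
  startIndex = Some(0),
  stopIndex = Some(13),
  sqlText = Some("SELECT 1 + 'a'"))

// getQueryContext returns the derived SQLQueryContext only when it is valid
// (enough location information is present); otherwise it returns an empty array.
val ctx: Array[QueryContext] = origin.getQueryContext
```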

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
Existing tests.

Closes apache#41837 from hvanhovell/SPARK-44283.

Authored-by: Herman van Hovell <herman@databricks.com>
Signed-off-by: Herman van Hovell <herman@databricks.com>
hvanhovell authored and ragnarok56 committed Mar 2, 2024
1 parent 26a545f commit c2d3981
Showing 3 changed files with 72 additions and 54 deletions.
@@ -0,0 +1,72 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql.catalyst.trees

import org.apache.spark.QueryContext

/**
 * Contexts of TreeNodes, including location, SQL text, object type and object name.
 * The only supported object type is "VIEW" now. In the future, we may support SQL UDF or other
 * objects which contain SQL text.
 */
case class Origin(
    line: Option[Int] = None,
    startPosition: Option[Int] = None,
    startIndex: Option[Int] = None,
    stopIndex: Option[Int] = None,
    sqlText: Option[String] = None,
    objectType: Option[String] = None,
    objectName: Option[String] = None) {

  lazy val context: SQLQueryContext = SQLQueryContext(
    line, startPosition, startIndex, stopIndex, sqlText, objectType, objectName)

  def getQueryContext: Array[QueryContext] = if (context.isValid) {
    Array(context)
  } else {
    Array.empty
  }
}

/**
 * Provides a location for TreeNodes to ask about the context of their origin. For example, which
 * line of code is currently being parsed.
 */
object CurrentOrigin {
  private val value = new ThreadLocal[Origin]() {
    override def initialValue: Origin = Origin()
  }

  def get: Origin = value.get()
  def set(o: Origin): Unit = value.set(o)

  def reset(): Unit = value.set(Origin())

  def setPosition(line: Int, start: Int): Unit = {
    value.set(
      value.get.copy(line = Some(line), startPosition = Some(start)))
  }

  def withOrigin[A](o: Origin)(f: => A): A = {
    // remember the previous one so it can be reset to this
    // this way withOrigin can be recursive
    val previous = get
    set(o)
    val ret = try f finally { set(previous) }
    ret
  }
}
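
As a hedged usage sketch (not part of the diff), `CurrentOrigin.withOrigin` scopes an `Origin` to a block of work and restores the previous value afterwards, which is what allows nested (recursive) use; the values below are illustrative only:

```scala
import org.apache.spark.sql.catalyst.trees.{CurrentOrigin, Origin}

// Pretend a parser is currently at line 1, column 7 (illustrative values).
val parsing = Origin(line = Some(1), startPosition = Some(7))

val seen = CurrentOrigin.withOrigin(parsing) {
  // Any TreeNode constructed inside this block would capture CurrentOrigin.get.
  CurrentOrigin.get.line // Some(1)
}

// The previous (default) origin is restored afterwards, even if the block throws.
assert(CurrentOrigin.get == Origin())
```
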
@@ -28,7 +28,6 @@ import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.QueryContext
import org.apache.spark.sql.catalyst.{AliasIdentifier, CatalystIdentifier}
import org.apache.spark.sql.catalyst.ScalaReflection._
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, FunctionResource}
@@ -53,59 +52,6 @@ import org.apache.spark.util.collection.BitSet
/** Used by [[TreeNode.getNodeNumbered]] when traversing the tree for a given number */
private class MutableInt(var i: Int)

/**
 * Contexts of TreeNodes, including location, SQL text, object type and object name.
 * The only supported object type is "VIEW" now. In the future, we may support SQL UDF or other
 * objects which contain SQL text.
 */
case class Origin(
    line: Option[Int] = None,
    startPosition: Option[Int] = None,
    startIndex: Option[Int] = None,
    stopIndex: Option[Int] = None,
    sqlText: Option[String] = None,
    objectType: Option[String] = None,
    objectName: Option[String] = None) {

  lazy val context: SQLQueryContext = SQLQueryContext(
    line, startPosition, startIndex, stopIndex, sqlText, objectType, objectName)

  def getQueryContext: Array[QueryContext] = if (context.isValid) {
    Array(context)
  } else {
    Array.empty
  }
}

/**
 * Provides a location for TreeNodes to ask about the context of their origin. For example, which
 * line of code is currently being parsed.
 */
object CurrentOrigin {
  private val value = new ThreadLocal[Origin]() {
    override def initialValue: Origin = Origin()
  }

  def get: Origin = value.get()
  def set(o: Origin): Unit = value.set(o)

  def reset(): Unit = value.set(Origin())

  def setPosition(line: Int, start: Int): Unit = {
    value.set(
      value.get.copy(line = Some(line), startPosition = Some(start)))
  }

  def withOrigin[A](o: Origin)(f: => A): A = {
    // remember the previous one so it can be reset to this
    // this way withOrigin can be recursive
    val previous = get
    set(o)
    val ret = try f finally { set(previous) }
    ret
  }
}

// A tag of a `TreeNode`, which defines name and type
case class TreeNodeTag[T](name: String)
