
Commit 351a36d

chenghao-intel authored and rxin committed
[SPARK-8883][SQL]Remove the OverrideFunctionRegistry
Remove the `OverrideFunctionRegistry` from Spark SQL, as the subclasses of `FunctionRegistry` have their own way to delegate to the right underlying `FunctionRegistry`.

Author: Cheng Hao <[email protected]>

Closes apache#7260 from chenghao-intel/override and squashes the following commits:

164d093 [Cheng Hao] enable the function registry
2ca8459 [Cheng Hao] remove the OverrideFunctionRegistry
1 parent 08192a1 commit 351a36d
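
Before the per-file diffs, a minimal, self-contained sketch of the delegation pattern the commit message relies on. The trait below mirrors the registerFunction/lookupFunction shape of FunctionRegistry as it appears in the diffs; Expr, Builder, SimpleRegistry and DelegatingRegistry are hypothetical stand-ins rather than the actual Spark classes, and the lookup fallback is only an assumed illustration of how a subclass such as HiveFunctionRegistry can delegate to the registry it wraps:

    object RegistryDelegationSketch {
      type Expr = String                     // stand-in for catalyst's Expression
      type Builder = Seq[Expr] => Expr       // stand-in for FunctionRegistry.FunctionBuilder

      trait Registry {
        def registerFunction(name: String, builder: Builder): Unit
        def lookupFunction(name: String, children: Seq[Expr]): Expr
      }

      // Plays the role of SimpleFunctionRegistry / FunctionRegistry.builtin:
      // a case-insensitive name -> builder map.
      class SimpleRegistry extends Registry {
        private val builders = scala.collection.mutable.HashMap.empty[String, Builder]
        def registerFunction(name: String, builder: Builder): Unit =
          builders(name.toLowerCase) = builder
        def lookupFunction(name: String, children: Seq[Expr]): Expr =
          builders.get(name.toLowerCase)
            .map(_(children))
            .getOrElse(throw new NoSuchElementException(s"undefined function $name"))
      }

      // Plays the role of HiveFunctionRegistry: it already wraps an underlying
      // registry and forwards to it, which is why a separate OverrideFunctionRegistry
      // wrapper is redundant.
      class DelegatingRegistry(underlying: Registry) extends Registry {
        def registerFunction(name: String, builder: Builder): Unit =
          underlying.registerFunction(name, builder)       // forward registrations
        def lookupFunction(name: String, children: Seq[Expr]): Expr =
          try underlying.lookupFunction(name, children)    // built-ins / temp functions first
          catch {
            case _: NoSuchElementException =>
              s"external:$name(${children.mkString(", ")})" // then the outer registry's own fallback
          }
      }

      def main(args: Array[String]): Unit = {
        val registry: Registry = new DelegatingRegistry(new SimpleRegistry)
        registry.registerFunction("shout", children => children.head.toUpperCase)
        println(registry.lookupFunction("shout", Seq("hello"))) // resolved by the wrapped registry
        println(registry.lookupFunction("nvl", Seq("a", "b")))  // handled by the outer fallback
      }
    }
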

File tree

4 files changed: +3 -17 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 0 additions & 13 deletions
@@ -35,19 +35,6 @@ trait FunctionRegistry {
   def lookupFunction(name: String, children: Seq[Expression]): Expression
 }

-class OverrideFunctionRegistry(underlying: FunctionRegistry) extends FunctionRegistry {
-
-  private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)
-
-  override def registerFunction(name: String, builder: FunctionBuilder): Unit = {
-    functionBuilders.put(name, builder)
-  }
-
-  override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
-    functionBuilders.get(name).map(_(children)).getOrElse(underlying.lookupFunction(name, children))
-  }
-}
-
 class SimpleFunctionRegistry extends FunctionRegistry {

   private val functionBuilders = StringKeyHashMap[FunctionBuilder](caseSensitive = false)

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 2 deletions
@@ -139,8 +139,7 @@ class SQLContext(@transient val sparkContext: SparkContext)

   // TODO how to handle the temp function per user session?
   @transient
-  protected[sql] lazy val functionRegistry: FunctionRegistry =
-    new OverrideFunctionRegistry(FunctionRegistry.builtin)
+  protected[sql] lazy val functionRegistry: FunctionRegistry = FunctionRegistry.builtin

   @transient
   protected[sql] lazy val analyzer: Analyzer =
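
With this change the default registry for a SQLContext is FunctionRegistry.builtin itself rather than a wrapper around it. A hedged, REPL-style usage sketch of registering against that registry, assuming FunctionBuilder is the Seq[Expression] => Expression alias implied by lookupFunction above and that catalyst's Upper expression is available; neither detail is part of this diff:

    import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
    import org.apache.spark.sql.catalyst.expressions.{Expression, Upper}

    // Sketch only: registers a builder directly on the shared built-in registry.
    val registry = FunctionRegistry.builtin
    registry.registerFunction("shout", (children: Seq[Expression]) => Upper(children.head))
    // registry.lookupFunction("shout", Seq(expr)) would now resolve to Upper(expr).
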

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 1 addition & 1 deletion
@@ -371,7 +371,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   // Note that HiveUDFs will be overridden by functions registered in this context.
   @transient
   override protected[sql] lazy val functionRegistry: FunctionRegistry =
-    new OverrideFunctionRegistry(new HiveFunctionRegistry(FunctionRegistry.builtin))
+    new HiveFunctionRegistry(FunctionRegistry.builtin)

   /* An analyzer that uses the Hive metastore. */
   @transient

sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@ private[hive] class HiveFunctionRegistry(underlying: analysis.FunctionRegistry)
   }

   override def registerFunction(name: String, builder: FunctionBuilder): Unit =
-    throw new UnsupportedOperationException
+    underlying.registerFunction(name, builder)
 }

 private[hive] case class HiveSimpleUDF(funcWrapper: HiveFunctionWrapper, children: Seq[Expression])

Comments (0)