
Commit a3f7421

adrian-wang authored and rxin committed

[SPARK-5248] [SQL] move sql.types.decimal.Decimal to sql.types.Decimal

Follow-up of apache#3732.

Author: Daoyuan Wang <[email protected]>

Closes apache#4041 from adrian-wang/decimal and squashes the following commits:

aa3d738 [Daoyuan Wang] fix auto refactor
7777a58 [Daoyuan Wang] move sql.types.decimal.Decimal to sql.types.Decimal

1 parent d5eeb35 commit a3f7421
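In practical terms the change is a package flattening: code that previously reached into the nested decimal package now imports the class from sql.types directly, next to the other SQL types. A minimal before/after sketch (illustrative, not part of the diff):

    // before this commit:
    //   import org.apache.spark.sql.types.decimal.Decimal
    // after this commit:
    import org.apache.spark.sql.types.Decimal

    val one = Decimal(1) // the companion's Int apply, also used by the codegen change below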

File tree

22 files changed: +13 -29 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
Lines changed: 0 additions & 1 deletion

@@ -23,7 +23,6 @@ import org.apache.spark.util.Utils
 import org.apache.spark.sql.catalyst.expressions.{GenericRow, Attribute, AttributeReference, Row}
 import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal


 /**
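The same one-line cleanup repeats across most files below: these files already import org.apache.spark.sql.types._ with a wildcard, so once Decimal moves into that package the explicit nested import becomes redundant and can simply be dropped. A minimal sketch of why the deletion still compiles:

    import org.apache.spark.sql.types._

    // Decimal now resolves through the wildcard import above; no separate
    // org.apache.spark.sql.types.decimal.Decimal import is needed.
    val d: Decimal = Decimal(1)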

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
Lines changed: 0 additions & 1 deletion

@@ -27,7 +27,6 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 /**
  * A collection of implicit conversions that create a DSL for constructing catalyst data structures.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
Lines changed: 0 additions & 1 deletion

@@ -23,7 +23,6 @@ import java.text.{DateFormat, SimpleDateFormat}
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 /** Cast the child expression to the target data type. */
 case class Cast(child: Expression, dataType: DataType) extends UnaryExpression with Logging {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
Lines changed: 3 additions & 4 deletions

@@ -18,7 +18,6 @@
 package org.apache.spark.sql.catalyst.expressions.codegen

 import com.google.common.cache.{CacheLoader, CacheBuilder}
-import org.apache.spark.sql.types.decimal.Decimal

 import scala.language.existentials

@@ -541,11 +540,11 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
         childEval.code ++
         q"""
           var $nullTerm = ${childEval.nullTerm}
-          var $primitiveTerm: org.apache.spark.sql.types.decimal.Decimal =
+          var $primitiveTerm: org.apache.spark.sql.types.Decimal =
             ${defaultPrimitive(DecimalType())}

           if (!$nullTerm) {
-            $primitiveTerm = new org.apache.spark.sql.types.decimal.Decimal()
+            $primitiveTerm = new org.apache.spark.sql.types.Decimal()
             $primitiveTerm = $primitiveTerm.setOrNull(${childEval.primitiveTerm}, $precision, $scale)
             $nullTerm = $primitiveTerm == null
           }
@@ -627,7 +626,7 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
     case LongType => ru.Literal(Constant(1L))
     case ByteType => ru.Literal(Constant(-1.toByte))
     case DoubleType => ru.Literal(Constant(-1.toDouble))
-    case DecimalType() => q"org.apache.spark.sql.types.decimal.Decimal(-1)"
+    case DecimalType() => q"org.apache.spark.sql.types.Decimal(-1)"
     case IntegerType => ru.Literal(Constant(-1))
     case _ => ru.Literal(Constant(null))
   }
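CodeGenerator is the one place where the class name appears inside generated code rather than in an import: trees built with quasiquotes are compiled later, outside this source file, so they must spell out the fully qualified name, which is why the rename touches the string content of the trees themselves. A standalone sketch of the idea using only scala-reflect (illustrative, not Spark's code):

    import scala.reflect.runtime.universe._

    // The tree references Decimal by its full path; neither this file nor the
    // file that eventually compiles the tree needs a Decimal import in scope.
    val defaultDecimal = q"org.apache.spark.sql.types.Decimal(-1)"
    println(showCode(defaultDecimal))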

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
Lines changed: 1 addition & 2 deletions

@@ -17,8 +17,7 @@

 package org.apache.spark.sql.catalyst.expressions

-import org.apache.spark.sql.types.decimal.Decimal
-import org.apache.spark.sql.types.{DecimalType, LongType, DoubleType, DataType}
+import org.apache.spark.sql.types._

 /** Return the unscaled Long value of a Decimal, assuming it fits in a Long */
 case class UnscaledValue(child: Expression) extends UnaryExpression {
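UnscaledValue, per its doc comment, exposes the unscaled Long behind a Decimal. The relationship it relies on, sketched with a plain java.math.BigDecimal (illustrative, not Spark code):

    // 1.23 at scale 2 has unscaled value 123, since 1.23 == 123 / 10^2.
    val bd = new java.math.BigDecimal("1.23")
    val unscaled: Long = bd.unscaledValue.longValueExact // 123
    assert(bd.scale == 2)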

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
Lines changed: 0 additions & 1 deletion

@@ -20,7 +20,6 @@ package org.apache.spark.sql.catalyst.expressions
 import java.sql.{Date, Timestamp}

 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 object Literal {
   def apply(v: Any): Literal = v match {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
Lines changed: 0 additions & 1 deletion

@@ -27,7 +27,6 @@ import org.apache.spark.sql.catalyst.plans.LeftSemi
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 abstract class Optimizer extends RuleExecutor[LogicalPlan]


sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import java.text.SimpleDateFormat

 import org.apache.spark.sql.catalyst.ScalaReflection
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.Decimal


 protected[sql] object DataTypeConversions {

sql/catalyst/src/main/scala/org/apache/spark/sql/types/decimal/Decimal.scala renamed to sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
Lines changed: 2 additions & 2 deletions

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package org.apache.spark.sql.types.decimal
+package org.apache.spark.sql.types

 import org.apache.spark.annotation.DeveloperApi

@@ -28,7 +28,7 @@ import org.apache.spark.annotation.DeveloperApi
  * - Otherwise, the decimal value is longVal / (10 ** _scale)
  */
 final class Decimal extends Ordered[Decimal] with Serializable {
-  import Decimal.{MAX_LONG_DIGITS, POW_10, ROUNDING_MODE, BIG_DEC_ZERO}
+  import org.apache.spark.sql.types.Decimal.{BIG_DEC_ZERO, MAX_LONG_DIGITS, POW_10, ROUNDING_MODE}

   private var decimalVal: BigDecimal = null
   private var longVal: Long = 0L
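The renamed file keeps the representation its comment describes: values with few enough digits live in an unscaled Long plus a scale, and everything else falls back to BigDecimal. A minimal standalone sketch of that layout (names and the digit threshold are illustrative, not Spark's actual fields):

    // Small values: unscaled Long plus a scale; large values: BigDecimal.
    final class SimpleDecimal private (longVal: Long, scale: Int, big: BigDecimal) {
      def toBigDecimal: BigDecimal =
        if (big ne null) big
        else BigDecimal(longVal) / BigDecimal(10).pow(scale)
    }

    object SimpleDecimal {
      private val MaxLongDigits = 18 // stands in for Decimal.MAX_LONG_DIGITS

      def apply(unscaled: Long, scale: Int): SimpleDecimal =
        new SimpleDecimal(unscaled, scale, null)

      def apply(value: BigDecimal): SimpleDecimal =
        if (value.precision <= MaxLongDigits)
          new SimpleDecimal(value.underlying.unscaledValue.longValueExact, value.scale, null)
        else
          new SimpleDecimal(0L, 0, value)
    }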

sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala
Lines changed: 0 additions & 1 deletion

@@ -32,7 +32,6 @@ import org.json4s.jackson.JsonMethods._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.sql.catalyst.ScalaReflectionLock
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression}
-import org.apache.spark.sql.types.decimal._
 import org.apache.spark.util.Utils



sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
Lines changed: 0 additions & 1 deletion

@@ -27,7 +27,6 @@ import org.scalatest.Matchers._

 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal


 class ExpressionEvaluationSuite extends FunSuite {

sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
Lines changed: 1 addition & 0 deletions

@@ -17,6 +17,7 @@

 package org.apache.spark.sql.types.decimal

+import org.apache.spark.sql.types.Decimal
 import org.scalatest.{PrivateMethodTester, FunSuite}

 import scala.language.postfixOps

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
Lines changed: 2 additions & 1 deletion

@@ -19,6 +19,8 @@ package org.apache.spark.sql.execution

 import java.nio.ByteBuffer

+import org.apache.spark.sql.types.Decimal
+
 import scala.reflect.ClassTag

 import com.clearspring.analytics.stream.cardinality.HyperLogLog
@@ -29,7 +31,6 @@ import com.twitter.chill.{AllScalaRegistrar, ResourcePool}
 import org.apache.spark.{SparkEnv, SparkConf}
 import org.apache.spark.serializer.{SerializerInstance, KryoSerializer}
 import org.apache.spark.sql.catalyst.expressions.GenericRow
-import org.apache.spark.sql.types.decimal.Decimal
 import org.apache.spark.util.collection.OpenHashSet
 import org.apache.spark.util.MutablePair
 import org.apache.spark.util.Utils
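SparkSqlSerializer only needed its import adjusted; the class is referenced here for Kryo-based serialization of SQL rows. A hedged sketch of the standard Kryo registration pattern (whether this exact line appears in the file is an assumption, but the register call itself is plain Kryo API):

    import com.esotericsoftware.kryo.Kryo
    import org.apache.spark.sql.types.Decimal

    val kryo = new Kryo()
    // Registered classes serialize with a compact id instead of the full class name.
    kryo.register(classOf[Decimal])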

sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
Lines changed: 0 additions & 1 deletion

@@ -33,7 +33,6 @@ import org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
 import org.apache.spark.Logging

 private[sql] object JsonRDD extends Logging {

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala
Lines changed: 0 additions & 1 deletion

@@ -25,7 +25,6 @@ import parquet.schema.MessageType
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.parquet.CatalystConverter.FieldType
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 /**
  * Collection of converters of Parquet types (group and primitive types) that

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala
Lines changed: 0 additions & 1 deletion

@@ -30,7 +30,6 @@ import parquet.schema.MessageType
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 /**
  * A `parquet.io.api.RecordMaterializer` for Rows.

sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
Lines changed: 0 additions & 1 deletion

@@ -25,7 +25,6 @@ import org.apache.spark.sql.json.JsonRDD.{compatibleType, enforceCorrectType}
 import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
 import org.apache.spark.sql.{QueryTest, Row, SQLConf}

 class JsonSuite extends QueryTest {

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
Lines changed: 1 addition & 2 deletions

@@ -26,7 +26,6 @@ import org.apache.hadoop.{io => hadoopIo}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 /* Implicit conversions */
 import scala.collection.JavaConversions._
@@ -43,7 +42,7 @@ import scala.collection.JavaConversions._
  *  long / scala.Long
  *  short / scala.Short
  *  byte / scala.Byte
- *  org.apache.spark.sql.types.decimal.Decimal
+ *  org.apache.spark.sql.types.Decimal
  *  Array[Byte]
  *  java.sql.Date
  *  java.sql.Timestamp

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
Lines changed: 0 additions & 1 deletion

@@ -35,7 +35,6 @@ import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.execution.ExplainCommand
 import org.apache.spark.sql.hive.execution.{HiveNativeCommand, DropTable, AnalyzeTable}
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 /* Implicit conversions */
 import scala.collection.JavaConversions._

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
Lines changed: 0 additions & 1 deletion

@@ -31,7 +31,6 @@ import org.scalatest.FunSuite

 import org.apache.spark.sql.catalyst.expressions.{Literal, Row}
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal

 class HiveInspectorSuite extends FunSuite with HiveInspectors {
   test("Test wrap SettableStructObjectInspector") {

sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
Lines changed: 1 addition & 2 deletions

@@ -41,8 +41,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, TypeInfoFactory}
 import org.apache.hadoop.io.NullWritable
 import org.apache.hadoop.mapred.InputFormat

-import org.apache.spark.sql.types.DecimalType
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.{Decimal, DecimalType}

 case class HiveFunctionWrapper(functionClassName: String) extends java.io.Serializable {
   // for Serialization

sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
Lines changed: 1 addition & 2 deletions

@@ -42,8 +42,7 @@ import org.apache.hadoop.hive.serde2.{io => hiveIo}
 import org.apache.hadoop.{io => hadoopIo}

 import org.apache.spark.Logging
-import org.apache.spark.sql.types.DecimalType
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.{Decimal, DecimalType}


 /**
