about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Reynold Xin <rxin@databricks.com>  2015-11-19 14:48:18 -0800
committer Reynold Xin <rxin@databricks.com>  2015-11-19 14:48:18 -0800
commit   014c0f7a9dfdb1686fa9aeacaadb2a17a855a943 (patch)
tree     04a6a7ee86f6dfb5abe824bb0f5e51330fb3fb60
parent   599a8c6e2bf7da70b20ef3046f5ce099dfd637f8 (diff)
download spark-014c0f7a9dfdb1686fa9aeacaadb2a17a855a943.tar.gz
spark-014c0f7a9dfdb1686fa9aeacaadb2a17a855a943.tar.bz2
spark-014c0f7a9dfdb1686fa9aeacaadb2a17a855a943.zip
[SPARK-11858][SQL] Move sql.columnar into sql.execution.
In addition, tightened visibility of a lot of classes in the columnar package from private[sql] to private[columnar]. Author: Reynold Xin <rxin@databricks.com> Closes #9842 from rxin/SPARK-11858.
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala2
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala2
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala)42
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala)51
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnStats.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala)34
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala)48
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/GenerateColumnAccessor.scala)4
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala)5
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessor.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala)4
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilder.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala)4
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnAccessor.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnAccessor.scala)6
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnBuilder.scala)6
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressionScheme.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressionScheme.scala)16
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/compressionSchemes.scala)16
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala2
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala4
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala2
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala)6
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala)4
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnarTestUtils.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala)2
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala)2
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessorSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala)4
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilderSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala)4
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/PartitionBatchPruningSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala)2
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/BooleanBitSetSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala)6
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/DictionaryEncodingSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala)6
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/IntegralDeltaSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala)6
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala)6
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/TestCompressibleColumnBuilder.scala (renamed from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala)4
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala2
30 files changed, 155 insertions, 147 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
index f85aeb1b02..293fcfe96e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
@@ -22,7 +22,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock
import org.apache.spark.Logging
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.columnar.InMemoryRelation
+import org.apache.spark.sql.execution.columnar.InMemoryRelation
import org.apache.spark.storage.StorageLevel
import org.apache.spark.storage.StorageLevel.MEMORY_AND_DISK
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 3d4ce633c0..f67c951bc0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.planning._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical.{BroadcastHint, LogicalPlan}
import org.apache.spark.sql.catalyst.plans.physical._
-import org.apache.spark.sql.columnar.{InMemoryColumnarTableScan, InMemoryRelation}
+import org.apache.spark.sql.execution.columnar.{InMemoryColumnarTableScan, InMemoryRelation}
import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTempTableUsing, DescribeCommand => LogicalDescribeCommand, _}
import org.apache.spark.sql.execution.{DescribeCommand => RunnableDescribeCommand}
import org.apache.spark.sql.{Strategy, execution}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala
index 42ec4d3433..fee36f6023 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.sql.catalyst.expressions.{MutableRow, UnsafeArrayData, UnsafeMapData, UnsafeRow}
-import org.apache.spark.sql.columnar.compression.CompressibleColumnAccessor
+import org.apache.spark.sql.execution.columnar.compression.CompressibleColumnAccessor
import org.apache.spark.sql.types._
/**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.types._
* a [[MutableRow]]. In this way, boxing cost can be avoided by leveraging the setter methods
* for primitive values provided by [[MutableRow]].
*/
-private[sql] trait ColumnAccessor {
+private[columnar] trait ColumnAccessor {
initialize()
protected def initialize()
@@ -41,7 +41,7 @@ private[sql] trait ColumnAccessor {
protected def underlyingBuffer: ByteBuffer
}
-private[sql] abstract class BasicColumnAccessor[JvmType](
+private[columnar] abstract class BasicColumnAccessor[JvmType](
protected val buffer: ByteBuffer,
protected val columnType: ColumnType[JvmType])
extends ColumnAccessor {
@@ -61,65 +61,65 @@ private[sql] abstract class BasicColumnAccessor[JvmType](
protected def underlyingBuffer = buffer
}
-private[sql] class NullColumnAccessor(buffer: ByteBuffer)
+private[columnar] class NullColumnAccessor(buffer: ByteBuffer)
extends BasicColumnAccessor[Any](buffer, NULL)
with NullableColumnAccessor
-private[sql] abstract class NativeColumnAccessor[T <: AtomicType](
+private[columnar] abstract class NativeColumnAccessor[T <: AtomicType](
override protected val buffer: ByteBuffer,
override protected val columnType: NativeColumnType[T])
extends BasicColumnAccessor(buffer, columnType)
with NullableColumnAccessor
with CompressibleColumnAccessor[T]
-private[sql] class BooleanColumnAccessor(buffer: ByteBuffer)
+private[columnar] class BooleanColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, BOOLEAN)
-private[sql] class ByteColumnAccessor(buffer: ByteBuffer)
+private[columnar] class ByteColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, BYTE)
-private[sql] class ShortColumnAccessor(buffer: ByteBuffer)
+private[columnar] class ShortColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, SHORT)
-private[sql] class IntColumnAccessor(buffer: ByteBuffer)
+private[columnar] class IntColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, INT)
-private[sql] class LongColumnAccessor(buffer: ByteBuffer)
+private[columnar] class LongColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, LONG)
-private[sql] class FloatColumnAccessor(buffer: ByteBuffer)
+private[columnar] class FloatColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, FLOAT)
-private[sql] class DoubleColumnAccessor(buffer: ByteBuffer)
+private[columnar] class DoubleColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, DOUBLE)
-private[sql] class StringColumnAccessor(buffer: ByteBuffer)
+private[columnar] class StringColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, STRING)
-private[sql] class BinaryColumnAccessor(buffer: ByteBuffer)
+private[columnar] class BinaryColumnAccessor(buffer: ByteBuffer)
extends BasicColumnAccessor[Array[Byte]](buffer, BINARY)
with NullableColumnAccessor
-private[sql] class CompactDecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
+private[columnar] class CompactDecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
extends NativeColumnAccessor(buffer, COMPACT_DECIMAL(dataType))
-private[sql] class DecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
+private[columnar] class DecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
extends BasicColumnAccessor[Decimal](buffer, LARGE_DECIMAL(dataType))
with NullableColumnAccessor
-private[sql] class StructColumnAccessor(buffer: ByteBuffer, dataType: StructType)
+private[columnar] class StructColumnAccessor(buffer: ByteBuffer, dataType: StructType)
extends BasicColumnAccessor[UnsafeRow](buffer, STRUCT(dataType))
with NullableColumnAccessor
-private[sql] class ArrayColumnAccessor(buffer: ByteBuffer, dataType: ArrayType)
+private[columnar] class ArrayColumnAccessor(buffer: ByteBuffer, dataType: ArrayType)
extends BasicColumnAccessor[UnsafeArrayData](buffer, ARRAY(dataType))
with NullableColumnAccessor
-private[sql] class MapColumnAccessor(buffer: ByteBuffer, dataType: MapType)
+private[columnar] class MapColumnAccessor(buffer: ByteBuffer, dataType: MapType)
extends BasicColumnAccessor[UnsafeMapData](buffer, MAP(dataType))
with NullableColumnAccessor
-private[sql] object ColumnAccessor {
+private[columnar] object ColumnAccessor {
def apply(dataType: DataType, buffer: ByteBuffer): ColumnAccessor = {
val buf = buffer.order(ByteOrder.nativeOrder)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
index 599f30f2d7..7e26f19bb7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.columnar.ColumnBuilder._
-import org.apache.spark.sql.columnar.compression.{AllCompressionSchemes, CompressibleColumnBuilder}
+import org.apache.spark.sql.execution.columnar.ColumnBuilder._
+import org.apache.spark.sql.execution.columnar.compression.{AllCompressionSchemes, CompressibleColumnBuilder}
import org.apache.spark.sql.types._
-private[sql] trait ColumnBuilder {
+private[columnar] trait ColumnBuilder {
/**
* Initializes with an approximate lower bound on the expected number of elements in this column.
*/
@@ -46,7 +46,7 @@ private[sql] trait ColumnBuilder {
def build(): ByteBuffer
}
-private[sql] class BasicColumnBuilder[JvmType](
+private[columnar] class BasicColumnBuilder[JvmType](
val columnStats: ColumnStats,
val columnType: ColumnType[JvmType])
extends ColumnBuilder {
@@ -84,17 +84,17 @@ private[sql] class BasicColumnBuilder[JvmType](
}
}
-private[sql] class NullColumnBuilder
+private[columnar] class NullColumnBuilder
extends BasicColumnBuilder[Any](new ObjectColumnStats(NullType), NULL)
with NullableColumnBuilder
-private[sql] abstract class ComplexColumnBuilder[JvmType](
+private[columnar] abstract class ComplexColumnBuilder[JvmType](
columnStats: ColumnStats,
columnType: ColumnType[JvmType])
extends BasicColumnBuilder[JvmType](columnStats, columnType)
with NullableColumnBuilder
-private[sql] abstract class NativeColumnBuilder[T <: AtomicType](
+private[columnar] abstract class NativeColumnBuilder[T <: AtomicType](
override val columnStats: ColumnStats,
override val columnType: NativeColumnType[T])
extends BasicColumnBuilder[T#InternalType](columnStats, columnType)
@@ -102,40 +102,45 @@ private[sql] abstract class NativeColumnBuilder[T <: AtomicType](
with AllCompressionSchemes
with CompressibleColumnBuilder[T]
-private[sql] class BooleanColumnBuilder extends NativeColumnBuilder(new BooleanColumnStats, BOOLEAN)
+private[columnar]
+class BooleanColumnBuilder extends NativeColumnBuilder(new BooleanColumnStats, BOOLEAN)
-private[sql] class ByteColumnBuilder extends NativeColumnBuilder(new ByteColumnStats, BYTE)
+private[columnar]
+class ByteColumnBuilder extends NativeColumnBuilder(new ByteColumnStats, BYTE)
-private[sql] class ShortColumnBuilder extends NativeColumnBuilder(new ShortColumnStats, SHORT)
+private[columnar] class ShortColumnBuilder extends NativeColumnBuilder(new ShortColumnStats, SHORT)
-private[sql] class IntColumnBuilder extends NativeColumnBuilder(new IntColumnStats, INT)
+private[columnar] class IntColumnBuilder extends NativeColumnBuilder(new IntColumnStats, INT)
-private[sql] class LongColumnBuilder extends NativeColumnBuilder(new LongColumnStats, LONG)
+private[columnar] class LongColumnBuilder extends NativeColumnBuilder(new LongColumnStats, LONG)
-private[sql] class FloatColumnBuilder extends NativeColumnBuilder(new FloatColumnStats, FLOAT)
+private[columnar] class FloatColumnBuilder extends NativeColumnBuilder(new FloatColumnStats, FLOAT)
-private[sql] class DoubleColumnBuilder extends NativeColumnBuilder(new DoubleColumnStats, DOUBLE)
+private[columnar]
+class DoubleColumnBuilder extends NativeColumnBuilder(new DoubleColumnStats, DOUBLE)
-private[sql] class StringColumnBuilder extends NativeColumnBuilder(new StringColumnStats, STRING)
+private[columnar]
+class StringColumnBuilder extends NativeColumnBuilder(new StringColumnStats, STRING)
-private[sql] class BinaryColumnBuilder extends ComplexColumnBuilder(new BinaryColumnStats, BINARY)
+private[columnar]
+class BinaryColumnBuilder extends ComplexColumnBuilder(new BinaryColumnStats, BINARY)
-private[sql] class CompactDecimalColumnBuilder(dataType: DecimalType)
+private[columnar] class CompactDecimalColumnBuilder(dataType: DecimalType)
extends NativeColumnBuilder(new DecimalColumnStats(dataType), COMPACT_DECIMAL(dataType))
-private[sql] class DecimalColumnBuilder(dataType: DecimalType)
+private[columnar] class DecimalColumnBuilder(dataType: DecimalType)
extends ComplexColumnBuilder(new DecimalColumnStats(dataType), LARGE_DECIMAL(dataType))
-private[sql] class StructColumnBuilder(dataType: StructType)
+private[columnar] class StructColumnBuilder(dataType: StructType)
extends ComplexColumnBuilder(new ObjectColumnStats(dataType), STRUCT(dataType))
-private[sql] class ArrayColumnBuilder(dataType: ArrayType)
+private[columnar] class ArrayColumnBuilder(dataType: ArrayType)
extends ComplexColumnBuilder(new ObjectColumnStats(dataType), ARRAY(dataType))
-private[sql] class MapColumnBuilder(dataType: MapType)
+private[columnar] class MapColumnBuilder(dataType: MapType)
extends ComplexColumnBuilder(new ObjectColumnStats(dataType), MAP(dataType))
-private[sql] object ColumnBuilder {
+private[columnar] object ColumnBuilder {
val DEFAULT_INITIAL_BUFFER_SIZE = 128 * 1024
val MAX_BATCH_SIZE_IN_BYTE = 4 * 1024 * 1024L
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnStats.scala
index 91a0565058..c52ee9ffd6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnStats.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, Attribute, AttributeMap, AttributeReference}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
-private[sql] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
+private[columnar] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
val upperBound = AttributeReference(a.name + ".upperBound", a.dataType, nullable = true)()
val lowerBound = AttributeReference(a.name + ".lowerBound", a.dataType, nullable = true)()
val nullCount = AttributeReference(a.name + ".nullCount", IntegerType, nullable = false)()
@@ -32,7 +32,7 @@ private[sql] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
val schema = Seq(lowerBound, upperBound, nullCount, count, sizeInBytes)
}
-private[sql] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Serializable {
+private[columnar] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Serializable {
val (forAttribute, schema) = {
val allStats = tableSchema.map(a => a -> new ColumnStatisticsSchema(a))
(AttributeMap(allStats), allStats.map(_._2.schema).foldLeft(Seq.empty[Attribute])(_ ++ _))
@@ -45,10 +45,10 @@ private[sql] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Seri
* NOTE: we intentionally avoid using `Ordering[T]` to compare values here because `Ordering[T]`
* brings significant performance penalty.
*/
-private[sql] sealed trait ColumnStats extends Serializable {
+private[columnar] sealed trait ColumnStats extends Serializable {
protected var count = 0
protected var nullCount = 0
- private[sql] var sizeInBytes = 0L
+ private[columnar] var sizeInBytes = 0L
/**
* Gathers statistics information from `row(ordinal)`.
@@ -72,14 +72,14 @@ private[sql] sealed trait ColumnStats extends Serializable {
/**
* A no-op ColumnStats only used for testing purposes.
*/
-private[sql] class NoopColumnStats extends ColumnStats {
+private[columnar] class NoopColumnStats extends ColumnStats {
override def gatherStats(row: InternalRow, ordinal: Int): Unit = super.gatherStats(row, ordinal)
override def collectedStatistics: GenericInternalRow =
new GenericInternalRow(Array[Any](null, null, nullCount, count, 0L))
}
-private[sql] class BooleanColumnStats extends ColumnStats {
+private[columnar] class BooleanColumnStats extends ColumnStats {
protected var upper = false
protected var lower = true
@@ -97,7 +97,7 @@ private[sql] class BooleanColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class ByteColumnStats extends ColumnStats {
+private[columnar] class ByteColumnStats extends ColumnStats {
protected var upper = Byte.MinValue
protected var lower = Byte.MaxValue
@@ -115,7 +115,7 @@ private[sql] class ByteColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class ShortColumnStats extends ColumnStats {
+private[columnar] class ShortColumnStats extends ColumnStats {
protected var upper = Short.MinValue
protected var lower = Short.MaxValue
@@ -133,7 +133,7 @@ private[sql] class ShortColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class IntColumnStats extends ColumnStats {
+private[columnar] class IntColumnStats extends ColumnStats {
protected var upper = Int.MinValue
protected var lower = Int.MaxValue
@@ -151,7 +151,7 @@ private[sql] class IntColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class LongColumnStats extends ColumnStats {
+private[columnar] class LongColumnStats extends ColumnStats {
protected var upper = Long.MinValue
protected var lower = Long.MaxValue
@@ -169,7 +169,7 @@ private[sql] class LongColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class FloatColumnStats extends ColumnStats {
+private[columnar] class FloatColumnStats extends ColumnStats {
protected var upper = Float.MinValue
protected var lower = Float.MaxValue
@@ -187,7 +187,7 @@ private[sql] class FloatColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class DoubleColumnStats extends ColumnStats {
+private[columnar] class DoubleColumnStats extends ColumnStats {
protected var upper = Double.MinValue
protected var lower = Double.MaxValue
@@ -205,7 +205,7 @@ private[sql] class DoubleColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class StringColumnStats extends ColumnStats {
+private[columnar] class StringColumnStats extends ColumnStats {
protected var upper: UTF8String = null
protected var lower: UTF8String = null
@@ -223,7 +223,7 @@ private[sql] class StringColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class BinaryColumnStats extends ColumnStats {
+private[columnar] class BinaryColumnStats extends ColumnStats {
override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
super.gatherStats(row, ordinal)
if (!row.isNullAt(ordinal)) {
@@ -235,7 +235,7 @@ private[sql] class BinaryColumnStats extends ColumnStats {
new GenericInternalRow(Array[Any](null, null, nullCount, count, sizeInBytes))
}
-private[sql] class DecimalColumnStats(precision: Int, scale: Int) extends ColumnStats {
+private[columnar] class DecimalColumnStats(precision: Int, scale: Int) extends ColumnStats {
def this(dt: DecimalType) = this(dt.precision, dt.scale)
protected var upper: Decimal = null
@@ -256,7 +256,7 @@ private[sql] class DecimalColumnStats(precision: Int, scale: Int) extends Column
new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
}
-private[sql] class ObjectColumnStats(dataType: DataType) extends ColumnStats {
+private[columnar] class ObjectColumnStats(dataType: DataType) extends ColumnStats {
val columnType = ColumnType(dataType)
override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
index 68e509eb50..c9f2329db4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.math.{BigDecimal, BigInteger}
import java.nio.ByteBuffer
@@ -41,7 +41,7 @@ import org.apache.spark.unsafe.types.UTF8String
*
* WARNNING: This only works with HeapByteBuffer
*/
-object ByteBufferHelper {
+private[columnar] object ByteBufferHelper {
def getInt(buffer: ByteBuffer): Int = {
val pos = buffer.position()
buffer.position(pos + 4)
@@ -73,7 +73,7 @@ object ByteBufferHelper {
*
* @tparam JvmType Underlying Java type to represent the elements.
*/
-private[sql] sealed abstract class ColumnType[JvmType] {
+private[columnar] sealed abstract class ColumnType[JvmType] {
// The catalyst data type of this column.
def dataType: DataType
@@ -142,7 +142,7 @@ private[sql] sealed abstract class ColumnType[JvmType] {
override def toString: String = getClass.getSimpleName.stripSuffix("$")
}
-private[sql] object NULL extends ColumnType[Any] {
+private[columnar] object NULL extends ColumnType[Any] {
override def dataType: DataType = NullType
override def defaultSize: Int = 0
@@ -152,7 +152,7 @@ private[sql] object NULL extends ColumnType[Any] {
override def getField(row: InternalRow, ordinal: Int): Any = null
}
-private[sql] abstract class NativeColumnType[T <: AtomicType](
+private[columnar] abstract class NativeColumnType[T <: AtomicType](
val dataType: T,
val defaultSize: Int)
extends ColumnType[T#InternalType] {
@@ -163,7 +163,7 @@ private[sql] abstract class NativeColumnType[T <: AtomicType](
def scalaTag: TypeTag[dataType.InternalType] = dataType.tag
}
-private[sql] object INT extends NativeColumnType(IntegerType, 4) {
+private[columnar] object INT extends NativeColumnType(IntegerType, 4) {
override def append(v: Int, buffer: ByteBuffer): Unit = {
buffer.putInt(v)
}
@@ -192,7 +192,7 @@ private[sql] object INT extends NativeColumnType(IntegerType, 4) {
}
}
-private[sql] object LONG extends NativeColumnType(LongType, 8) {
+private[columnar] object LONG extends NativeColumnType(LongType, 8) {
override def append(v: Long, buffer: ByteBuffer): Unit = {
buffer.putLong(v)
}
@@ -220,7 +220,7 @@ private[sql] object LONG extends NativeColumnType(LongType, 8) {
}
}
-private[sql] object FLOAT extends NativeColumnType(FloatType, 4) {
+private[columnar] object FLOAT extends NativeColumnType(FloatType, 4) {
override def append(v: Float, buffer: ByteBuffer): Unit = {
buffer.putFloat(v)
}
@@ -248,7 +248,7 @@ private[sql] object FLOAT extends NativeColumnType(FloatType, 4) {
}
}
-private[sql] object DOUBLE extends NativeColumnType(DoubleType, 8) {
+private[columnar] object DOUBLE extends NativeColumnType(DoubleType, 8) {
override def append(v: Double, buffer: ByteBuffer): Unit = {
buffer.putDouble(v)
}
@@ -276,7 +276,7 @@ private[sql] object DOUBLE extends NativeColumnType(DoubleType, 8) {
}
}
-private[sql] object BOOLEAN extends NativeColumnType(BooleanType, 1) {
+private[columnar] object BOOLEAN extends NativeColumnType(BooleanType, 1) {
override def append(v: Boolean, buffer: ByteBuffer): Unit = {
buffer.put(if (v) 1: Byte else 0: Byte)
}
@@ -302,7 +302,7 @@ private[sql] object BOOLEAN extends NativeColumnType(BooleanType, 1) {
}
}
-private[sql] object BYTE extends NativeColumnType(ByteType, 1) {
+private[columnar] object BYTE extends NativeColumnType(ByteType, 1) {
override def append(v: Byte, buffer: ByteBuffer): Unit = {
buffer.put(v)
}
@@ -330,7 +330,7 @@ private[sql] object BYTE extends NativeColumnType(ByteType, 1) {
}
}
-private[sql] object SHORT extends NativeColumnType(ShortType, 2) {
+private[columnar] object SHORT extends NativeColumnType(ShortType, 2) {
override def append(v: Short, buffer: ByteBuffer): Unit = {
buffer.putShort(v)
}
@@ -362,7 +362,7 @@ private[sql] object SHORT extends NativeColumnType(ShortType, 2) {
* A fast path to copy var-length bytes between ByteBuffer and UnsafeRow without creating wrapper
* objects.
*/
-private[sql] trait DirectCopyColumnType[JvmType] extends ColumnType[JvmType] {
+private[columnar] trait DirectCopyColumnType[JvmType] extends ColumnType[JvmType] {
// copy the bytes from ByteBuffer to UnsafeRow
override def extract(buffer: ByteBuffer, row: MutableRow, ordinal: Int): Unit = {
@@ -387,7 +387,7 @@ private[sql] trait DirectCopyColumnType[JvmType] extends ColumnType[JvmType] {
}
}
-private[sql] object STRING
+private[columnar] object STRING
extends NativeColumnType(StringType, 8) with DirectCopyColumnType[UTF8String] {
override def actualSize(row: InternalRow, ordinal: Int): Int = {
@@ -425,7 +425,7 @@ private[sql] object STRING
override def clone(v: UTF8String): UTF8String = v.clone()
}
-private[sql] case class COMPACT_DECIMAL(precision: Int, scale: Int)
+private[columnar] case class COMPACT_DECIMAL(precision: Int, scale: Int)
extends NativeColumnType(DecimalType(precision, scale), 8) {
override def extract(buffer: ByteBuffer): Decimal = {
@@ -467,13 +467,13 @@ private[sql] case class COMPACT_DECIMAL(precision: Int, scale: Int)
}
}
-private[sql] object COMPACT_DECIMAL {
+private[columnar] object COMPACT_DECIMAL {
def apply(dt: DecimalType): COMPACT_DECIMAL = {
COMPACT_DECIMAL(dt.precision, dt.scale)
}
}
-private[sql] sealed abstract class ByteArrayColumnType[JvmType](val defaultSize: Int)
+private[columnar] sealed abstract class ByteArrayColumnType[JvmType](val defaultSize: Int)
extends ColumnType[JvmType] with DirectCopyColumnType[JvmType] {
def serialize(value: JvmType): Array[Byte]
@@ -492,7 +492,7 @@ private[sql] sealed abstract class ByteArrayColumnType[JvmType](val defaultSize:
}
}
-private[sql] object BINARY extends ByteArrayColumnType[Array[Byte]](16) {
+private[columnar] object BINARY extends ByteArrayColumnType[Array[Byte]](16) {
def dataType: DataType = BinaryType
@@ -512,7 +512,7 @@ private[sql] object BINARY extends ByteArrayColumnType[Array[Byte]](16) {
def deserialize(bytes: Array[Byte]): Array[Byte] = bytes
}
-private[sql] case class LARGE_DECIMAL(precision: Int, scale: Int)
+private[columnar] case class LARGE_DECIMAL(precision: Int, scale: Int)
extends ByteArrayColumnType[Decimal](12) {
override val dataType: DataType = DecimalType(precision, scale)
@@ -539,13 +539,13 @@ private[sql] case class LARGE_DECIMAL(precision: Int, scale: Int)
}
}
-private[sql] object LARGE_DECIMAL {
+private[columnar] object LARGE_DECIMAL {
def apply(dt: DecimalType): LARGE_DECIMAL = {
LARGE_DECIMAL(dt.precision, dt.scale)
}
}
-private[sql] case class STRUCT(dataType: StructType)
+private[columnar] case class STRUCT(dataType: StructType)
extends ColumnType[UnsafeRow] with DirectCopyColumnType[UnsafeRow] {
private val numOfFields: Int = dataType.fields.size
@@ -586,7 +586,7 @@ private[sql] case class STRUCT(dataType: StructType)
override def clone(v: UnsafeRow): UnsafeRow = v.copy()
}
-private[sql] case class ARRAY(dataType: ArrayType)
+private[columnar] case class ARRAY(dataType: ArrayType)
extends ColumnType[UnsafeArrayData] with DirectCopyColumnType[UnsafeArrayData] {
override def defaultSize: Int = 16
@@ -625,7 +625,7 @@ private[sql] case class ARRAY(dataType: ArrayType)
override def clone(v: UnsafeArrayData): UnsafeArrayData = v.copy()
}
-private[sql] case class MAP(dataType: MapType)
+private[columnar] case class MAP(dataType: MapType)
extends ColumnType[UnsafeMapData] with DirectCopyColumnType[UnsafeMapData] {
override def defaultSize: Int = 32
@@ -663,7 +663,7 @@ private[sql] case class MAP(dataType: MapType)
override def clone(v: UnsafeMapData): UnsafeMapData = v.copy()
}
-private[sql] object ColumnType {
+private[columnar] object ColumnType {
def apply(dataType: DataType): ColumnType[_] = {
dataType match {
case NullType => NULL
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/GenerateColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
index ff9393b465..eaafc96e4d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/GenerateColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.InternalRow
@@ -121,7 +121,7 @@ object GenerateColumnAccessor extends CodeGenerator[Seq[DataType], ColumnarItera
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder;
import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter;
- import org.apache.spark.sql.columnar.MutableUnsafeRow;
+ import org.apache.spark.sql.execution.columnar.MutableUnsafeRow;
public SpecificColumnarIterator generate($exprType[] expr) {
return new SpecificColumnarIterator();
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala
index ae77298e6d..ce701fb3a7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import scala.collection.mutable.ArrayBuffer
@@ -50,7 +50,8 @@ private[sql] object InMemoryRelation {
* @param buffers The buffers for serialized columns
* @param stats The stat of columns
*/
-private[sql] case class CachedBatch(numRows: Int, buffers: Array[Array[Byte]], stats: InternalRow)
+private[columnar]
+case class CachedBatch(numRows: Int, buffers: Array[Array[Byte]], stats: InternalRow)
private[sql] case class InMemoryRelation(
output: Seq[Attribute],
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessor.scala
index 7eaecfe047..8d99546924 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessor.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.nio.{ByteOrder, ByteBuffer}
import org.apache.spark.sql.catalyst.expressions.MutableRow
-private[sql] trait NullableColumnAccessor extends ColumnAccessor {
+private[columnar] trait NullableColumnAccessor extends ColumnAccessor {
private var nullsBuffer: ByteBuffer = _
private var nullCount: Int = _
private var seenNulls: Int = 0
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilder.scala
index 76cfddf1cd..3a1931bfb5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilder.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.nio.{ByteBuffer, ByteOrder}
@@ -34,7 +34,7 @@ import org.apache.spark.sql.catalyst.InternalRow
* +---+-----+---------+
* }}}
*/
-private[sql] trait NullableColumnBuilder extends ColumnBuilder {
+private[columnar] trait NullableColumnBuilder extends ColumnBuilder {
protected var nulls: ByteBuffer = _
protected var nullCount: Int = _
private var pos: Int = _
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnAccessor.scala
index cb205defbb..6579b5068e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnAccessor.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import org.apache.spark.sql.catalyst.expressions.MutableRow
-import org.apache.spark.sql.columnar.{ColumnAccessor, NativeColumnAccessor}
+import org.apache.spark.sql.execution.columnar.{ColumnAccessor, NativeColumnAccessor}
import org.apache.spark.sql.types.AtomicType
-private[sql] trait CompressibleColumnAccessor[T <: AtomicType] extends ColumnAccessor {
+private[columnar] trait CompressibleColumnAccessor[T <: AtomicType] extends ColumnAccessor {
this: NativeColumnAccessor[T] =>
private var decoder: Decoder[T] = _
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala
index 161021ff96..b0e216feb5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.columnar.{ColumnBuilder, NativeColumnBuilder}
+import org.apache.spark.sql.execution.columnar.{ColumnBuilder, NativeColumnBuilder}
import org.apache.spark.sql.types.AtomicType
/**
@@ -40,7 +40,7 @@ import org.apache.spark.sql.types.AtomicType
* header body
* }}}
*/
-private[sql] trait CompressibleColumnBuilder[T <: AtomicType]
+private[columnar] trait CompressibleColumnBuilder[T <: AtomicType]
extends ColumnBuilder with Logging {
this: NativeColumnBuilder[T] with WithCompressionSchemes =>
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressionScheme.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressionScheme.scala
index 9322b772fd..920381f9c6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressionScheme.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressionScheme.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.MutableRow
-import org.apache.spark.sql.columnar.{ColumnType, NativeColumnType}
+import org.apache.spark.sql.execution.columnar.{ColumnType, NativeColumnType}
import org.apache.spark.sql.types.AtomicType
-private[sql] trait Encoder[T <: AtomicType] {
+private[columnar] trait Encoder[T <: AtomicType] {
def gatherCompressibilityStats(row: InternalRow, ordinal: Int): Unit = {}
def compressedSize: Int
@@ -37,13 +37,13 @@ private[sql] trait Encoder[T <: AtomicType] {
def compress(from: ByteBuffer, to: ByteBuffer): ByteBuffer
}
-private[sql] trait Decoder[T <: AtomicType] {
+private[columnar] trait Decoder[T <: AtomicType] {
def next(row: MutableRow, ordinal: Int): Unit
def hasNext: Boolean
}
-private[sql] trait CompressionScheme {
+private[columnar] trait CompressionScheme {
def typeId: Int
def supports(columnType: ColumnType[_]): Boolean
@@ -53,15 +53,15 @@ private[sql] trait CompressionScheme {
def decoder[T <: AtomicType](buffer: ByteBuffer, columnType: NativeColumnType[T]): Decoder[T]
}
-private[sql] trait WithCompressionSchemes {
+private[columnar] trait WithCompressionSchemes {
def schemes: Seq[CompressionScheme]
}
-private[sql] trait AllCompressionSchemes extends WithCompressionSchemes {
+private[columnar] trait AllCompressionSchemes extends WithCompressionSchemes {
override val schemes: Seq[CompressionScheme] = CompressionScheme.all
}
-private[sql] object CompressionScheme {
+private[columnar] object CompressionScheme {
val all: Seq[CompressionScheme] =
Seq(PassThrough, RunLengthEncoding, DictionaryEncoding, BooleanBitSet, IntDelta, LongDelta)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/compressionSchemes.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
index 41c9a284e3..941f03b745 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/compressionSchemes.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import java.nio.ByteBuffer
@@ -23,11 +23,11 @@ import scala.collection.mutable
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{MutableRow, SpecificMutableRow}
-import org.apache.spark.sql.columnar._
+import org.apache.spark.sql.execution.columnar._
import org.apache.spark.sql.types._
-private[sql] case object PassThrough extends CompressionScheme {
+private[columnar] case object PassThrough extends CompressionScheme {
override val typeId = 0
override def supports(columnType: ColumnType[_]): Boolean = true
@@ -64,7 +64,7 @@ private[sql] case object PassThrough extends CompressionScheme {
}
}
-private[sql] case object RunLengthEncoding extends CompressionScheme {
+private[columnar] case object RunLengthEncoding extends CompressionScheme {
override val typeId = 1
override def encoder[T <: AtomicType](columnType: NativeColumnType[T]): Encoder[T] = {
@@ -172,7 +172,7 @@ private[sql] case object RunLengthEncoding extends CompressionScheme {
}
}
-private[sql] case object DictionaryEncoding extends CompressionScheme {
+private[columnar] case object DictionaryEncoding extends CompressionScheme {
override val typeId = 2
// 32K unique values allowed
@@ -281,7 +281,7 @@ private[sql] case object DictionaryEncoding extends CompressionScheme {
}
}
-private[sql] case object BooleanBitSet extends CompressionScheme {
+private[columnar] case object BooleanBitSet extends CompressionScheme {
override val typeId = 3
val BITS_PER_LONG = 64
@@ -371,7 +371,7 @@ private[sql] case object BooleanBitSet extends CompressionScheme {
}
}
-private[sql] case object IntDelta extends CompressionScheme {
+private[columnar] case object IntDelta extends CompressionScheme {
override def typeId: Int = 4
override def decoder[T <: AtomicType](buffer: ByteBuffer, columnType: NativeColumnType[T])
@@ -451,7 +451,7 @@ private[sql] case object IntDelta extends CompressionScheme {
}
}
-private[sql] case object LongDelta extends CompressionScheme {
+private[columnar] case object LongDelta extends CompressionScheme {
override def typeId: Int = 5
override def decoder[T <: AtomicType](buffer: ByteBuffer, columnType: NativeColumnType[T])
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala
index 28fa231e72..c912734bba 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala
@@ -19,5 +19,7 @@ package org.apache.spark.sql
/**
* The physical execution component of Spark SQL. Note that this is a private package.
+ * All classes in this package are considered an internal API to Spark SQL and are
+ * subject to change between minor releases.
*/
package object execution
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index bce94dafad..d86df4cfb9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -27,7 +27,7 @@ import scala.language.postfixOps
import org.scalatest.concurrent.Eventually._
import org.apache.spark.Accumulators
-import org.apache.spark.sql.columnar._
+import org.apache.spark.sql.execution.columnar._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.{SQLTestUtils, SharedSQLContext}
import org.apache.spark.storage.{StorageLevel, RDDBlockId}
@@ -280,7 +280,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
sql("CACHE TABLE testData")
sqlContext.table("testData").queryExecution.withCachedData.collect {
case cached: InMemoryRelation =>
- val actualSizeInBytes = (1 to 100).map(i => INT.defaultSize + i.toString.length + 4).sum
+ val actualSizeInBytes = (1 to 100).map(i => 4 + i.toString.length + 4).sum
assert(cached.statistics.sizeInBytes === actualSizeInBytes)
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
index b5417b195f..6ea1fe4ccf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.columnar.InMemoryRelation
+import org.apache.spark.sql.execution.columnar.InMemoryRelation
abstract class QueryTest extends PlanTest {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala
index 89a664001b..b2d04f7c5a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
@@ -50,7 +50,7 @@ class ColumnStatsSuite extends SparkFunSuite {
}
test(s"$columnStatsName: non-empty") {
- import org.apache.spark.sql.columnar.ColumnarTestUtils._
+ import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
val columnStats = columnStatsClass.newInstance()
val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
@@ -86,7 +86,7 @@ class ColumnStatsSuite extends SparkFunSuite {
}
test(s"$columnStatsName: non-empty") {
- import org.apache.spark.sql.columnar.ColumnarTestUtils._
+ import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
val columnStats = new DecimalColumnStats(15, 10)
val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
index 63bc39bfa0..34dd96929e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.nio.{ByteOrder, ByteBuffer}
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.CatalystTypeConverters
import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, GenericMutableRow}
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
import org.apache.spark.sql.types._
import org.apache.spark.{Logging, SparkFunSuite}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnarTestUtils.scala
index a5882f7870..9cae65ef6f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnarTestUtils.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import scala.collection.immutable.HashSet
import scala.util.Random
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
index 6265e40a0a..25afed25c8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.sql.{Date, Timestamp}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessorSuite.scala
index aa1605fee8..35dc9a276c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import java.nio.ByteBuffer
@@ -38,7 +38,7 @@ object TestNullableColumnAccessor {
}
class NullableColumnAccessorSuite extends SparkFunSuite {
- import org.apache.spark.sql.columnar.ColumnarTestUtils._
+ import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
Seq(
NULL, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilderSuite.scala
index 9140457783..93be3e16a5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilderSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.CatalystTypeConverters
@@ -36,7 +36,7 @@ object TestNullableColumnBuilder {
}
class NullableColumnBuilderSuite extends SparkFunSuite {
- import org.apache.spark.sql.columnar.ColumnarTestUtils._
+ import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
Seq(
BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/PartitionBatchPruningSuite.scala
index 6b7401464f..d762f7bfe9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/PartitionBatchPruningSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/BooleanBitSetSuite.scala
index 9a2948c59b..ccbddef0fa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/BooleanBitSetSuite.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
-import org.apache.spark.sql.columnar.{BOOLEAN, NoopColumnStats}
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar.{BOOLEAN, NoopColumnStats}
class BooleanBitSetSuite extends SparkFunSuite {
import BooleanBitSet._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/DictionaryEncodingSuite.scala
index acfab6586c..830ca0294e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/DictionaryEncodingSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import java.nio.ByteBuffer
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
import org.apache.spark.sql.types.AtomicType
class DictionaryEncodingSuite extends SparkFunSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/IntegralDeltaSuite.scala
index 2111e9fbe6..988a577a7b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/IntegralDeltaSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
import org.apache.spark.sql.types.IntegralType
class IntegralDeltaSuite extends SparkFunSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
index 67ec08f594..ce3affba55 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
import org.apache.spark.sql.types.AtomicType
class RunLengthEncodingSuite extends SparkFunSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/TestCompressibleColumnBuilder.scala
index 5268dfe0aa..5e078f2513 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/TestCompressibleColumnBuilder.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
-import org.apache.spark.sql.columnar._
+import org.apache.spark.sql.execution.columnar._
import org.apache.spark.sql.types.AtomicType
class TestCompressibleColumnBuilder[T <: AtomicType](
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index 5c2fc7d82f..99478e82d4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive
import java.io.File
-import org.apache.spark.sql.columnar.InMemoryColumnarTableScan
+import org.apache.spark.sql.execution.columnar.InMemoryColumnarTableScan
import org.apache.spark.sql.execution.datasources.parquet.ParquetRelation
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.{AnalysisException, QueryTest, SaveMode}