 common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java | 1 -
 common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java | 6 +++---
 sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala | 3 +--
 sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala | 1 -
 4 files changed, 4 insertions(+), 7 deletions(-)
diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
index 3cd4264680..3557482385 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java
@@ -24,7 +24,6 @@ import java.util.LinkedList;
import java.util.Map;
import org.apache.spark.unsafe.Platform;
-import org.apache.spark.unsafe.memory.MemoryAllocator;
/**
* A simple {@link MemoryAllocator} that can allocate up to 16GB using a JVM long primitive array.
diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
index 8bd2b06db8..7b588681d9 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/MemoryAllocator.java
@@ -23,12 +23,12 @@ public interface MemoryAllocator {
* Whether to fill newly allocated and deallocated memory with 0xa5 and 0x5a bytes respectively.
* This helps catch misuse of uninitialized or freed memory, but imposes some overhead.
*/
- public static final boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
+ boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
System.getProperty("spark.memory.debugFill", "false"));
// Same as jemalloc's debug fill values.
- public static final byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
- public static final byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
+ byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
+ byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
/**
* Allocates a contiguous block of memory. Note that the allocated memory is not guaranteed
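Aside on the MemoryAllocator.java hunk above: fields declared in a Java interface are implicitly public static final, so dropping those modifiers is purely cosmetic and changes neither the generated bytecode nor any call sites. The sketch below is not code from this patch; it is a minimal illustration of how an on-heap allocator might apply the debug-fill constants described in the Javadoc above. The DemoAllocator class, the plain byte[] backing store, and the use of java.util.Arrays.fill are assumptions made for the example.

import java.util.Arrays;

import org.apache.spark.unsafe.memory.MemoryAllocator;

// Hypothetical allocator sketch (not Spark's HeapMemoryAllocator): shows how the
// MEMORY_DEBUG_FILL_* constants declared in MemoryAllocator could be used.
final class DemoAllocator {
  byte[] allocate(int size) {
    byte[] block = new byte[size];
    if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) {
      // Mark freshly allocated memory so reads of uninitialized bytes are easy to spot.
      Arrays.fill(block, MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE);
    }
    return block;
  }

  void free(byte[] block) {
    if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) {
      // Mark freed memory so use-after-free bugs surface as a recognizable pattern.
      Arrays.fill(block, MemoryAllocator.MEMORY_DEBUG_FILL_FREED_VALUE);
    }
  }
}

With spark.memory.debugFill=true, uninitialized reads show up as 0xa5 bytes and use-after-free as 0x5a bytes, mirroring the jemalloc debug-fill convention noted in the comment.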
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
index 76d7f5cbc3..772e031ea7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
@@ -780,8 +780,7 @@ private[sql] object ParquetFileFormat extends Logging {
val assumeBinaryIsString = sparkSession.sessionState.conf.isParquetBinaryAsString
val assumeInt96IsTimestamp = sparkSession.sessionState.conf.isParquetINT96AsTimestamp
val writeLegacyParquetFormat = sparkSession.sessionState.conf.writeLegacyParquetFormat
- val serializedConf =
- new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
+ val serializedConf = new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
// !! HACK ALERT !!
//
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala
index 206d03ea98..cc77d3c4b9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala
@@ -18,7 +18,6 @@
package org.apache.spark.sql.sources
import org.apache.spark.sql._
-import org.apache.spark.sql.internal.SQLConf
private[sql] abstract class DataSourceTest extends QueryTest {