author    Reynold Xin <rxin@databricks.com>    2016-05-30 22:47:58 -0700
committer Reynold Xin <rxin@databricks.com>    2016-05-30 22:47:58 -0700
commit    675921040ee4802aa9914457de62af746bc3657d (patch)
tree      cf9f293c9c3b489551764a5edbd981b290ae83cc /core/src
parent    5b21139dbf3bd09cb3a590bd0ffb857ea92dc23c (diff)
[SPARK-15638][SQL] Audit Dataset, SparkSession, and SQLContext
## What changes were proposed in this pull request?

This patch contains a list of changes as a result of my auditing Dataset, SparkSession, and SQLContext. The patch audits the categorization of experimental APIs, function groups, and deprecations. For the detailed list of changes, please see the diff.

## How was this patch tested?

N/A

Author: Reynold Xin <rxin@databricks.com>

Closes #13370 from rxin/SPARK-15638.
Diffstat (limited to 'core/src')
-rw-r--r-- core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java       | 2
-rw-r--r-- core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala | 3
-rw-r--r-- core/src/main/scala/org/apache/spark/storage/BlockManager.scala         | 6
-rw-r--r-- core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala   | 4
-rw-r--r-- core/src/main/scala/org/apache/spark/util/ListenerBus.scala             | 4
-rw-r--r-- core/src/main/scala/org/apache/spark/util/Utils.scala                   | 2
6 files changed, 11 insertions, 10 deletions
diff --git a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
index a4a571f15a..867c4a1050 100644
--- a/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
+++ b/core/src/main/java/org/apache/spark/memory/TaskMemoryManager.java
@@ -182,7 +182,7 @@ public class TaskMemoryManager {
}
consumers.add(consumer);
- logger.debug("Task {} acquire {} for {}", taskAttemptId, Utils.bytesToString(got), consumer);
+ logger.debug("Task {} acquired {} for {}", taskAttemptId, Utils.bytesToString(got), consumer);
return got;
}
}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
index 100ed76ecb..96325a0329 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
@@ -112,7 +112,8 @@ private[spark] class FairSchedulableBuilder(val rootPool: Pool, conf: SparkConf)
schedulingMode = SchedulingMode.withName(xmlSchedulingMode)
} catch {
case e: NoSuchElementException =>
- logWarning("Error xml schedulingMode, using default schedulingMode")
+ logWarning(s"Unsupported schedulingMode: $xmlSchedulingMode, " +
+ s"using the default schedulingMode: $schedulingMode")
}
}
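
For context on the hunk above: `SchedulingMode` is a Scala `Enumeration`, and `Enumeration.withName` throws `NoSuchElementException` when given a name it does not define, which is why that exception is caught and why the rewritten warning can name the offending value. A minimal standalone sketch, not part of this commit; the `parse` helper is hypothetical and the local enumeration mirrors Spark's FAIR/FIFO/NONE values:

```scala
// Standalone sketch of why FairSchedulableBuilder catches NoSuchElementException.
object SchedulingModeSketch {
  // Mirrors the values of org.apache.spark.scheduler.SchedulingMode.
  object SchedulingMode extends Enumeration {
    val FAIR, FIFO, NONE = Value
  }

  // Hypothetical helper: Enumeration.withName throws NoSuchElementException
  // for an unknown name, so fall back to a default, as the code above does.
  def parse(name: String,
            default: SchedulingMode.Value = SchedulingMode.FIFO): SchedulingMode.Value =
    try SchedulingMode.withName(name)
    catch { case _: NoSuchElementException => default }
}
```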
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index c56e451c11..2f9473aedc 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -92,9 +92,9 @@ private[spark] class BlockManager(
private[spark] val diskStore = new DiskStore(conf, diskBlockManager)
memoryManager.setMemoryStore(memoryStore)
- // Note: depending on the memory manager, `maxStorageMemory` may actually vary over time.
+ // Note: depending on the memory manager, `maxMemory` may actually vary over time.
// However, since we use this only for reporting and logging, what we actually want here is
- // the absolute maximum value that `maxStorageMemory` can ever possibly reach. We may need
+ // the absolute maximum value that `maxMemory` can ever possibly reach. We may need
// to revisit whether reporting this value as the "max" is intuitive to the user.
private val maxMemory = memoryManager.maxOnHeapStorageMemory
@@ -231,7 +231,7 @@ private[spark] class BlockManager(
*/
def reregister(): Unit = {
// TODO: We might need to rate limit re-registering.
- logInfo("BlockManager re-registering with master")
+ logInfo(s"BlockManager $blockManagerId re-registering with master")
master.registerBlockManager(blockManagerId, maxMemory, slaveEndpoint)
reportAllBlocks()
}
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index c22d2e0fb6..52db45bd48 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -45,9 +45,9 @@ class BlockManagerMaster(
/** Register the BlockManager's id with the driver. */
def registerBlockManager(
blockManagerId: BlockManagerId, maxMemSize: Long, slaveEndpoint: RpcEndpointRef): Unit = {
- logInfo("Trying to register BlockManager")
+ logInfo(s"Registering BlockManager $blockManagerId")
tell(RegisterBlockManager(blockManagerId, maxMemSize, slaveEndpoint))
- logInfo("Registered BlockManager")
+ logInfo(s"Registered BlockManager $blockManagerId")
}
def updateBlockInfo(
diff --git a/core/src/main/scala/org/apache/spark/util/ListenerBus.scala b/core/src/main/scala/org/apache/spark/util/ListenerBus.scala
index 436c1951de..79fc2e9459 100644
--- a/core/src/main/scala/org/apache/spark/util/ListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/util/ListenerBus.scala
@@ -54,7 +54,7 @@ private[spark] trait ListenerBus[L <: AnyRef, E] extends Logging {
*/
final def postToAll(event: E): Unit = {
// JavaConverters can create a JIterableWrapper if we use asScala.
- // However, this method will be called frequently. To avoid the wrapper cost, here ewe use
+ // However, this method will be called frequently. To avoid the wrapper cost, here we use
// Java Iterator directly.
val iter = listeners.iterator
while (iter.hasNext) {
@@ -70,7 +70,7 @@ private[spark] trait ListenerBus[L <: AnyRef, E] extends Logging {
/**
* Post an event to the specified listener. `onPostEvent` is guaranteed to be called in the same
- * thread.
+ * thread for all listeners.
*/
protected def doPostEvent(listener: L, event: E): Unit
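
For context on the first hunk above: the comment it fixes explains why `postToAll` iterates with a raw Java iterator rather than `asScala`, which allocates a wrapper on every call. A minimal sketch, not part of this commit, contrasting the two styles over a `CopyOnWriteArrayList` (the collection `ListenerBus` keeps its listeners in); the object and method names are illustrative:

```scala
import java.util.concurrent.CopyOnWriteArrayList
import scala.collection.JavaConverters._

object IterationSketch {
  val listeners = new CopyOnWriteArrayList[String]()

  // asScala allocates a wrapper (e.g. a JIterableWrapper) on every call,
  // which adds up on a hot path like postToAll.
  def postViaAsScala(): Unit =
    for (listener <- listeners.asScala) println(listener)

  // Using the Java iterator directly avoids that per-call wrapper.
  def postViaJavaIterator(): Unit = {
    val iter = listeners.iterator
    while (iter.hasNext) println(iter.next())
  }
}
```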
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index a8bb0002a7..7e204fa218 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1169,7 +1169,7 @@ private[spark] object Utils extends Logging {
}
/**
- * Execute a block of code that evaluates to Unit, stop SparkContext is there is any uncaught
+ * Execute a block of code that evaluates to Unit, stop SparkContext if there is any uncaught
* exception
*
* NOTE: This method is to be called by the driver-side components to avoid stopping the