author     Wenchen Fan <wenchen@databricks.com>	2016-04-19 21:20:24 -0700
committer  Reynold Xin <rxin@databricks.com>	2016-04-19 21:20:24 -0700
commit     85d759ca3aebb7d60b963207dcada83c75502e52 (patch)
tree       b4fbaba9dfce1ae47485f318123881f42cd05e6c /sql
parent     78b38109ed2fc20e97f9a968185d0c02ef83aa42 (diff)
[SPARK-14704][CORE] create accumulators in TaskMetrics
## What changes were proposed in this pull request?
Before this PR, we create accumulators on the driver side (and register them) and send them to the executor side, where we then create `TaskMetrics` from these accumulators.
After this PR, we create `TaskMetrics` on the driver side and send it to the executor side, so that the accumulators can be created inside `TaskMetrics` directly, which is cleaner.
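For illustration, here is a minimal sketch of the ownership change, using hypothetical simplified names (`LongAccum`, `TaskMetricsSketch`) rather than Spark's real classes:

```scala
// Minimal sketch of the ownership change, with hypothetical names; this is
// NOT Spark's real API, it only illustrates who creates the accumulators.

// A toy long accumulator.
class LongAccum(val name: String) {
  private var sum = 0L
  def add(v: Long): Unit = sum += v
  def value: Long = sum
}

// After this PR: the metrics object creates its own accumulators, so whoever
// instantiates it (now the driver) owns a complete, consistent set.
class TaskMetricsSketch {
  val executorRunTime = new LongAccum("executorRunTime")
  val resultSize = new LongAccum("resultSize")
  def accumulators: Seq[LongAccum] = Seq(executorRunTime, resultSize)
}

object BeforeAfter {
  // Before: the driver built a loose bag of accumulators and the executor
  // assembled the metrics object from it -- two creation sites to keep in sync.
  def before(): Seq[LongAccum] =
    Seq(new LongAccum("executorRunTime"), new LongAccum("resultSize"))

  // After: the driver builds the metrics object itself and ships it with the task.
  def after(): TaskMetricsSketch = new TaskMetricsSketch
}
```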
## How was this patch tested?
Existing tests.
Author: Wenchen Fan <wenchen@databricks.com>
Closes #12472 from cloud-fan/acc.
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeRowSerializerSuite.scala | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeRowSerializerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeRowSerializerSuite.scala
index 01687877ee..53105e0b24 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeRowSerializerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/UnsafeRowSerializerSuite.scala
@@ -21,6 +21,7 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File}
 import java.util.Properties
 
 import org.apache.spark._
+import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.memory.TaskMemoryManager
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.Row
@@ -113,8 +114,7 @@ class UnsafeRowSerializerSuite extends SparkFunSuite with LocalSparkContext {
       (i, converter(Row(i)))
     }
     val taskMemoryManager = new TaskMemoryManager(sc.env.memoryManager, 0)
-    val taskContext = new TaskContextImpl(
-      0, 0, 0, 0, taskMemoryManager, new Properties, null, InternalAccumulator.createAll(sc))
+    val taskContext = new TaskContextImpl(0, 0, 0, 0, taskMemoryManager, new Properties, null)
 
     val sorter = new ExternalSorter[Int, UnsafeRow, UnsafeRow](
       taskContext,
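The `sql` side of the change is mechanical: since `TaskMetrics` now brings its own accumulators, the test no longer passes `InternalAccumulator.createAll(sc)` into `TaskContextImpl`, presumably because the metrics argument of the real constructor now has a default. The hypothetical sketch below only illustrates that default-argument pattern; names and parameter order do not claim to match the actual `TaskContextImpl` signature.

```scala
// Hypothetical sketch of the default-argument pattern; not the real signature.
class MetricsSketch                                      // stand-in for the real TaskMetrics

class TaskContextImplSketch(
    stageId: Int,
    partitionId: Int,
    taskAttemptId: Long,
    attemptNumber: Int,
    val taskMetrics: MetricsSketch = new MetricsSketch)  // default builds the metrics in place

object Usage {
  // Callers that used to pass a separately built accumulator set can now
  // simply omit the argument, as the test change above does.
  val ctx = new TaskContextImplSketch(stageId = 0, partitionId = 0, taskAttemptId = 0L, attemptNumber = 0)
}
```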