author     Ilya Ganelin <ilya.ganelin@capitalone.com>    2015-01-19 01:32:22 -0800
committer  Patrick Wendell <patrick@databricks.com>      2015-01-19 01:32:36 -0800
commit     3453d578ad9933be6881488c8ca3611e5b686af9 (patch)
tree       4d272cf09db953f938f3fc4b7cf29405cb845cea /core/src/test
parent     851b6a9bbadfcd4d258054586a9a157c41ce71c7 (diff)
[SPARK-3288] All fields in TaskMetrics should be private and use getters/setters
I've updated the fields and all usages of these fields in the Spark code. I've verified that this did not break anything on my local repo.

Author: Ilya Ganelin <ilya.ganelin@capitalone.com>

Closes #4020 from ilganeli/SPARK-3288 and squashes the following commits:

39f3810 [Ilya Ganelin] resolved merge issues
e446287 [Ilya Ganelin] Merge remote-tracking branch 'upstream/master' into SPARK-3288
b8c05cb [Ilya Ganelin] Missed making a variable private
6444391 [Ilya Ganelin] Made inc/dec functions private[spark]
1149e78 [Ilya Ganelin] Merge remote-tracking branch 'upstream/master' into SPARK-3288
26b312b [Ilya Ganelin] Debugging tests
17146c2 [Ilya Ganelin] Merge remote-tracking branch 'upstream/master' into SPARK-3288
5525c20 [Ilya Ganelin] Completed refactoring to make vars in TaskMetrics class private
c64da4f [Ilya Ganelin] Partially updated task metrics to make some vars private
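The pattern applied across TaskMetrics and its companion metric classes (ShuffleReadMetrics, ShuffleWriteMetrics, InputMetrics, OutputMetrics) is sketched below. This is a minimal illustration, not the actual Spark source: the real classes carry many more fields, and the exact visibility modifiers are inferred from the commit notes ("Made inc/dec functions private[spark]").

// Minimal sketch (assumed shape) of the accessor pattern this commit introduces.
class TaskMetrics {
  // Backing fields are private, so external code can no longer assign them directly.
  private var _executorRunTime: Long = 0L
  private var _memoryBytesSpilled: Long = 0L

  // Reads go through public getters...
  def executorRunTime: Long = _executorRunTime
  def memoryBytesSpilled: Long = _memoryBytesSpilled

  // ...while writes go through Spark-internal setters and incrementers.
  private[spark] def setExecutorRunTime(value: Long): Unit = { _executorRunTime = value }
  private[spark] def incMemoryBytesSpilled(value: Long): Unit = { _memoryBytesSpilled += value }
}

On the caller side, a direct write such as t.executorRunTime = b becomes t.setExecutorRunTime(b), and accumulating writes become inc* calls such as t.incMemoryBytesSpilled(a + c); this is exactly the mechanical substitution the test diffs below perform.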
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala | 16
-rw-r--r--  core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala           | 28
2 files changed, 22 insertions, 22 deletions
diff --git a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
index c9417ea1ed..68074ae32a 100644
--- a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
@@ -140,7 +140,7 @@ class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matchers
assert(listener.stageIdToData.size === 0)
// finish this task, should get updated shuffleRead
- shuffleReadMetrics.remoteBytesRead = 1000
+ shuffleReadMetrics.incRemoteBytesRead(1000)
taskMetrics.setShuffleReadMetrics(Some(shuffleReadMetrics))
var taskInfo = new TaskInfo(1234L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
@@ -226,18 +226,18 @@ class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matchers
val shuffleWriteMetrics = new ShuffleWriteMetrics()
taskMetrics.setShuffleReadMetrics(Some(shuffleReadMetrics))
taskMetrics.shuffleWriteMetrics = Some(shuffleWriteMetrics)
- shuffleReadMetrics.remoteBytesRead = base + 1
- shuffleReadMetrics.remoteBlocksFetched = base + 2
- shuffleWriteMetrics.shuffleBytesWritten = base + 3
- taskMetrics.executorRunTime = base + 4
- taskMetrics.diskBytesSpilled = base + 5
- taskMetrics.memoryBytesSpilled = base + 6
+ shuffleReadMetrics.incRemoteBytesRead(base + 1)
+ shuffleReadMetrics.incRemoteBlocksFetched(base + 2)
+ shuffleWriteMetrics.incShuffleBytesWritten(base + 3)
+ taskMetrics.setExecutorRunTime(base + 4)
+ taskMetrics.incDiskBytesSpilled(base + 5)
+ taskMetrics.incMemoryBytesSpilled(base + 6)
val inputMetrics = new InputMetrics(DataReadMethod.Hadoop)
taskMetrics.setInputMetrics(Some(inputMetrics))
inputMetrics.addBytesRead(base + 7)
val outputMetrics = new OutputMetrics(DataWriteMethod.Hadoop)
taskMetrics.outputMetrics = Some(outputMetrics)
- outputMetrics.bytesWritten = base + 8
+ outputMetrics.setBytesWritten(base + 8)
taskMetrics
}
diff --git a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
index db400b4162..0357fc6ce2 100644
--- a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
@@ -641,13 +641,13 @@ class JsonProtocolSuite extends FunSuite {
hasHadoopInput: Boolean,
hasOutput: Boolean) = {
val t = new TaskMetrics
- t.hostname = "localhost"
- t.executorDeserializeTime = a
- t.executorRunTime = b
- t.resultSize = c
- t.jvmGCTime = d
- t.resultSerializationTime = a + b
- t.memoryBytesSpilled = a + c
+ t.setHostname("localhost")
+ t.setExecutorDeserializeTime(a)
+ t.setExecutorRunTime(b)
+ t.setResultSize(c)
+ t.setJvmGCTime(d)
+ t.setResultSerializationTime(a + b)
+ t.incMemoryBytesSpilled(a + c)
if (hasHadoopInput) {
val inputMetrics = new InputMetrics(DataReadMethod.Hadoop)
@@ -655,20 +655,20 @@ class JsonProtocolSuite extends FunSuite {
t.setInputMetrics(Some(inputMetrics))
} else {
val sr = new ShuffleReadMetrics
- sr.remoteBytesRead = b + d
- sr.localBlocksFetched = e
- sr.fetchWaitTime = a + d
- sr.remoteBlocksFetched = f
+ sr.incRemoteBytesRead(b + d)
+ sr.incLocalBlocksFetched(e)
+ sr.incFetchWaitTime(a + d)
+ sr.incRemoteBlocksFetched(f)
t.setShuffleReadMetrics(Some(sr))
}
if (hasOutput) {
val outputMetrics = new OutputMetrics(DataWriteMethod.Hadoop)
- outputMetrics.bytesWritten = a + b + c
+ outputMetrics.setBytesWritten(a + b + c)
t.outputMetrics = Some(outputMetrics)
} else {
val sw = new ShuffleWriteMetrics
- sw.shuffleBytesWritten = a + b + c
- sw.shuffleWriteTime = b + c + d
+ sw.incShuffleBytesWritten(a + b + c)
+ sw.incShuffleWriteTime(b + c + d)
t.shuffleWriteMetrics = Some(sw)
}
// Make at most 6 blocks