author    witgo <witgo@qq.com>            2014-06-22 18:25:16 -0700
committer Reynold Xin <rxin@apache.org>   2014-06-22 18:25:16 -0700
commit    409d24e2b2e52df8fd2c5aca69def29b9a097a79 (patch)
tree      bba03ca6c1e9d2d7c15f84d602a2e49b94afd992 /core
parent    9fe28c35df0dc8ac4a54db2a528fb7ae56d3f978 (diff)
SPARK-2229: FileAppender throws an IllegalArgumentException in JDK 6
Author: witgo <witgo@qq.com>

Closes #1174 from witgo/SPARK-2229 and squashes the following commits:

f85f321 [witgo] FileAppender throws an IllegalArgumentException in JDK 6
e1a8da8 [witgo] SizeBasedRollingPolicy throws a java.lang.IllegalArgumentException in JDK 6
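Background on the failure: Java 6's SimpleDateFormat does not recognize the pattern letter 'Y' (week year), which was only added in Java 7, so constructing a formatter from "--YYYY-MM-dd" throws an IllegalArgumentException on JDK 6. Lowercase 'y' (calendar year) is accepted on every JDK and is also the intended meaning here, since the week year can differ from the calendar year around New Year. A minimal Scala sketch, separate from this patch, illustrating the difference (the object name is only illustrative):

    import java.text.SimpleDateFormat
    import java.util.Date

    object DatePatternCheck {
      def main(args: Array[String]): Unit = {
        val now = new Date()

        // Lowercase 'y' is the calendar year: valid on every JDK.
        println(new SimpleDateFormat("--yyyy-MM-dd").format(now))

        // Uppercase 'Y' is the week year: JDK 6 rejects the pattern letter at
        // construction time; JDK 7+ accepts it but may print a different year
        // than 'yyyy' for dates close to the year boundary.
        try {
          println(new SimpleDateFormat("--YYYY-MM-dd").format(now))
        } catch {
          case e: IllegalArgumentException =>
            println(s"Rejected on JDK 6: ${e.getMessage}")
        }
      }
    }

On JDK 7 and later both patterns are accepted, but "YYYY" silently yields the week-based year, so switching to "yyyy" is the correct fix beyond JDK 6 compatibility alone.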
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/util/logging/FileAppender.scala    8
-rw-r--r--  core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala   2
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/logging/FileAppender.scala b/core/src/main/scala/org/apache/spark/util/logging/FileAppender.scala
index 8e9c3036d0..1d54670606 100644
--- a/core/src/main/scala/org/apache/spark/util/logging/FileAppender.scala
+++ b/core/src/main/scala/org/apache/spark/util/logging/FileAppender.scala
@@ -125,16 +125,16 @@ private[spark] object FileAppender extends Logging {
val validatedParams: Option[(Long, String)] = rollingInterval match {
case "daily" =>
logInfo(s"Rolling executor logs enabled for $file with daily rolling")
- Some(24 * 60 * 60 * 1000L, "--YYYY-MM-dd")
+ Some(24 * 60 * 60 * 1000L, "--yyyy-MM-dd")
case "hourly" =>
logInfo(s"Rolling executor logs enabled for $file with hourly rolling")
- Some(60 * 60 * 1000L, "--YYYY-MM-dd--HH")
+ Some(60 * 60 * 1000L, "--yyyy-MM-dd--HH")
case "minutely" =>
logInfo(s"Rolling executor logs enabled for $file with rolling every minute")
- Some(60 * 1000L, "--YYYY-MM-dd--HH-mm")
+ Some(60 * 1000L, "--yyyy-MM-dd--HH-mm")
case IntParam(seconds) =>
logInfo(s"Rolling executor logs enabled for $file with rolling $seconds seconds")
- Some(seconds * 1000L, "--YYYY-MM-dd--HH-mm-ss")
+ Some(seconds * 1000L, "--yyyy-MM-dd--HH-mm-ss")
case _ =>
logWarning(s"Illegal interval for rolling executor logs [$rollingInterval], " +
s"rolling logs not enabled")
diff --git a/core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala b/core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala
index 84e5c3c917..d7b7219e17 100644
--- a/core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala
+++ b/core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala
@@ -109,7 +109,7 @@ private[spark] class SizeBasedRollingPolicy(
}
@volatile private var bytesWrittenSinceRollover = 0L
- val formatter = new SimpleDateFormat("--YYYY-MM-dd--HH-mm-ss--SSSS")
+ val formatter = new SimpleDateFormat("--yyyy-MM-dd--HH-mm-ss--SSSS")
/** Should rollover if the next set of bytes is going to exceed the size limit */
def shouldRollover(bytesToBeWritten: Long): Boolean = {