author     Sean Owen <sowen@cloudera.com>   2016-03-16 09:36:34 +0000
committer  Sean Owen <sowen@cloudera.com>   2016-03-16 09:36:34 +0000
commit     3b461d9ecd633c4fd659998b99e700d76f58d18a (patch)
tree       09e9923fc17bada794d01bc365f130039972a8b7 /core/src/main
parent     05ab2948ab357fc07222bb3505df80b1886f7310 (diff)
[SPARK-13823][SPARK-13397][SPARK-13395][CORE] More warnings, StandardCharset follow up
## What changes were proposed in this pull request?

Follow up to https://github.com/apache/spark/pull/11657

- Also update `String.getBytes("UTF-8")` to use `StandardCharsets.UTF_8`
- And fix one last new Coverity warning that turned up (use of unguarded `wait()` replaced by simpler/more robust `java.util.concurrent` classes in tests)
- And while we're here cleaning up Coverity warnings, just fix about 15 more build warnings

## How was this patch tested?

Jenkins tests

Author: Sean Owen <sowen@cloudera.com>

Closes #11725 from srowen/SPARK-13823.2.
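As a quick aside on the main change: `getBytes(String)` resolves a charset by its name at runtime, while `getBytes(Charset)` takes a constant such as `StandardCharsets.UTF_8`. A minimal sketch of the pattern, with illustrative names that are not taken from the patch:

```scala
import java.nio.charset.StandardCharsets

// Hypothetical example, not code from this patch: it only illustrates the
// overload swap that the commit applies throughout core/src/main.
object CharsetExample {
  def encode(value: String): Array[Byte] = {
    // Before: value.getBytes("UTF-8") looks the charset up by name and, in
    // Java, declares a checked UnsupportedEncodingException.
    // The Charset overload skips the lookup and cannot fail on a bad name.
    value.getBytes(StandardCharsets.UTF_8)
  }

  def decode(bytes: Array[Byte]): String =
    new String(bytes, StandardCharsets.UTF_8)
}
```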
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala                | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/api/r/SerDe.scala                         | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/serializer/GenericAvroSerializer.scala    | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala  | 3
4 files changed, 7 insertions, 4 deletions
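The test-side change mentioned in the commit message (replacing an unguarded `wait()`) touches files outside `core/src/main`, so it does not appear in the diff below. Purely as a hedged sketch of that pattern, assuming nothing about the actual test code:

```scala
import java.util.concurrent.{CountDownLatch, TimeUnit}

// Hypothetical helper, not code from this patch: a sketch of replacing an
// unguarded wait()/notify() pair with a java.util.concurrent primitive.
// A CountDownLatch gives "block until the event fired" semantics without a
// shared monitor, a done flag, or a loop to absorb spurious wakeups.
class EventSignal {
  private val latch = new CountDownLatch(1)

  // Called by the producer when the awaited event has happened.
  def fire(): Unit = latch.countDown()

  // Returns true if the event fired within the timeout, false otherwise.
  def awaitFired(timeoutMs: Long): Boolean =
    latch.await(timeoutMs, TimeUnit.MILLISECONDS)
}
```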
diff --git a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala b/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
index b0d858486b..55db938f09 100644
--- a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
@@ -18,6 +18,7 @@
package org.apache.spark.api.python
import java.nio.ByteOrder
+import java.nio.charset.StandardCharsets
import java.util.{ArrayList => JArrayList}
import scala.collection.JavaConverters._
@@ -68,7 +69,8 @@ private[spark] object SerDeUtil extends Logging {
construct(args ++ Array(""))
} else if (args.length == 2 && args(1).isInstanceOf[String]) {
val typecode = args(0).asInstanceOf[String].charAt(0)
- val data: Array[Byte] = args(1).asInstanceOf[String].getBytes("ISO-8859-1")
+ // This must be ISO 8859-1 / Latin 1, not UTF-8, to interoperate correctly
+ val data = args(1).asInstanceOf[String].getBytes(StandardCharsets.ISO_8859_1)
construct(typecode, machineCodes(typecode), data)
} else {
super.construct(args)
diff --git a/core/src/main/scala/org/apache/spark/api/r/SerDe.scala b/core/src/main/scala/org/apache/spark/api/r/SerDe.scala
index c7fb192f26..48df5bedd6 100644
--- a/core/src/main/scala/org/apache/spark/api/r/SerDe.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/SerDe.scala
@@ -410,7 +410,7 @@ private[spark] object SerDe {
}
def writeString(out: DataOutputStream, value: String): Unit = {
- val utf8 = value.getBytes("UTF-8")
+ val utf8 = value.getBytes(StandardCharsets.UTF_8)
val len = utf8.length
out.writeInt(len)
out.write(utf8, 0, len)
diff --git a/core/src/main/scala/org/apache/spark/serializer/GenericAvroSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/GenericAvroSerializer.scala
index 1a8e545b4f..d17a7894fd 100644
--- a/core/src/main/scala/org/apache/spark/serializer/GenericAvroSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/GenericAvroSerializer.scala
@@ -72,7 +72,7 @@ private[serializer] class GenericAvroSerializer(schemas: Map[Long, String])
def compress(schema: Schema): Array[Byte] = compressCache.getOrElseUpdate(schema, {
val bos = new ByteArrayOutputStream()
val out = codec.compressedOutputStream(bos)
- out.write(schema.toString.getBytes("UTF-8"))
+ out.write(schema.toString.getBytes(StandardCharsets.UTF_8))
out.close()
bos.toByteArray
})
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala b/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
index 202a5191ad..f6a9f9c557 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
@@ -19,6 +19,7 @@ package org.apache.spark.status.api.v1
import java.io.OutputStream
import java.lang.annotation.Annotation
import java.lang.reflect.Type
+import java.nio.charset.StandardCharsets
import java.text.SimpleDateFormat
import java.util.{Calendar, SimpleTimeZone}
import javax.ws.rs.Produces
@@ -68,7 +69,7 @@ private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{
multivaluedMap: MultivaluedMap[String, AnyRef],
outputStream: OutputStream): Unit = {
t match {
- case ErrorWrapper(err) => outputStream.write(err.getBytes("utf-8"))
+ case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8))
case _ => mapper.writeValue(outputStream, t)
}
}