author     Dongjoon Hyun <dongjoon@apache.org>    2016-03-09 10:31:26 +0000
committer  Sean Owen <sowen@cloudera.com>         2016-03-09 10:31:26 +0000
commit     c3689bc24e03a9471cd6e8169da61963c4528252 (patch)
tree       5d1ee90afa2087ede8e4dbc4dd666d699578c230 /launcher
parent     cbff2803ef117d7cffe6f05fc1bbd395a1e9c587 (diff)
[SPARK-13702][CORE][SQL][MLLIB] Use diamond operator for generic instance creation in Java code.
## What changes were proposed in this pull request?

In order to make `docs/examples` (and other related code) simpler, more readable, and more user-friendly, this PR replaces code like the following with the `diamond` operator:

```
- final ArrayList<Product2<Object, Object>> dataToWrite =
-   new ArrayList<Product2<Object, Object>>();
+ final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
```

Java 7 and higher support the **diamond** operator, which replaces the type arguments required to invoke the constructor of a generic class with an empty set of type parameters (`<>`). Currently, Spark's Java code mixes both styles.

## How was this patch tested?

Manual. Passed the existing tests.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #11541 from dongjoon-hyun/SPARK-13702.
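As context for reviewers unfamiliar with the feature, here is a minimal, self-contained Java 7+ sketch contrasting the two equivalent forms (illustrative only, not part of this patch; the class and variable names are invented):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondDemo {
  public static void main(String[] args) {
    // Pre-Java-7 style: the type arguments are spelled out twice.
    Map<String, List<String>> verbose = new HashMap<String, List<String>>();

    // Java 7+ diamond operator: the compiler infers the constructor's
    // type arguments from the declared type on the left-hand side.
    Map<String, List<String>> concise = new HashMap<>();

    List<String> jars = new ArrayList<>();  // inferred as ArrayList<String>
    jars.add("spark-core.jar");
    concise.put("jars", jars);

    System.out.println(concise);  // prints {jars=[spark-core.jar]}
  }
}
```

Both declarations compile to identical bytecode; the diamond is purely a source-level shorthand. One caveat: on Java 7 and 8 the diamond cannot be combined with anonymous inner classes (that form only compiles from Java 9 onward), so a sweep like this one is limited to concrete instantiations.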
Diffstat (limited to 'launcher')
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java      | 16
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java         |  2
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java              |  2
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/Main.java                        |  8
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java    |  2
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java               |  6
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java   | 12
-rw-r--r--  launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java | 10
8 files changed, 29 insertions, 29 deletions
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index c7ab51357c..46410327a5 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -58,12 +58,12 @@ abstract class AbstractCommandBuilder {
private Map<String, String> effectiveConfig;
public AbstractCommandBuilder() {
- this.appArgs = new ArrayList<String>();
- this.childEnv = new HashMap<String, String>();
- this.conf = new HashMap<String, String>();
- this.files = new ArrayList<String>();
- this.jars = new ArrayList<String>();
- this.pyFiles = new ArrayList<String>();
+ this.appArgs = new ArrayList<>();
+ this.childEnv = new HashMap<>();
+ this.conf = new HashMap<>();
+ this.files = new ArrayList<>();
+ this.jars = new ArrayList<>();
+ this.pyFiles = new ArrayList<>();
}
/**
@@ -87,7 +87,7 @@ abstract class AbstractCommandBuilder {
* class.
*/
List<String> buildJavaCommand(String extraClassPath) throws IOException {
- List<String> cmd = new ArrayList<String>();
+ List<String> cmd = new ArrayList<>();
String envJavaHome;
if (javaHome != null) {
@@ -134,7 +134,7 @@ abstract class AbstractCommandBuilder {
List<String> buildClassPath(String appClassPath) throws IOException {
String sparkHome = getSparkHome();
- List<String> cp = new ArrayList<String>();
+ List<String> cp = new ArrayList<>();
addToClassPath(cp, getenv("SPARK_CLASSPATH"));
addToClassPath(cp, appClassPath);
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index e328c8a341..7942d7372f 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -147,7 +147,7 @@ class CommandBuilderUtils {
* Output: [ "ab cd", "efgh", "i \" j" ]
*/
static List<String> parseOptionString(String s) {
- List<String> opts = new ArrayList<String>();
+ List<String> opts = new ArrayList<>();
StringBuilder opt = new StringBuilder();
boolean inOpt = false;
boolean inSingleQuote = false;
diff --git a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
index 414ffc2c84..69fbf4387b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
@@ -129,7 +129,7 @@ class LauncherServer implements Closeable {
server.setReuseAddress(true);
server.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0));
- this.clients = new ArrayList<ServerConnection>();
+ this.clients = new ArrayList<>();
this.threadIds = new AtomicLong();
this.factory = new NamedThreadFactory(THREAD_NAME_FMT);
this.pending = new ConcurrentHashMap<>();
diff --git a/launcher/src/main/java/org/apache/spark/launcher/Main.java b/launcher/src/main/java/org/apache/spark/launcher/Main.java
index e751e948e3..1e34bb8c73 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/Main.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/Main.java
@@ -50,7 +50,7 @@ class Main {
public static void main(String[] argsArray) throws Exception {
checkArgument(argsArray.length > 0, "Not enough arguments: missing class name.");
- List<String> args = new ArrayList<String>(Arrays.asList(argsArray));
+ List<String> args = new ArrayList<>(Arrays.asList(argsArray));
String className = args.remove(0);
boolean printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
@@ -70,7 +70,7 @@ class Main {
// Ignore parsing exceptions.
}
- List<String> help = new ArrayList<String>();
+ List<String> help = new ArrayList<>();
if (parser.className != null) {
help.add(parser.CLASS);
help.add(parser.className);
@@ -82,7 +82,7 @@ class Main {
builder = new SparkClassCommandBuilder(className, args);
}
- Map<String, String> env = new HashMap<String, String>();
+ Map<String, String> env = new HashMap<>();
List<String> cmd = builder.buildCommand(env);
if (printLaunchCommand) {
System.err.println("Spark Command: " + join(" ", cmd));
@@ -130,7 +130,7 @@ class Main {
return cmd;
}
- List<String> newCmd = new ArrayList<String>();
+ List<String> newCmd = new ArrayList<>();
newCmd.add("env");
for (Map.Entry<String, String> e : childEnv.entrySet()) {
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index e575fd3308..40187236f2 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -44,7 +44,7 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
@Override
public List<String> buildCommand(Map<String, String> env) throws IOException {
- List<String> javaOptsKeys = new ArrayList<String>();
+ List<String> javaOptsKeys = new ArrayList<>();
String memKey = null;
String extraClassPath = null;
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
index 20e6003a00..a542159901 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -75,7 +75,7 @@ public class SparkLauncher {
/** Used internally to create unique logger names. */
private static final AtomicInteger COUNTER = new AtomicInteger();
- static final Map<String, String> launcherConfig = new HashMap<String, String>();
+ static final Map<String, String> launcherConfig = new HashMap<>();
/**
* Set a configuration value for the launcher library. These config values do not affect the
@@ -428,7 +428,7 @@ public class SparkLauncher {
}
private ProcessBuilder createBuilder() {
- List<String> cmd = new ArrayList<String>();
+ List<String> cmd = new ArrayList<>();
String script = isWindows() ? "spark-submit.cmd" : "spark-submit";
cmd.add(join(File.separator, builder.getSparkHome(), "bin", script));
cmd.addAll(builder.buildSparkSubmitArgs());
@@ -437,7 +437,7 @@ public class SparkLauncher {
// preserved, otherwise the batch interpreter will mess up the arguments. Batch scripts are
// weird.
if (isWindows()) {
- List<String> winCmd = new ArrayList<String>();
+ List<String> winCmd = new ArrayList<>();
for (String arg : cmd) {
winCmd.add(quoteForBatchScript(arg));
}
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 269c89c310..b2dd6ac4c3 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -67,7 +67,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
* command line parsing works. This maps the class name to the resource to use when calling
* spark-submit.
*/
- private static final Map<String, String> specialClasses = new HashMap<String, String>();
+ private static final Map<String, String> specialClasses = new HashMap<>();
static {
specialClasses.put("org.apache.spark.repl.Main", "spark-shell");
specialClasses.put("org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver",
@@ -87,12 +87,12 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
private boolean allowsMixedArguments;
SparkSubmitCommandBuilder() {
- this.sparkArgs = new ArrayList<String>();
+ this.sparkArgs = new ArrayList<>();
this.printInfo = false;
}
SparkSubmitCommandBuilder(List<String> args) {
- this.sparkArgs = new ArrayList<String>();
+ this.sparkArgs = new ArrayList<>();
List<String> submitArgs = args;
if (args.size() > 0 && args.get(0).equals(PYSPARK_SHELL)) {
this.allowsMixedArguments = true;
@@ -123,7 +123,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
}
List<String> buildSparkSubmitArgs() {
- List<String> args = new ArrayList<String>();
+ List<String> args = new ArrayList<>();
SparkSubmitOptionParser parser = new SparkSubmitOptionParser();
if (verbose) {
@@ -244,7 +244,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
// The executable is the PYSPARK_DRIVER_PYTHON env variable set by the pyspark script,
// followed by PYSPARK_DRIVER_PYTHON_OPTS.
- List<String> pyargs = new ArrayList<String>();
+ List<String> pyargs = new ArrayList<>();
pyargs.add(firstNonEmpty(System.getenv("PYSPARK_DRIVER_PYTHON"), "python"));
String pyOpts = System.getenv("PYSPARK_DRIVER_PYTHON_OPTS");
if (!isEmpty(pyOpts)) {
@@ -270,7 +270,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
env.put("R_PROFILE_USER",
join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));
- List<String> args = new ArrayList<String>();
+ List<String> args = new ArrayList<>();
args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
return args;
}
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
index 6aad47adbc..d36731840b 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
@@ -73,7 +73,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
"spark.randomOption=foo",
parser.CONF,
SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH + "=/driverLibPath");
- Map<String, String> env = new HashMap<String, String>();
+ Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertTrue(findInStringList(env.get(CommandBuilderUtils.getLibPathEnvName()),
@@ -125,7 +125,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
"--master=foo",
"--deploy-mode=bar");
- Map<String, String> env = new HashMap<String, String>();
+ Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertEquals("python", cmd.get(cmd.size() - 1));
assertEquals(
@@ -142,7 +142,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
"script.py",
"arg1");
- Map<String, String> env = new HashMap<String, String>();
+ Map<String, String> env = new HashMap<>();
List<String> cmd = buildCommand(sparkSubmitArgs, env);
assertEquals("foo", findArgValue(cmd, "--master"));
@@ -178,7 +178,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
+ "/launcher/src/test/resources");
}
- Map<String, String> env = new HashMap<String, String>();
+ Map<String, String> env = new HashMap<>();
List<String> cmd = launcher.buildCommand(env);
// Checks below are different for driver and non-driver mode.
@@ -258,7 +258,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
}
private Map<String, String> parseConf(List<String> cmd, SparkSubmitOptionParser parser) {
- Map<String, String> conf = new HashMap<String, String>();
+ Map<String, String> conf = new HashMap<>();
for (int i = 0; i < cmd.size(); i++) {
if (cmd.get(i).equals(parser.CONF)) {
String[] val = cmd.get(i + 1).split("=", 2);