From c3689bc24e03a9471cd6e8169da61963c4528252 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun <dongjoon@apache.org>
Date: Wed, 9 Mar 2016 10:31:26 +0000
Subject: [SPARK-13702][CORE][SQL][MLLIB] Use diamond operator for generic
 instance creation in Java code.

## What changes were proposed in this pull request?

To make `docs/examples` (and other related code) simpler, more readable, and more user-friendly, this PR replaces code like the following with the `diamond` operator:

```
-    final ArrayList<Product2<Object, Object>> dataToWrite =
-      new ArrayList<Product2<Object, Object>>();
+    final ArrayList<Product2<Object, Object>> dataToWrite = new ArrayList<>();
```

Java 7 and higher support the **diamond** operator, which replaces the type arguments required to invoke the constructor of a generic class with an empty set of type parameters (`<>`). Currently, Spark's Java code uses a mixture of both styles.
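To make the change concrete, here is a minimal, self-contained sketch of the two styles (a hypothetical `DiamondExample` class written for this description; it is not code from the patch below):

```
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondExample {
  public static void main(String[] args) {
    // Pre-Java 7 style: the type arguments are spelled out twice.
    Map<String, List<String>> verbose = new HashMap<String, List<String>>();

    // Java 7+ diamond style: the compiler infers the constructor's type
    // arguments from the declared type of the variable.
    Map<String, List<String>> concise = new HashMap<>();

    concise.put("jars", new ArrayList<>());  // diamond also works in argument position
    concise.get("jars").add("example.jar");
    System.out.println(concise);             // prints {jars=[example.jar]}
  }
}
```

Both declarations are checked against the same inferred type and compile identically; the diamond form only removes the redundant repetition, which is why this change is purely cosmetic and behavior-preserving.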
## How was this patch tested?

Manual. Passed the existing tests.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #11541 from dongjoon-hyun/SPARK-13702.
---
 .../apache/spark/launcher/AbstractCommandBuilder.java    | 16 ++++++++--------
 .../org/apache/spark/launcher/CommandBuilderUtils.java   |  2 +-
 .../java/org/apache/spark/launcher/LauncherServer.java   |  2 +-
 .../src/main/java/org/apache/spark/launcher/Main.java    |  8 ++++----
 .../apache/spark/launcher/SparkClassCommandBuilder.java  |  2 +-
 .../java/org/apache/spark/launcher/SparkLauncher.java    |  6 +++---
 .../apache/spark/launcher/SparkSubmitCommandBuilder.java | 12 ++++++------
 .../spark/launcher/SparkSubmitCommandBuilderSuite.java   | 10 +++++-----
 8 files changed, 29 insertions(+), 29 deletions(-)

(limited to 'launcher')

diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index c7ab51357c..46410327a5 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -58,12 +58,12 @@ abstract class AbstractCommandBuilder {
   private Map<String, String> effectiveConfig;
 
   public AbstractCommandBuilder() {
-    this.appArgs = new ArrayList<String>();
-    this.childEnv = new HashMap<String, String>();
-    this.conf = new HashMap<String, String>();
-    this.files = new ArrayList<String>();
-    this.jars = new ArrayList<String>();
-    this.pyFiles = new ArrayList<String>();
+    this.appArgs = new ArrayList<>();
+    this.childEnv = new HashMap<>();
+    this.conf = new HashMap<>();
+    this.files = new ArrayList<>();
+    this.jars = new ArrayList<>();
+    this.pyFiles = new ArrayList<>();
   }
 
   /**
@@ -87,7 +87,7 @@ abstract class AbstractCommandBuilder {
    * class.
    */
  List<String> buildJavaCommand(String extraClassPath) throws IOException {
-    List<String> cmd = new ArrayList<String>();
+    List<String> cmd = new ArrayList<>();
     String envJavaHome;
 
     if (javaHome != null) {
@@ -134,7 +134,7 @@ abstract class AbstractCommandBuilder {
   List<String> buildClassPath(String appClassPath) throws IOException {
     String sparkHome = getSparkHome();
 
-    List<String> cp = new ArrayList<String>();
+    List<String> cp = new ArrayList<>();
     addToClassPath(cp, getenv("SPARK_CLASSPATH"));
     addToClassPath(cp, appClassPath);
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index e328c8a341..7942d7372f 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -147,7 +147,7 @@ class CommandBuilderUtils {
    * Output: [ "ab cd", "efgh", "i \" j" ]
    */
   static List<String> parseOptionString(String s) {
-    List<String> opts = new ArrayList<String>();
+    List<String> opts = new ArrayList<>();
     StringBuilder opt = new StringBuilder();
     boolean inOpt = false;
     boolean inSingleQuote = false;
diff --git a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
index 414ffc2c84..69fbf4387b 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
@@ -129,7 +129,7 @@ class LauncherServer implements Closeable {
       server.setReuseAddress(true);
       server.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0));
 
-      this.clients = new ArrayList<ServerConnection>();
+      this.clients = new ArrayList<>();
       this.threadIds = new AtomicLong();
       this.factory = new NamedThreadFactory(THREAD_NAME_FMT);
       this.pending = new ConcurrentHashMap<>();
diff --git a/launcher/src/main/java/org/apache/spark/launcher/Main.java b/launcher/src/main/java/org/apache/spark/launcher/Main.java
index e751e948e3..1e34bb8c73 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/Main.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/Main.java
@@ -50,7 +50,7 @@ class Main {
   public static void main(String[] argsArray) throws Exception {
     checkArgument(argsArray.length > 0, "Not enough arguments: missing class name.");
 
-    List<String> args = new ArrayList<String>(Arrays.asList(argsArray));
+    List<String> args = new ArrayList<>(Arrays.asList(argsArray));
     String className = args.remove(0);
 
     boolean printLaunchCommand = !isEmpty(System.getenv("SPARK_PRINT_LAUNCH_COMMAND"));
@@ -70,7 +70,7 @@ class Main {
         // Ignore parsing exceptions.
       }
 
-      List<String> help = new ArrayList<String>();
+      List<String> help = new ArrayList<>();
       if (parser.className != null) {
         help.add(parser.CLASS);
         help.add(parser.className);
@@ -82,7 +82,7 @@ class Main {
       builder = new SparkClassCommandBuilder(className, args);
     }
 
-    Map<String, String> env = new HashMap<String, String>();
+    Map<String, String> env = new HashMap<>();
     List<String> cmd = builder.buildCommand(env);
     if (printLaunchCommand) {
       System.err.println("Spark Command: " + join(" ", cmd));
@@ -130,7 +130,7 @@ class Main {
       return cmd;
     }
 
-    List<String> newCmd = new ArrayList<String>();
+    List<String> newCmd = new ArrayList<>();
     newCmd.add("env");
 
     for (Map.Entry<String, String> e : childEnv.entrySet()) {
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index e575fd3308..40187236f2 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -44,7 +44,7 @@ class SparkClassCommandBuilder extends AbstractCommandBuilder {
 
   @Override
   public List<String> buildCommand(Map<String, String> env) throws IOException {
-    List<String> javaOptsKeys = new ArrayList<String>();
+    List<String> javaOptsKeys = new ArrayList<>();
     String memKey = null;
     String extraClassPath = null;
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
index 20e6003a00..a542159901 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -75,7 +75,7 @@ public class SparkLauncher {
   /** Used internally to create unique logger names. */
   private static final AtomicInteger COUNTER = new AtomicInteger();
 
-  static final Map<String, String> launcherConfig = new HashMap<String, String>();
+  static final Map<String, String> launcherConfig = new HashMap<>();
 
   /**
    * Set a configuration value for the launcher library. These config values do not affect the
@@ -428,7 +428,7 @@ public class SparkLauncher {
   }
 
   private ProcessBuilder createBuilder() {
-    List<String> cmd = new ArrayList<String>();
+    List<String> cmd = new ArrayList<>();
     String script = isWindows() ? "spark-submit.cmd" : "spark-submit";
     cmd.add(join(File.separator, builder.getSparkHome(), "bin", script));
     cmd.addAll(builder.buildSparkSubmitArgs());
@@ -437,7 +437,7 @@ public class SparkLauncher {
     // preserved, otherwise the batch interpreter will mess up the arguments. Batch scripts are
     // weird.
     if (isWindows()) {
-      List<String> winCmd = new ArrayList<String>();
+      List<String> winCmd = new ArrayList<>();
       for (String arg : cmd) {
         winCmd.add(quoteForBatchScript(arg));
       }
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index 269c89c310..b2dd6ac4c3 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -67,7 +67,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
    * command line parsing works. This maps the class name to the resource to use when calling
    * spark-submit.
   */
-  private static final Map<String, String> specialClasses = new HashMap<String, String>();
+  private static final Map<String, String> specialClasses = new HashMap<>();
   static {
     specialClasses.put("org.apache.spark.repl.Main", "spark-shell");
     specialClasses.put("org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver",
@@ -87,12 +87,12 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
   private boolean allowsMixedArguments;
 
   SparkSubmitCommandBuilder() {
-    this.sparkArgs = new ArrayList<String>();
+    this.sparkArgs = new ArrayList<>();
     this.printInfo = false;
   }
 
   SparkSubmitCommandBuilder(List<String> args) {
-    this.sparkArgs = new ArrayList<String>();
+    this.sparkArgs = new ArrayList<>();
     List<String> submitArgs = args;
     if (args.size() > 0 && args.get(0).equals(PYSPARK_SHELL)) {
       this.allowsMixedArguments = true;
@@ -123,7 +123,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
   }
 
   List<String> buildSparkSubmitArgs() {
-    List<String> args = new ArrayList<String>();
+    List<String> args = new ArrayList<>();
     SparkSubmitOptionParser parser = new SparkSubmitOptionParser();
 
     if (verbose) {
@@ -244,7 +244,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
 
     // The executable is the PYSPARK_DRIVER_PYTHON env variable set by the pyspark script,
     // followed by PYSPARK_DRIVER_PYTHON_OPTS.
-    List<String> pyargs = new ArrayList<String>();
+    List<String> pyargs = new ArrayList<>();
     pyargs.add(firstNonEmpty(System.getenv("PYSPARK_DRIVER_PYTHON"), "python"));
     String pyOpts = System.getenv("PYSPARK_DRIVER_PYTHON_OPTS");
     if (!isEmpty(pyOpts)) {
@@ -270,7 +270,7 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
     env.put("R_PROFILE_USER",
       join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));
 
-    List<String> args = new ArrayList<String>();
+    List<String> args = new ArrayList<>();
     args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
     return args;
   }
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
index 6aad47adbc..d36731840b 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
@@ -73,7 +73,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
       "spark.randomOption=foo",
       parser.CONF,
       SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH + "=/driverLibPath");
-    Map<String, String> env = new HashMap<String, String>();
+    Map<String, String> env = new HashMap<>();
     List<String> cmd = buildCommand(sparkSubmitArgs, env);
 
     assertTrue(findInStringList(env.get(CommandBuilderUtils.getLibPathEnvName()),
@@ -125,7 +125,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
       "--master=foo",
       "--deploy-mode=bar");
 
-    Map<String, String> env = new HashMap<String, String>();
+    Map<String, String> env = new HashMap<>();
     List<String> cmd = buildCommand(sparkSubmitArgs, env);
     assertEquals("python", cmd.get(cmd.size() - 1));
     assertEquals(
@@ -142,7 +142,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
       "script.py",
       "arg1");
 
-    Map<String, String> env = new HashMap<String, String>();
+    Map<String, String> env = new HashMap<>();
     List<String> cmd = buildCommand(sparkSubmitArgs, env);
 
     assertEquals("foo", findArgValue(cmd, "--master"));
@@ -178,7 +178,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
         + "/launcher/src/test/resources");
     }
 
-    Map<String, String> env = new HashMap<String, String>();
+    Map<String, String> env = new HashMap<>();
     List<String> cmd = launcher.buildCommand(env);
 
     // Checks below are different for driver and non-driver mode.
@@ -258,7 +258,7 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
   }
 
   private Map<String, String> parseConf(List<String> cmd, SparkSubmitOptionParser parser) {
-    Map<String, String> conf = new HashMap<String, String>();
+    Map<String, String> conf = new HashMap<>();
     for (int i = 0; i < cmd.size(); i++) {
       if (cmd.get(i).equals(parser.CONF)) {
         String[] val = cmd.get(i + 1).split("=", 2);
--
cgit v1.2.3