Diffstat (limited to 'repl')
-rw-r--r--  repl/pom.xml                      280
-rwxr-xr-x  repl/src/deb/bin/run               41
-rwxr-xr-x  repl/src/deb/bin/spark-executor     5
-rwxr-xr-x  repl/src/deb/bin/spark-shell        4
-rw-r--r--  repl/src/deb/control/control        8
5 files changed, 338 insertions, 0 deletions
diff --git a/repl/pom.xml b/repl/pom.xml
new file mode 100644
index 0000000000..1c5cb2c7fb
--- /dev/null
+++ b/repl/pom.xml
@@ -0,0 +1,280 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.spark-project</groupId>
+    <artifactId>parent</artifactId>
+    <version>0.6.1-SNAPSHOT</version>
+  </parent>
+
+  <groupId>org.spark-project</groupId>
+  <artifactId>spark-repl</artifactId>
+  <packaging>jar</packaging>
+  <name>Spark Project REPL</name>
+  <url>http://spark-project.org/</url>
+
+  <properties>
+    <deb.install.path>/usr/share/spark</deb.install.path>
+    <deb.user>root</deb.user>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>jline</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.scalatest</groupId>
+      <artifactId>scalatest_${scala.version}</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.scalacheck</groupId>
+      <artifactId>scalacheck_${scala.version}</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.scalatest</groupId>
+        <artifactId>scalatest-maven-plugin</artifactId>
+        <configuration>
+          <environmentVariables>
+            <SPARK_HOME>${basedir}/..</SPARK_HOME>
+            <SPARK_TESTING>1</SPARK_TESTING>
+          </environmentVariables>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>hadoop1</id>
+      <properties>
+        <classifier>hadoop1</classifier>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.spark-project</groupId>
+          <artifactId>spark-core</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop1</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.spark-project</groupId>
+          <artifactId>spark-bagel</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop1</classifier>
+          <scope>runtime</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.spark-project</groupId>
+          <artifactId>spark-examples</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop1</classifier>
+          <scope>runtime</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+        </dependency>
+      </dependencies>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-shade-plugin</artifactId>
+            <configuration>
+              <shadedArtifactAttached>true</shadedArtifactAttached>
+              <shadedClassifierName>shaded-hadoop1</shadedClassifierName>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+            </configuration>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <goals>
+                  <goal>shade</goal>
+                </goals>
+                <configuration>
+                  <transformers>
+                    <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                    <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                      <mainClass>spark.repl.Main</mainClass>
+                    </transformer>
+                  </transformers>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>hadoop2</id>
+      <properties>
+        <classifier>hadoop2</classifier>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.spark-project</groupId>
+          <artifactId>spark-core</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop2</classifier>
+        </dependency>
+        <dependency>
+          <groupId>org.spark-project</groupId>
+          <artifactId>spark-bagel</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop2</classifier>
+          <scope>runtime</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.spark-project</groupId>
+          <artifactId>spark-examples</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop2</classifier>
+          <scope>runtime</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </dependency>
+      </dependencies>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-shade-plugin</artifactId>
+            <configuration>
+              <shadedArtifactAttached>true</shadedArtifactAttached>
+              <shadedClassifierName>shaded-hadoop2</shadedClassifierName>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+            </configuration>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <goals>
+                  <goal>shade</goal>
+                </goals>
+                <configuration>
+                  <transformers>
+                    <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                    <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+                      <resource>reference.conf</resource>
+                    </transformer>
+                    <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                      <mainClass>spark.repl.Main</mainClass>
+                    </transformer>
+                  </transformers>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>deb</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>buildnumber-maven-plugin</artifactId>
+            <version>1.1</version>
+            <executions>
+              <execution>
+                <phase>validate</phase>
+                <goals>
+                  <goal>create</goal>
+                </goals>
+                <configuration>
+                  <shortRevisionLength>8</shortRevisionLength>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.vafer</groupId>
+            <artifactId>jdeb</artifactId>
+            <version>0.11</version>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <goals>
+                  <goal>jdeb</goal>
+                </goals>
+                <configuration>
+                  <deb>${project.build.directory}/${project.artifactId}-${classifier}_${project.version}-${buildNumber}.deb</deb>
+                  <dataSet>
+                    <data>
+                      <src>${project.build.directory}/${project.artifactId}-${project.version}-shaded-${classifier}.jar</src>
+                      <type>file</type>
+                      <mapper>
+                        <type>perm</type>
+                        <user>${deb.user}</user>
+                        <group>${deb.user}</group>
+                        <prefix>${deb.install.path}</prefix>
+                      </mapper>
+                    </data>
+                    <data>
+                      <src>${basedir}/src/deb/bin</src>
+                      <type>directory</type>
+                      <mapper>
+                        <type>perm</type>
+                        <user>${deb.user}</user>
+                        <group>${deb.user}</group>
+                        <prefix>${deb.install.path}</prefix>
+                        <filemode>744</filemode>
+                      </mapper>
+                    </data>
+                  </dataSet>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
diff --git a/repl/src/deb/bin/run b/repl/src/deb/bin/run
new file mode 100755
index 0000000000..c54c9e97a0
--- /dev/null
+++ b/repl/src/deb/bin/run
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+SCALA_VERSION=2.9.2
+
+# Figure out where the Scala framework is installed
+FWDIR="$(cd `dirname $0`; pwd)"
+
+# Export this as SPARK_HOME
+export SPARK_HOME="$FWDIR"
+
+# Load environment variables from conf/spark-env.sh, if it exists
+if [ -e $FWDIR/conf/spark-env.sh ] ; then
+  . $FWDIR/conf/spark-env.sh
+fi
+
+# Figure out how much memory to use per executor and set it as an environment
+# variable so that our process sees it and can report it to Mesos
+if [ -z "$SPARK_MEM" ] ; then
+  SPARK_MEM="512m"
+fi
+export SPARK_MEM
+
+# Set JAVA_OPTS to be able to load native libraries and to set heap size
+JAVA_OPTS="$SPARK_JAVA_OPTS"
+JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH"
+JAVA_OPTS+=" -Xms$SPARK_MEM -Xmx$SPARK_MEM"
+# Load extra JAVA_OPTS from conf/java-opts, if it exists
+if [ -e $FWDIR/conf/java-opts ] ; then
+  JAVA_OPTS+=" `cat $FWDIR/conf/java-opts`"
+fi
+export JAVA_OPTS
+
+# Build up classpath
+CLASSPATH="$SPARK_CLASSPATH"
+CLASSPATH+=":$FWDIR/conf"
+for jar in `find $FWDIR -name '*jar'`; do
+  CLASSPATH+=":$jar"
+done
+export CLASSPATH
+
+exec java -Dscala.usejavacp=true -Djline.shutdownhook=true -cp "$CLASSPATH" $JAVA_OPTS $EXTRA_ARGS "$@"
diff --git a/repl/src/deb/bin/spark-executor b/repl/src/deb/bin/spark-executor
new file mode 100755
index 0000000000..47b9cccdfe
--- /dev/null
+++ b/repl/src/deb/bin/spark-executor
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+FWDIR="$(cd `dirname $0`; pwd)"
+echo "Running spark-executor with framework dir = $FWDIR"
+exec $FWDIR/run spark.executor.MesosExecutorBackend
diff --git a/repl/src/deb/bin/spark-shell b/repl/src/deb/bin/spark-shell
new file mode 100755
index 0000000000..219c66eb0b
--- /dev/null
+++ b/repl/src/deb/bin/spark-shell
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+FWDIR="$(cd `dirname $0`; pwd)"
+exec $FWDIR/run spark.repl.Main "$@"
diff --git a/repl/src/deb/control/control b/repl/src/deb/control/control
new file mode 100644
index 0000000000..6586986c76
--- /dev/null
+++ b/repl/src/deb/control/control
@@ -0,0 +1,8 @@
+Package: spark-repl
+Version: [[version]]-[[buildNumber]]
+Section: misc
+Priority: extra
+Architecture: all
+Maintainer: Matei Zaharia <matei.zaharia@gmail.com>
+Description: spark repl
+Distribution: development
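
For reference, a minimal sketch of how the packaging introduced here might be exercised locally. The hadoop1/hadoop2 and deb profile ids, the /usr/share/spark install path, and the .deb naming pattern are taken from the pom above; the exact mvn invocation and the resulting file name are assumptions, not part of this commit.

    # Build the shaded REPL jar and the Debian package for the Hadoop 1 variant
    # (assumes Maven is installed and the rest of the spark-project build is available).
    mvn -Phadoop1,deb -DskipTests package

    # jdeb writes the package under repl/target/, named roughly
    # spark-repl-hadoop1_0.6.1-SNAPSHOT-<buildNumber>.deb; inspect it before installing:
    dpkg -c repl/target/spark-repl-hadoop1_*.deb

Installing the package places the shaded jar and the run, spark-shell, and spark-executor scripts under /usr/share/spark (deb.install.path), owned by root (deb.user).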