diff --git a/core/pom.xml b/core/pom.xml
index 57a95328c397b4e2053379d1b5e9b9cb3f7fb706..d8687bf9911b3d45a26b93310041bf359e66f28c 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -87,6 +87,10 @@
       <groupId>org.apache.mesos</groupId>
       <artifactId>mesos</artifactId>
     </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-all</artifactId>
+    </dependency>
     <dependency>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
diff --git a/pom.xml b/pom.xml
index d7cdc591cf487d32e43186b32479031c7279c6df..eda18fdd12d333f254da20e8831855b5d8a1d585 100644
--- a/pom.xml
+++ b/pom.xml
@@ -256,6 +256,11 @@
         <artifactId>mesos</artifactId>
         <version>${mesos.version}</version>
       </dependency>
+      <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty-all</artifactId>
+        <version>4.0.0.Beta2</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.derby</groupId>
         <artifactId>derby</artifactId>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 234b021c93d07aa49e4e54ada4d2b0f6a8909e62..0ea23b446fd31baad7011873b09c183feba169bb 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -142,7 +142,6 @@ object SparkBuild extends Build {
     ),
 
     libraryDependencies ++= Seq(
-      "io.netty" % "netty" % "3.5.3.Final",
      "com.google.guava" % "guava" % "11.0.1",
      "log4j" % "log4j" % "1.2.16",
      "org.slf4j" % "slf4j-api" % slf4jVersion,
diff --git a/run b/run
index c744bbd3dc86c79af664ad24be1c418ab62c9f7d..c0065c53f17090aef3ef0627477d6eaf9360de84 100755
--- a/run
+++ b/run
@@ -102,12 +102,18 @@
 STREAMING_DIR="$FWDIR/streaming"
 PYSPARK_DIR="$FWDIR/python"
 
 # Exit if the user hasn't compiled Spark
-if [ ! -e "$REPL_DIR/target" ]; then
-  echo "Failed to find Spark classes in $REPL_DIR/target" >&2
+if [ ! -e "$CORE_DIR/target" ]; then
+  echo "Failed to find Spark classes in $CORE_DIR/target" >&2
   echo "You need to compile Spark before running this program" >&2
   exit 1
 fi
+if [[ "$@" = *repl* && ! -e "$REPL_DIR/target" ]]; then
+  echo "Failed to find Spark classes in $REPL_DIR/target" >&2
+  echo "You need to compile Spark repl module before running this program" >&2
+  exit 1
+fi
+
 # Build up classpath
 CLASSPATH="$SPARK_CLASSPATH"
 CLASSPATH="$CLASSPATH:$FWDIR/conf"