Commit 48cac417 authored by Denny

Renamed EXAMPLES_JAR to SPARK_EXAMPLES_JAR
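For context, every example below follows the same construction pattern; here is a minimal sketch of an example reading the renamed variable, assuming the run script has exported SPARK_EXAMPLES_JAR as in the script hunk at the end of this diff (the object name EnvCheck, its usage message, and the null check are illustrative additions, not part of the commit):

// Hypothetical minimal example, not in the commit. Only the names
// SPARK_EXAMPLES_JAR, SPARK_HOME, and the SparkContext constructor
// shape come from the diff below.
import spark._

object EnvCheck {
  def main(args: Array[String]) {
    if (args.length == 0) {
      System.err.println("Usage: EnvCheck <master>")
      System.exit(1)
    }
    // The run script's for-loop exports SPARK_EXAMPLES_JAR; it is null
    // when launched without the script, so fail fast with a clear message.
    val jar = System.getenv("SPARK_EXAMPLES_JAR")
    if (jar == null) {
      System.err.println("SPARK_EXAMPLES_JAR is not set; launch via the run script")
      System.exit(1)
    }
    val sc = new SparkContext(args(0), "EnvCheck", System.getenv("SPARK_HOME"), List(jar))
    println(sc.parallelize(1 to 10).count())
    System.exit(0)
  }
}

The fourth SparkContext argument is the list of jars shipped to the workers, which is why each example must see the variable the run script exports.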

parent 63c2020f
Showing 13 changed files with 13 additions and 13 deletions
@@ -9,7 +9,7 @@ object BroadcastTest {
       System.exit(1)
     }
-    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val slices = if (args.length > 1) args(1).toInt else 2
     val num = if (args.length > 2) args(2).toInt else 1000000

@@ -9,7 +9,7 @@ object ExceptionHandlingTest {
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "ExceptionHandlingTest", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "ExceptionHandlingTest", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     sc.parallelize(0 until sc.defaultParallelism).foreach { i =>
       if (Math.random > 0.75)
         throw new Exception("Testing exception handling")

@@ -16,7 +16,7 @@ object GroupByTest {
     var valSize = if (args.length > 3) args(3).toInt else 1000
     var numReducers = if (args.length > 4) args(4).toInt else numMappers
-    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val pairs1 = sc.parallelize(0 until numMappers, numMappers).flatMap { p =>
       val ranGen = new Random

@@ -4,7 +4,7 @@ import spark._
 object HdfsTest {
   def main(args: Array[String]) {
-    val sc = new SparkContext(args(0), "HdfsTest", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "HdfsTest", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val file = sc.textFile(args(1))
     val mapped = file.map(s => s.length).cache()
     for (iter <- 1 to 10) {

@@ -9,7 +9,7 @@ object MultiBroadcastTest {
       System.exit(1)
     }
-    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val slices = if (args.length > 1) args(1).toInt else 2
     val num = if (args.length > 2) args(2).toInt else 1000000

@@ -18,7 +18,7 @@ object SimpleSkewedGroupByTest {
     var numReducers = if (args.length > 4) args(4).toInt else numMappers
     var ratio = if (args.length > 5) args(5).toInt else 5.0
-    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val pairs1 = sc.parallelize(0 until numMappers, numMappers).flatMap { p =>
       val ranGen = new Random

@@ -16,7 +16,7 @@ object SkewedGroupByTest {
     var valSize = if (args.length > 3) args(3).toInt else 1000
     var numReducers = if (args.length > 4) args(4).toInt else numMappers
-    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val pairs1 = sc.parallelize(0 until numMappers, numMappers).flatMap { p =>
       val ranGen = new Random

@@ -112,7 +112,7 @@ object SparkALS {
       }
     }
     printf("Running with M=%d, U=%d, F=%d, iters=%d\n", M, U, F, ITERATIONS);
-    val spark = new SparkContext(host, "SparkALS", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val spark = new SparkContext(host, "SparkALS", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val R = generateR()

@@ -29,7 +29,7 @@ object SparkHdfsLR {
       System.err.println("Usage: SparkHdfsLR <master> <file> <iters>")
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "SparkHdfsLR", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "SparkHdfsLR", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val lines = sc.textFile(args(1))
     val points = lines.map(parsePoint _).cache()
     val ITERATIONS = args(2).toInt

@@ -37,7 +37,7 @@ object SparkKMeans {
       System.err.println("Usage: SparkLocalKMeans <master> <file> <k> <convergeDist>")
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "SparkLocalKMeans", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "SparkLocalKMeans", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val lines = sc.textFile(args(1))
     val data = lines.map(parseVector _).cache()
     val K = args(2).toInt

@@ -28,7 +28,7 @@ object SparkLR {
       System.err.println("Usage: SparkLR <host> [<slices>]")
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "SparkLR", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val sc = new SparkContext(args(0), "SparkLR", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val numSlices = if (args.length > 1) args(1).toInt else 2
     val data = generateData

@@ -12,7 +12,7 @@ object SparkPi {
       System.exit(1)
     }
-    val spark = new SparkContext(args(0), "SparkPi", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
+    val spark = new SparkContext(args(0), "SparkPi", System.getenv("SPARK_HOME"), List(System.getenv("SPARK_EXAMPLES_JAR")))
     val slices = if (args.length > 1) args(1).toInt else 2
     val n = 100000 * slices
     val count = spark.parallelize(1 to n, slices).map { i =>

@@ -65,7 +65,7 @@ export CLASSPATH # Needed for spark-shell
 # The JAR file used in the examples.
 for jar in `find $EXAMPLES_DIR/target/scala-$SCALA_VERSION -name '*jar'`; do
-  export EXAMPLES_JAR="$jar"
+  export SPARK_EXAMPLES_JAR="$jar"
 done
 if [ -n "$SCALA_HOME" ]; then