From 284ba90958df2d6efc08e3f8381bb9ef09f8b322 Mon Sep 17 00:00:00 2001
From: Patrick Wendell <pwendell@gmail.com>
Date: Mon, 25 Feb 2013 19:40:52 -0800
Subject: [PATCH] createNewSparkContext should use sparkHome/jars/environment.

This fixes a bug introduced by Matei's recent change: createNewSparkContext was constructing the SparkContext with only master and appName, silently dropping the caller-supplied sparkHome, jars, and environment.
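
A minimal sketch of the intended helper after this patch, assuming createNewSparkContext already receives sparkHome, jars, and environment from its callers (the Seq[String] and Map[String, String] parameter types are assumptions based on the usual SparkContext constructor, not taken from this diff):

    protected[streaming] def createNewSparkContext(
        master: String,
        appName: String,
        sparkHome: String,
        jars: Seq[String],                      // assumed type
        environment: Map[String, String]        // assumed type
      ): SparkContext = {
      // Keep the existing metadata-cleaner default.
      if (MetadataCleaner.getDelaySeconds < 0) {
        MetadataCleaner.setDelaySeconds(3600)
      }
      // Forward the caller-supplied settings instead of dropping them.
      new SparkContext(master, appName, sparkHome, jars, environment)
    }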
---
 streaming/src/main/scala/spark/streaming/StreamingContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/streaming/src/main/scala/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/spark/streaming/StreamingContext.scala
index 31b5d2c8bc..b8b60aab43 100644
--- a/streaming/src/main/scala/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/spark/streaming/StreamingContext.scala
@@ -505,7 +505,7 @@ object StreamingContext {
     if (MetadataCleaner.getDelaySeconds < 0) {
       MetadataCleaner.setDelaySeconds(3600)
     }
-    new SparkContext(master, appName)
+    new SparkContext(master, appName, sparkHome, jars, environment)
   }
 
   protected[streaming] def rddToFileName[T](prefix: String, suffix: String, time: Time): String = {
-- 
GitLab