diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index e1ce66a547bc6a6cfee9939f87a68b8ff71cedfc..531646891499bd1ca32c22ee352ee615859eefe2 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -145,9 +145,8 @@ class SparkContext(config: SparkConf) extends Logging {
     this(SparkContext.updatedConf(new SparkConf(), master, appName, sparkHome, jars, environment))
   }
 
-  // NOTE: The below constructors could be consolidated using default arguments. Due to
-  // Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
-  // Until we have a good workaround for that bug the constructors remain broken out.
+  // The following constructors are required when Java code accesses SparkContext directly,
+  // since Scala default arguments cannot be used from Java. See SI-4278 for details.
 
   /**
    * Alternative constructor that allows setting common Spark properties directly
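
For context, here is a hedged sketch of what the consolidated form would look like and why it is hostile to Java callers. The class and object names below are hypothetical illustrations, not Spark code:

```scala
// Hypothetical sketch, NOT Spark's actual code: the broken-out constructors
// consolidated into a single constructor with Scala default arguments.
class ConsolidatedContext(
    master: String,
    appName: String,
    sparkHome: String = null,
    jars: Seq[String] = Nil,
    environment: Map[String, String] = Map.empty)

object DefaultArgsDemo {
  def main(args: Array[String]): Unit = {
    // Scala callers can omit any trailing defaulted arguments:
    new ConsolidatedContext("local", "name")
    new ConsolidatedContext("local", "name", "/opt/spark")
    // Java callers, however, would see only the single five-argument
    // constructor: the default values compile to synthetic <init>$default$N
    // methods on the companion object, which Java cannot call cleanly.
    // Hence SparkContext keeps each overload written out explicitly.
  }
}
```

The new JavaSparkContextSuite below locks this contract in by calling every overload from Java.
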
diff --git a/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java b/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java
new file mode 100644
index 0000000000000000000000000000000000000000..7e9cc70d8651f2ec3cf400f011dabbae6c8501b9
--- /dev/null
+++ b/core/src/test/java/test/org/apache/spark/JavaSparkContextSuite.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package test.org.apache.spark;
+
+import java.io.*;
+
+import scala.collection.immutable.List;
+import scala.collection.immutable.List$;
+import scala.collection.immutable.Map;
+import scala.collection.immutable.Map$;
+
+import org.junit.Test;
+
+import org.apache.spark.api.java.*;
+import org.apache.spark.*;
+
+/**
+ * Java apps can use both the Java-friendly JavaSparkContext and the Scala SparkContext.
+ */
+public class JavaSparkContextSuite implements Serializable {
+
+  @Test
+  public void javaSparkContext() {
+    String[] jars = new String[] {};
+    java.util.Map<String, String> environment = new java.util.HashMap<>();
+
+    new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
+    new JavaSparkContext("local", "name", new SparkConf()).stop();
+    new JavaSparkContext("local", "name").stop();
+    new JavaSparkContext("local", "name", "sparkHome", "jarFile").stop();
+    new JavaSparkContext("local", "name", "sparkHome", jars).stop();
+    new JavaSparkContext("local", "name", "sparkHome", jars, environment).stop();
+  }
+
+  @Test
+  public void scalaSparkContext() {
+    List<String> jars = List$.MODULE$.empty();
+    Map<String, String> environment = Map$.MODULE$.empty();
+
+    new SparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
+    new SparkContext("local", "name", new SparkConf()).stop();
+    new SparkContext("local", "name").stop();
+    new SparkContext("local", "name", "sparkHome").stop();
+    new SparkContext("local", "name", "sparkHome", jars).stop();
+    new SparkContext("local", "name", "sparkHome", jars, environment).stop();
+  }
+}
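
A side note on the Scala-collection plumbing in scalaSparkContext above: `List$.MODULE$.empty()` and `Map$.MODULE$.empty()` are simply the Java spellings of ordinary Scala companion-object calls. A minimal Scala sketch of the equivalent setup, for comparison (the object name is hypothetical):

```scala
import org.apache.spark.SparkContext

object ScalaEquivalent {
  def main(args: Array[String]): Unit = {
    // What the Java test writes as List$.MODULE$.empty() / Map$.MODULE$.empty():
    val jars: Seq[String] = List.empty[String]
    val environment: Map[String, String] = Map.empty

    // The same broken-out constructor the Java test exercises.
    new SparkContext("local", "name", "sparkHome", jars, environment).stop()
  }
}
```
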