From b8f4379ba1c5c1a8f3b4c88bd97031dc8ad2dfea Mon Sep 17 00:00:00 2001
From: somideshmukh <somilde@us.ibm.com>
Date: Wed, 18 Nov 2015 08:51:01 +0000
Subject: [PATCH] [SPARK-10946][SQL] JDBC - Use Statement.executeUpdate instead
 of PreparedStatement.executeUpdate for DDLs

Applied the same DDL-execution change to the JDBC data source utilities (`JdbcUtils.dropTable`).

Author: somideshmukh <somilde@us.ibm.com>

Closes #9733 from somideshmukh/SomilBranch-1.1.
---
 .../src/main/scala/org/apache/spark/sql/DataFrameWriter.scala   | 2 +-
 .../apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index e63a4d5e8b..03867beb78 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -297,7 +297,7 @@ final class DataFrameWriter private[sql](df: DataFrame) {
       if (!tableExists) {
         val schema = JdbcUtils.schemaString(df, url)
         val sql = s"CREATE TABLE $table ($schema)"
-        conn.prepareStatement(sql).executeUpdate()
+        conn.createStatement.executeUpdate(sql)
       }
     } finally {
       conn.close()
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 32d28e5937..7375a5c091 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -55,7 +55,7 @@ object JdbcUtils extends Logging {
    * Drops a table from the JDBC database.
    */
   def dropTable(conn: Connection, table: String): Unit = {
-    conn.prepareStatement(s"DROP TABLE $table").executeUpdate()
+    conn.createStatement.executeUpdate(s"DROP TABLE $table")
   }
 
   /**
-- 
GitLab