From 71a65825c5d5d0886ac3e11f9945cfcb39573ac3 Mon Sep 17 00:00:00 2001
From: John Muller <jmuller@us.imshealth.com>
Date: Thu, 15 Sep 2016 10:00:28 +0100
Subject: [PATCH] [SPARK-17536][SQL] Minor performance improvement to JDBC
 batch inserts

## What changes were proposed in this pull request?

Optimize the per-row while loop used during JDBC batch inserts by hoisting the loop-invariant field-count computation (`rddSchema.fields.length`) out of the loop.

## How was this patch tested?

Existing unit tests were run, specifically `mvn test` for the sql module.

Author: John Muller <jmuller@us.imshealth.com>

Closes #15098 from blue666man/SPARK-17536.
---
 .../apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 132472ad0c..b09fd511a9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -590,12 +590,12 @@ object JdbcUtils extends Logging {
       val stmt = insertStatement(conn, table, rddSchema, dialect)
       val setters: Array[JDBCValueSetter] = rddSchema.fields.map(_.dataType)
         .map(makeSetter(conn, dialect, _)).toArray
+      val numFields = rddSchema.fields.length
 
       try {
         var rowCount = 0
         while (iterator.hasNext) {
           val row = iterator.next()
-          val numFields = rddSchema.fields.length
           var i = 0
           while (i < numFields) {
             if (row.isNullAt(i)) {
-- 
GitLab