Commit f73891e0 authored by Xin Ren, committed by Sean Owen

[MINOR] Fix Java style errors and remove unused imports

## What changes were proposed in this pull request?

Fix Java style errors and remove unused imports, which were found incidentally.

## How was this patch tested?

Tested on my local machine.

Author: Xin Ren <iamshrek@126.com>

Closes #14161 from keypointt/SPARK-16437.
parent f156136d
@@ -24,7 +24,6 @@ import java.util.LinkedList;
 import java.util.Map;
 
 import org.apache.spark.unsafe.Platform;
-import org.apache.spark.unsafe.memory.MemoryAllocator;
 
 /**
  * A simple {@link MemoryAllocator} that can allocate up to 16GB using a JVM long primitive array.
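The dropped import names a type in the allocator's own package; Java resolves same-package types without an import, so the line gets reported as unused. A minimal sketch of that rule, using hypothetical package and class names rather than the Spark sources:

package com.example.memory;

// import com.example.memory.Allocator;  // redundant: Allocator is in the same package,
                                          // so style checkers flag this as an unused import

public class HeapAllocatorSketch implements Allocator {
  @Override
  public long allocate(long size) {
    return size; // placeholder body; only the import behaviour matters here
  }
}

interface Allocator {
  long allocate(long size);
}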
@@ -23,12 +23,12 @@ public interface MemoryAllocator {
    * Whether to fill newly allocated and deallocated memory with 0xa5 and 0x5a bytes respectively.
    * This helps catch misuse of uninitialized or freed memory, but imposes some overhead.
    */
-  public static final boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
+  boolean MEMORY_DEBUG_FILL_ENABLED = Boolean.parseBoolean(
     System.getProperty("spark.memory.debugFill", "false"));
 
   // Same as jemalloc's debug fill values.
-  public static final byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
-  public static final byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
+  byte MEMORY_DEBUG_FILL_CLEAN_VALUE = (byte)0xa5;
+  byte MEMORY_DEBUG_FILL_FREED_VALUE = (byte)0x5a;
 
   /**
    * Allocates a contiguous block of memory. Note that the allocated memory is not guaranteed
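This hunk only strips modifiers the language already implies: every field declared in a Java interface is implicitly public, static, and final, which is why checkstyle reports the explicit modifiers as redundant. A minimal sketch with a hypothetical interface (not the Spark code):

interface DebugFlags {
  // Implicitly public static final, exactly as if the modifiers were written out.
  boolean VERBOSE = Boolean.parseBoolean(System.getProperty("example.verbose", "false"));
  byte FILL_VALUE = (byte) 0xa5;
}

class DebugFlagsDemo {
  public static void main(String[] args) {
    // The constants are referenced the same way with or without the explicit modifiers.
    System.out.println(DebugFlags.VERBOSE + " " + DebugFlags.FILL_VALUE);
  }
}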
@@ -780,8 +780,7 @@ private[sql] object ParquetFileFormat extends Logging {
     val assumeBinaryIsString = sparkSession.sessionState.conf.isParquetBinaryAsString
     val assumeInt96IsTimestamp = sparkSession.sessionState.conf.isParquetINT96AsTimestamp
     val writeLegacyParquetFormat = sparkSession.sessionState.conf.writeLegacyParquetFormat
-    val serializedConf =
-      new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
+    val serializedConf = new SerializableConfiguration(sparkSession.sessionState.newHadoopConf())
 
     // !! HACK ALERT !!
     //
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.sources
 
 import org.apache.spark.sql._
-import org.apache.spark.sql.internal.SQLConf
 
 private[sql] abstract class DataSourceTest extends QueryTest {